From f61cc6df435880b9919d5109e27a6583bbf57e1c Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Tue, 18 Jul 2023 16:41:46 +0300 Subject: [PATCH 01/58] Initial integration of Icicle GPU acceleration library for groth16 with BN254. Full replacement of CPU prove with GPU prove can be used with the build tag 'gpu'. --- backend/groth16/bn254/icicle_wrapper_gpu.go | 105 +++ backend/groth16/bn254/prove.go | 2 + backend/groth16/bn254/prove_gpu.go | 356 +++++++++ backend/groth16/bn254/setup.go | 2 + backend/groth16/bn254/setup_gpu.go | 796 ++++++++++++++++++++ go.mod | 3 +- go.sum | 2 + 7 files changed, 1265 insertions(+), 1 deletion(-) create mode 100644 backend/groth16/bn254/icicle_wrapper_gpu.go create mode 100644 backend/groth16/bn254/prove_gpu.go create mode 100644 backend/groth16/bn254/setup_gpu.go diff --git a/backend/groth16/bn254/icicle_wrapper_gpu.go b/backend/groth16/bn254/icicle_wrapper_gpu.go new file mode 100644 index 0000000000..295688b0ab --- /dev/null +++ b/backend/groth16/bn254/icicle_wrapper_gpu.go @@ -0,0 +1,105 @@ +//go:build gpu + +package groth16 + +import ( + "fmt" + "unsafe" + + curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + cudawrapper "github.com/ingonyama-zk/icicle/goicicle" + icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" +) + +type OnDeviceData struct { + p unsafe.Pointer + size int +} + +func INttOnDevice(scalars_d, twiddles_d, cosetPowers_d unsafe.Pointer, size, sizeBytes int, isCoset bool) unsafe.Pointer { + icicle.ReverseScalars(scalars_d, size) + + scalarsInterp := icicle.Interpolate(scalars_d, twiddles_d, cosetPowers_d, size, isCoset) + + return scalarsInterp +} + + +func NttOnDevice(scalars_out, scalars_d, twiddles_d, coset_powers_d unsafe.Pointer, size, twid_size, size_bytes int, isCoset bool) { + res := icicle.Evaluate(scalars_out, scalars_d, twiddles_d, coset_powers_d, size, twid_size, isCoset) + + if res != 0 { + fmt.Print("Issue evaluating") + } + + icicle.ReverseScalars(scalars_out, size) + + return +} + +func MsmOnDevice(scalars_d, points_d unsafe.Pointer, count int, convert bool) (curve.G1Jac, unsafe.Pointer, error) { + out_d, _ := cudawrapper.CudaMalloc(96) + + icicle.Commit(out_d, scalars_d, points_d, count, 10) + + if convert { + outHost := make([]icicle.PointBN254, 1) + cudawrapper.CudaMemCpyDtoH[icicle.PointBN254](outHost, out_d, 96) + return *outHost[0].ToGnarkJac(), nil, nil + } + + return curve.G1Jac{}, out_d, nil +} + +func MsmG2OnDevice(scalars_d, points_d unsafe.Pointer, count int, convert bool) (curve.G2Jac, unsafe.Pointer, error) { + out_d, _ := cudawrapper.CudaMalloc(192) + + icicle.CommitG2(out_d, scalars_d, points_d, count, 10) + + if convert { + outHost := make([]icicle.G2Point, 1) + cudawrapper.CudaMemCpyDtoH[icicle.G2Point](outHost, out_d, 192) + return *outHost[0].ToGnarkJac(), nil, nil + } + + return curve.G2Jac{}, out_d, nil +} + +func PolyOps(a_d, b_d, c_d, den_d unsafe.Pointer, size int) { + ret := icicle.VecScalarMulMod(a_d, b_d, size) + + if ret != 0 { + fmt.Print("Vector mult a*b issue") + } + ret = icicle.VecScalarSub(a_d, c_d, size) + + if ret != 0 { + fmt.Print("Vector sub issue") + } + ret = icicle.VecScalarMulMod(a_d, den_d, size) + + if ret != 0 { + fmt.Print("Vector mult a*den issue") + } + + return +} + +func MontConvOnDevice(scalars_d unsafe.Pointer, size int, is_into bool) { + if is_into { + icicle.ToMontgomery(scalars_d, size) + } else { + icicle.FromMontgomery(scalars_d, size) + } + + return +} + +func CopyToDevice(scalars []fr.Element, bytes 
int, copyDone chan unsafe.Pointer) { + devicePtr, _ := cudawrapper.CudaMalloc(bytes) + cudawrapper.CudaMemCpyHtoD[fr.Element](devicePtr, scalars, bytes) + MontConvOnDevice(devicePtr, len(scalars), false) + + copyDone <- devicePtr +} diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index 42ec4de8b9..db84bc2991 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -1,3 +1,5 @@ +//go:build !gpu + // Copyright 2020 ConsenSys Software Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go new file mode 100644 index 0000000000..db953709e4 --- /dev/null +++ b/backend/groth16/bn254/prove_gpu.go @@ -0,0 +1,356 @@ +//go:build gpu + +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by gnark DO NOT EDIT + +package groth16 + +import ( + "github.com/consensys/gnark-crypto/ecc" + curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/groth16/internal" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + "github.com/consensys/gnark/constraint/bn254" + "github.com/consensys/gnark/constraint/solver" + "github.com/consensys/gnark/internal/utils" + "github.com/consensys/gnark/logger" + "math/big" + "time" + "unsafe" + goicicle "github.com/ingonyama-zk/icicle/goicicle" + icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" +) + +// Proof represents a Groth16 proof that was encoded with a ProvingKey and can be verified +// with a valid statement and a VerifyingKey +// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf +type Proof struct { + Ar, Krs curve.G1Affine + Bs curve.G2Affine + Commitments []curve.G1Affine // Pedersen commitments a la https://eprint.iacr.org/2022/1072 + CommitmentPok curve.G1Affine // Batched proof of knowledge of the above commitments +} + +// isValid ensures proof elements are in the correct subgroup +func (proof *Proof) isValid() bool { + return proof.Ar.IsInSubGroup() && proof.Krs.IsInSubGroup() && proof.Bs.IsInSubGroup() +} + +// CurveID returns the curveID +func (proof *Proof) CurveID() ecc.ID { + return curve.ID +} + +// Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) 
+ if err != nil { + return nil, err + } + + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + + proof := &Proof{Commitments: make([]curve.G1Affine, len(commitmentInfo))} + + solverOpts := opt.SolverOpts[:len(opt.SolverOpts):len(opt.SolverOpts)] + + privateCommittedValues := make([][]fr.Element, len(commitmentInfo)) + for i := range commitmentInfo { + solverOpts = append(solverOpts, solver.OverrideHint(commitmentInfo[i].HintID, func(i int) solver.Hint { + return func(_ *big.Int, in []*big.Int, out []*big.Int) error { + privateCommittedValues[i] = make([]fr.Element, len(commitmentInfo[i].PrivateCommitted)) + hashed := in[:len(commitmentInfo[i].PublicAndCommitmentCommitted)] + committed := in[len(hashed):] + for j, inJ := range committed { + privateCommittedValues[i][j].SetBigInt(inJ) + } + + var err error + if proof.Commitments[i], err = pk.CommitmentKeys[i].Commit(privateCommittedValues[i]); err != nil { + return err + } + + var res fr.Element + res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.BigInt(out[0]) + return err + } + }(i))) + } + + _solution, err := r1cs.Solve(fullWitness, solverOpts...) + if err != nil { + return nil, err + } + + solution := _solution.(*cs.R1CSSolution) + wireValues := []fr.Element(solution.W) + + start := time.Now() + + commitmentsSerialized := make([]byte, fr.Bytes*len(commitmentInfo)) + for i := range commitmentInfo { + copy(commitmentsSerialized[fr.Bytes*i:], wireValues[commitmentInfo[i].CommitmentIndex].Marshal()) + } + + if proof.CommitmentPok, err = pedersen.BatchProve(pk.CommitmentKeys, privateCommittedValues, commitmentsSerialized); err != nil { + return nil, err + } + + // H (witness reduction / FFT part) + var h unsafe.Pointer + chHDone := make(chan struct{}, 1) + go func() { + h = computeH(solution.A, solution.B, solution.C, pk) + solution.A = nil + solution.B = nil + solution.C = nil + chHDone <- struct{}{} + }() + + // we need to copy and filter the wireValues for each multi exp + // as pk.G1.A, pk.G1.B and pk.G2.B may have (a significant) number of point at infinity + var wireValuesADevice, wireValuesBDevice OnDeviceData + chWireValuesA, chWireValuesB := make(chan struct{}, 1), make(chan struct{}, 1) + + go func() { + wireValuesA := make([]fr.Element, len(wireValues)-int(pk.NbInfinityA)) + for i, j := 0, 0; j < len(wireValuesA); i++ { + if pk.InfinityA[i] { + continue + } + wireValuesA[j] = wireValues[i] + j++ + } + wireValuesASize := len(wireValuesA) + scalarBytes := wireValuesASize*fr.Bytes + wireValuesADevicePtr, _ := goicicle.CudaMalloc(scalarBytes) + goicicle.CudaMemCpyHtoD[fr.Element](wireValuesADevicePtr, wireValuesA, scalarBytes) + MontConvOnDevice(wireValuesADevicePtr, wireValuesASize, false) + wireValuesADevice = OnDeviceData{wireValuesADevicePtr, wireValuesASize} + + close(chWireValuesA) + }() + go func() { + wireValuesB := make([]fr.Element, len(wireValues)-int(pk.NbInfinityB)) + for i, j := 0, 0; j < len(wireValuesB); i++ { + if pk.InfinityB[i] { + continue + } + wireValuesB[j] = wireValues[i] + j++ + } + wireValuesBSize := len(wireValuesB) + scalarBytes := wireValuesBSize*fr.Bytes + wireValuesBDevicePtr, _ := goicicle.CudaMalloc(scalarBytes) + goicicle.CudaMemCpyHtoD[fr.Element](wireValuesBDevicePtr, wireValuesB, scalarBytes) + MontConvOnDevice(wireValuesBDevicePtr, wireValuesBSize, false) + wireValuesBDevice = 
OnDeviceData{wireValuesBDevicePtr, wireValuesBSize} + + close(chWireValuesB) + }() + + // sample random r and s + var r, s big.Int + var _r, _s, _kr fr.Element + if _, err := _r.SetRandom(); err != nil { + return nil, err + } + if _, err := _s.SetRandom(); err != nil { + return nil, err + } + _kr.Mul(&_r, &_s).Neg(&_kr) + + _r.BigInt(&r) + _s.BigInt(&s) + + // computes r[δ], s[δ], kr[δ] + deltas := curve.BatchScalarMultiplicationG1(&pk.G1.Delta, []fr.Element{_r, _s, _kr}) + + var bs1, ar curve.G1Jac + + computeBS1 := func() { + <-chWireValuesB + + bs1, _, _ = MsmOnDevice(wireValuesBDevice.p, pk.G1Device.B, wireValuesBDevice.size, true) + + bs1.AddMixed(&pk.G1.Beta) + bs1.AddMixed(&deltas[1]) + } + + computeAR1 := func() { + <-chWireValuesA + + ar, _, _ = MsmOnDevice(wireValuesADevice.p, pk.G1Device.A, wireValuesADevice.size, true) + ar.AddMixed(&pk.G1.Alpha) + ar.AddMixed(&deltas[0]) + proof.Ar.FromJacobian(&ar) + } + + computeKRS := func() { + // we could NOT split the Krs multiExp in 2, and just append pk.G1.K and pk.G1.Z + // however, having similar lengths for our tasks helps with parallelism + + var krs, krs2, p1 curve.G1Jac + sizeH := int(pk.Domain.Cardinality - 1) // comes from the fact the deg(H)=(n-1)+(n-1)-n=n-2 + + krs, _, _ = MsmOnDevice(h, pk.G1Device.Z, sizeH, true) + + // filter the wire values if needed + // TODO Perf @Tabaie worst memory allocation offender + toRemove := commitmentInfo.GetPrivateCommitted() + toRemove = append(toRemove, commitmentInfo.CommitmentIndexes()) + _wireValues := filterHeap(wireValues[r1cs.GetNbPublicVariables():], r1cs.GetNbPublicVariables(), internal.ConcatAll(toRemove...)) + + scalarBytes := len(_wireValues)*fr.Bytes + scalars_d, _ := goicicle.CudaMalloc(scalarBytes) + goicicle.CudaMemCpyHtoD[fr.Element](scalars_d, _wireValues, scalarBytes) + MontConvOnDevice(scalars_d, len(_wireValues), false) + + krs2, _, _ = MsmOnDevice(scalars_d, pk.G1Device.K, len(_wireValues), true) + + krs.AddMixed(&deltas[2]) + + krs.AddAssign(&krs2) + + p1.ScalarMultiplication(&ar, &s) + krs.AddAssign(&p1) + + p1.ScalarMultiplication(&bs1, &r) + krs.AddAssign(&p1) + + proof.Krs.FromJacobian(&krs) + } + + computeBS2 := func() error { + // Bs2 (1 multi exp G2 - size = len(wires)) + var Bs, deltaS curve.G2Jac + + <-chWireValuesB + + Bs, _, _ = MsmG2OnDevice(wireValuesBDevice.p, pk.G2Device.B, wireValuesBDevice.size, true) + + deltaS.FromAffine(&pk.G2.Delta) + deltaS.ScalarMultiplication(&deltaS, &s) + Bs.AddAssign(&deltaS) + Bs.AddMixed(&pk.G2.Beta) + + proof.Bs.FromJacobian(&Bs) + return nil + } + + // wait for FFT to end, as it uses all our CPUs + <-chHDone + + // schedule our proof part computations + computeKRS() + computeAR1() + computeBS1() + if err := computeBS2(); err != nil { + return nil, err + } + + log.Debug().Dur("took", time.Since(start)).Msg("prover done") + + return proof, nil +} + +// if len(toRemove) == 0, returns slice +// else, returns a new slice without the indexes in toRemove. 
The first value in the slice is taken as indexes as sliceFirstIndex +// this assumes len(slice) > len(toRemove) +// filterHeap modifies toRemove +func filterHeap(slice []fr.Element, sliceFirstIndex int, toRemove []int) (r []fr.Element) { + + if len(toRemove) == 0 { + return slice + } + + heap := utils.IntHeap(toRemove) + heap.Heapify() + + r = make([]fr.Element, 0, len(slice)) + + // note: we can optimize that for the likely case where len(slice) >>> len(toRemove) + for i := 0; i < len(slice); i++ { + if len(heap) > 0 && i+sliceFirstIndex == heap[0] { + for len(heap) > 0 && i+sliceFirstIndex == heap[0] { + heap.Pop() + } + continue + } + r = append(r, slice[i]) + } + + return +} + +func computeH(a, b, c []fr.Element, pk *ProvingKey) unsafe.Pointer { + // H part of Krs + // Compute H (hz=ab-c, where z=-2 on ker X^n+1 (z(x)=x^n-1)) + // 1 - _a = ifft(a), _b = ifft(b), _c = ifft(c) + // 2 - ca = fft_coset(_a), ba = fft_coset(_b), cc = fft_coset(_c) + // 3 - h = ifft_coset(ca o cb - cc) + + n := len(a) + + // add padding to ensure input length is domain cardinality + padding := make([]fr.Element, int(pk.Domain.Cardinality)-n) + a = append(a, padding...) + b = append(b, padding...) + c = append(c, padding...) + n = len(a) + + sizeBytes := n * fr.Bytes + + /*********** Copy a,b,c to Device Start ************/ + copyADone := make(chan unsafe.Pointer, 1) + copyBDone := make(chan unsafe.Pointer, 1) + copyCDone := make(chan unsafe.Pointer, 1) + + go CopyToDevice(a, sizeBytes, copyADone) + go CopyToDevice(b, sizeBytes, copyBDone) + go CopyToDevice(c, sizeBytes, copyCDone) + + a_device := <- copyADone + b_device := <- copyBDone + c_device := <- copyCDone + /*********** Copy a,b,c to Device End ************/ + + computeInttNttDone := make(chan error, 1) + computeInttNttOnDevice := func (devicePointer unsafe.Pointer) { + a_intt_d := INttOnDevice(devicePointer, pk.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) + NttOnDevice(devicePointer, a_intt_d, pk.DomainDevice.Twiddles, pk.DomainDevice.CosetTable, n, n, sizeBytes, true) + computeInttNttDone <- nil + } + + go computeInttNttOnDevice(a_device) + go computeInttNttOnDevice(b_device) + go computeInttNttOnDevice(c_device) + _, _, _ = <- computeInttNttDone, <- computeInttNttDone, <- computeInttNttDone + + PolyOps(a_device, b_device, c_device, pk.DenDevice, n) + + h := INttOnDevice(a_device, pk.DomainDevice.TwiddlesInv, pk.DomainDevice.CosetTableInv, n, sizeBytes, true) + icicle.ReverseScalars(h, n) + + return h +} diff --git a/backend/groth16/bn254/setup.go b/backend/groth16/bn254/setup.go index 372c723da0..3b1a0ee1a9 100644 --- a/backend/groth16/bn254/setup.go +++ b/backend/groth16/bn254/setup.go @@ -1,3 +1,5 @@ +//go:build !gpu + // Copyright 2020 ConsenSys Software Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go new file mode 100644 index 0000000000..d11b64a00f --- /dev/null +++ b/backend/groth16/bn254/setup_gpu.go @@ -0,0 +1,796 @@ +//go:build gpu + +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by gnark DO NOT EDIT + +package groth16 + +import ( + "errors" + "github.com/consensys/gnark-crypto/ecc" + curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fp" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "github.com/consensys/gnark/backend/groth16/internal" + "github.com/consensys/gnark/constraint" + "github.com/consensys/gnark/constraint/bn254" + "math/big" + "math" + "math/bits" + "unsafe" + "fmt" + icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" + goicicle "github.com/ingonyama-zk/icicle/goicicle" +) + +// ProvingKey is used by a Groth16 prover to encode a proof of a statement +// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf +type ProvingKey struct { + // domain + Domain fft.Domain + + // [α]₁, [β]₁, [δ]₁ + // [A(t)]₁, [B(t)]₁, [Kpk(t)]₁, [Z(t)]₁ + G1 struct { + Alpha, Beta, Delta curve.G1Affine + A, B, Z []curve.G1Affine + K []curve.G1Affine // the indexes correspond to the private wires + } + + G1Device struct { + A, B, K, Z unsafe.Pointer + } + + DomainDevice struct { + Twiddles, TwiddlesInv unsafe.Pointer + CosetTable, CosetTableInv unsafe.Pointer + } + + // [β]₂, [δ]₂, [B(t)]₂ + G2 struct { + Beta, Delta curve.G2Affine + B []curve.G2Affine + } + + G2Device struct { + B unsafe.Pointer + } + + DenDevice unsafe.Pointer + + // if InfinityA[i] == true, the point G1.A[i] == infinity + InfinityA, InfinityB []bool + NbInfinityA, NbInfinityB uint64 + + CommitmentKeys []pedersen.ProvingKey +} + +// VerifyingKey is used by a Groth16 verifier to verify the validity of a proof and a statement +// Notation follows Figure 4. 
in DIZK paper https://eprint.iacr.org/2018/691.pdf +type VerifyingKey struct { + // [α]₁, [Kvk]₁ + G1 struct { + Alpha curve.G1Affine + Beta, Delta curve.G1Affine // unused, here for compatibility purposes + K []curve.G1Affine // The indexes correspond to the public wires + } + + // [β]₂, [δ]₂, [γ]₂, + // -[δ]₂, -[γ]₂: see proof.Verify() for more details + G2 struct { + Beta, Delta, Gamma curve.G2Affine + deltaNeg, gammaNeg curve.G2Affine // not serialized + } + + // e(α, β) + e curve.GT // not serialized + + CommitmentKey pedersen.VerifyingKey + PublicAndCommitmentCommitted [][]int // indexes of public/commitment committed variables +} + +// Setup constructs the SRS +func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { + /* + Setup + ----- + To build the verifying keys: + - compile the r1cs system -> the number of gates is len(GateOrdering)+len(PureStructuralConstraints)+len(InpureStructuralConstraints) + - loop through the ordered computational constraints (=gate in r1cs system structure), eValuate A(X), B(X), C(X) with simple formula (the gate number is the current iterator) + - loop through the inpure structural constraints, eValuate A(X), B(X), C(X) with simple formula, the gate number is len(gateOrdering)+ current iterator + - loop through the pure structural constraints, eValuate A(X), B(X), C(X) with simple formula, the gate number is len(gateOrdering)+len(InpureStructuralConstraints)+current iterator + */ + + // get R1CS nb constraints, wires and public/private inputs + nbWires := r1cs.NbInternalVariables + r1cs.GetNbPublicVariables() + r1cs.GetNbSecretVariables() + + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + commitmentWires := commitmentInfo.CommitmentIndexes() + privateCommitted := commitmentInfo.GetPrivateCommitted() + nbPrivateCommittedWires := internal.NbElements(privateCommitted) + + // a commitment is itself defined by a hint so the prover considers it private + // but the verifier will need to inject the value itself so on the groth16 + // level it must be considered public + nbPublicWires := r1cs.GetNbPublicVariables() + len(commitmentInfo) + nbPrivateWires := r1cs.GetNbSecretVariables() + r1cs.NbInternalVariables - nbPrivateCommittedWires - len(commitmentInfo) + + // Setting group for fft + domain := fft.NewDomain(uint64(r1cs.GetNbConstraints())) + + // samples toxic waste + toxicWaste, err := sampleToxicWaste() + if err != nil { + return err + } + + // Setup coeffs to compute pk.G1.A, pk.G1.B, pk.G1.K + A, B, C := setupABC(r1cs, domain, toxicWaste) + + // To fill in the Proving and Verifying keys, we need to perform a lot of ecc scalar multiplication (with generator) + // and convert the resulting points to affine + // this is done using the curve.BatchScalarMultiplicationGX API, which takes as input the base point + // (in our case the generator) and the list of scalars, and outputs a list of points (len(points) == len(scalars)) + // to use this batch call, we need to order our scalars in the same slice + // we have 1 batch call for G1 and 1 batch call for G1 + // scalars are fr.Element in non montgomery form + _, _, g1, g2 := curve.Generators() + + // --------------------------------------------------------------------------------------------- + // G1 scalars + + // the G1 scalars are ordered (arbitrary) as follows: + // + // [[α], [β], [δ], [A(i)], [B(i)], [pk.K(i)], [Z(i)], [vk.K(i)]] + // len(A) == len(B) == nbWires + // len(pk.K) == nbPrivateWires + // len(vk.K) == nbPublicWires + // len(Z) == domain.Cardinality + + // 
compute scalars for pkK, vkK and ckK + pkK := make([]fr.Element, nbPrivateWires) + vkK := make([]fr.Element, nbPublicWires) + ckK := make([][]fr.Element, len(commitmentInfo)) + for i := range commitmentInfo { + ckK[i] = make([]fr.Element, len(privateCommitted[i])) + } + + var t0, t1 fr.Element + + computeK := func(i int, coeff *fr.Element) { // TODO: Inline again + t1.Mul(&A[i], &toxicWaste.beta) + t0.Mul(&B[i], &toxicWaste.alpha) + t1.Add(&t1, &t0). + Add(&t1, &C[i]). + Mul(&t1, coeff) + } + vI := 0 // number of public wires seen so far + cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + nbCommitmentsSeen := 0 + + for i := range A { + commitment := -1 // index of the commitment that commits to this variable as a private or commitment value + var isCommitment, isPublic bool + if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { + isCommitment = true + nbCommitmentsSeen++ + } + + for j := range commitmentInfo { // does commitment j commit to i? + if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { + commitment = j + break // frontend guarantees that no private variable is committed to more than once + } + } + } + + if isPublic || commitment != -1 || isCommitment { + computeK(i, &toxicWaste.gammaInv) + + if isPublic || isCommitment { + vkK[vI] = t1 + vI++ + } else { // committed and private + ckK[commitment][cI[commitment]] = t1 + cI[commitment]++ + nbPrivateCommittedSeen++ + } + } else { + computeK(i, &toxicWaste.deltaInv) + pkK[i-vI-nbPrivateCommittedSeen] = t1 // vI = nbPublicSeen + nbCommitmentsSeen + } + } + + // Z part of the proving key (scalars) + Z := make([]fr.Element, domain.Cardinality) + one := fr.One() + var zdt fr.Element + + zdt.Exp(toxicWaste.t, new(big.Int).SetUint64(domain.Cardinality)). + Sub(&zdt, &one). + Mul(&zdt, &toxicWaste.deltaInv) // sets Zdt to Zdt/delta + + for i := 0; i < int(domain.Cardinality); i++ { + Z[i] = zdt + zdt.Mul(&zdt, &toxicWaste.t) + } + + // mark points at infinity and filter them + pk.InfinityA = make([]bool, len(A)) + pk.InfinityB = make([]bool, len(B)) + + n := 0 + for i, e := range A { + if e.IsZero() { + pk.InfinityA[i] = true + continue + } + A[n] = A[i] + n++ + } + A = A[:n] + pk.NbInfinityA = uint64(nbWires - n) + n = 0 + for i, e := range B { + if e.IsZero() { + pk.InfinityB[i] = true + continue + } + B[n] = B[i] + n++ + } + B = B[:n] + pk.NbInfinityB = uint64(nbWires - n) + + // compute our batch scalar multiplication with g1 elements + g1Scalars := make([]fr.Element, 0, (nbWires*3)+int(domain.Cardinality)+3) + g1Scalars = append(g1Scalars, toxicWaste.alpha, toxicWaste.beta, toxicWaste.delta) + g1Scalars = append(g1Scalars, A...) + g1Scalars = append(g1Scalars, B...) + g1Scalars = append(g1Scalars, Z...) + g1Scalars = append(g1Scalars, vkK...) + g1Scalars = append(g1Scalars, pkK...) + for i := range ckK { + g1Scalars = append(g1Scalars, ckK[i]...) 
+ } + + g1PointsAff := curve.BatchScalarMultiplicationG1(&g1, g1Scalars) + + // sets pk: [α]₁, [β]₁, [δ]₁ + pk.G1.Alpha = g1PointsAff[0] + pk.G1.Beta = g1PointsAff[1] + pk.G1.Delta = g1PointsAff[2] + + offset := 3 + pk.G1.A = g1PointsAff[offset : offset+len(A)] + offset += len(A) + + pk.G1.B = g1PointsAff[offset : offset+len(B)] + offset += len(B) + + bitReverse(g1PointsAff[offset : offset+int(domain.Cardinality)]) + sizeZ := int(domain.Cardinality) - 1 // deg(H)=deg(A*B-C/X^n-1)=(n-1)+(n-1)-n=n-2 + pk.G1.Z = g1PointsAff[offset : offset+sizeZ] + + offset += int(domain.Cardinality) + + vk.G1.K = g1PointsAff[offset : offset+nbPublicWires] + offset += nbPublicWires + + pk.G1.K = g1PointsAff[offset : offset+nbPrivateWires] + offset += nbPrivateWires + + // --------------------------------------------------------------------------------------------- + // Commitment setup + + commitmentBases := make([][]curve.G1Affine, len(commitmentInfo)) + for i := range commitmentBases { + size := len(ckK[i]) + commitmentBases[i] = g1PointsAff[offset : offset+size] + offset += size + } + if offset != len(g1PointsAff) { + return errors.New("didn't consume all G1 points") // TODO @Tabaie Remove this + } + + pk.CommitmentKeys, vk.CommitmentKey, err = pedersen.Setup(commitmentBases...) + if err != nil { + return err + } + + vk.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentWires, r1cs.GetNbPublicVariables()) + + // --------------------------------------------------------------------------------------------- + // G2 scalars + + // the G2 scalars are ordered as follow: + // + // [[B(i)], [β], [δ], [γ]] + // len(B) == nbWires + + // compute our batch scalar multiplication with g2 elements + g2Scalars := append(B, toxicWaste.beta, toxicWaste.delta, toxicWaste.gamma) + + g2PointsAff := curve.BatchScalarMultiplicationG2(&g2, g2Scalars) + + pk.G2.B = g2PointsAff[:len(B)] + + // sets pk: [β]₂, [δ]₂ + pk.G2.Beta = g2PointsAff[len(B)+0] + pk.G2.Delta = g2PointsAff[len(B)+1] + + // sets vk: [δ]₂, [γ]₂ + vk.G2.Delta = g2PointsAff[len(B)+1] + vk.G2.Gamma = g2PointsAff[len(B)+2] + + // --------------------------------------------------------------------------------------------- + // Pairing: vk.e + vk.G1.Alpha = pk.G1.Alpha + vk.G2.Beta = pk.G2.Beta + + // unused, here for compatibility purposes + vk.G1.Beta = pk.G1.Beta + vk.G1.Delta = pk.G1.Delta + + if err := vk.Precompute(); err != nil { + return err + } + + // set domain + pk.Domain = *domain + + pk.setupDevicePointers() + + return nil +} + +func (pk *ProvingKey) setupDevicePointers() { + n := int(pk.Domain.Cardinality) + sizeBytes := n*fr.Bytes + + /************************* Start Domain Device Setup ***************************/ + + /************************* CosetTableInv ***************************/ + cosetPowersInv_d, _ := goicicle.CudaMalloc(sizeBytes) + goicicle.CudaMemCpyHtoD[fr.Element](cosetPowersInv_d, pk.Domain.CosetTableInv, sizeBytes) + MontConvOnDevice(cosetPowersInv_d, len(pk.Domain.CosetTable), false) + + pk.DomainDevice.CosetTableInv = cosetPowersInv_d + + /************************* CosetTable ***************************/ + cosetPowers_d, _ := goicicle.CudaMalloc(sizeBytes) + goicicle.CudaMemCpyHtoD[fr.Element](cosetPowers_d, pk.Domain.CosetTable, sizeBytes) + MontConvOnDevice(cosetPowers_d, len(pk.Domain.CosetTable), false) + + pk.DomainDevice.CosetTable = cosetPowers_d + + /************************* Twiddles and Twiddles Inv ***************************/ + om_selector := int(math.Log(float64(n)) / math.Log(2)) + 
twiddlesInv_d_gen, twddles_err := icicle.GenerateTwiddles(n, om_selector, true) + + if twddles_err != nil { + fmt.Print(twiddlesInv_d_gen) + } + + twiddles_d_gen, twddles_err := icicle.GenerateTwiddles(n, om_selector, false) + if twddles_err != nil { + fmt.Print(twiddles_d_gen) + } + + pk.DomainDevice.Twiddles = twiddles_d_gen + pk.DomainDevice.TwiddlesInv = twiddlesInv_d_gen + + /************************* Den ***************************/ + var denI, oneI fr.Element + oneI.SetOne() + denI.Exp(pk.Domain.FrMultiplicativeGen, big.NewInt(int64(pk.Domain.Cardinality))) + denI.Sub(&denI, &oneI).Inverse(&denI) + + den_d, _ := goicicle.CudaMalloc(sizeBytes) + log2Size := int(math.Floor(math.Log2(float64(n)))) + denIcicle := *icicle.NewFieldFromFrGnark[icicle.ScalarField](denI) + denIcicleArr := []icicle.ScalarField{denIcicle} + for i := 0; i < log2Size; i++ { + denIcicleArr = append(denIcicleArr, denIcicleArr...) + } + for i := 0; i < (n - int(math.Pow(2, float64(log2Size)))); i++ { + denIcicleArr = append(denIcicleArr, denIcicle) + } + + goicicle.CudaMemCpyHtoD[icicle.ScalarField](den_d, denIcicleArr, sizeBytes) + + pk.DenDevice = den_d + + /************************* End Domain Device Setup ***************************/ + + /************************* Start G1 Device Setup ***************************/ + /************************* A ***************************/ + pointsBytesA := len(pk.G1.A) * fp.Bytes * 2 + a_d, _ := goicicle.CudaMalloc(pointsBytesA) + iciclePointsA := icicle.BatchConvertFromG1Affine(pk.G1.A) + goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](a_d, iciclePointsA, pointsBytesA) + + pk.G1Device.A = a_d + + /************************* B ***************************/ + pointsBytesB := len(pk.G1.B) * fp.Bytes * 2 + b_d, _ := goicicle.CudaMalloc(pointsBytesB) + iciclePointsB := icicle.BatchConvertFromG1Affine(pk.G1.B) + goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](b_d, iciclePointsB, pointsBytesB) + + pk.G1Device.B = b_d + + /************************* K ***************************/ + pointsBytesK := len(pk.G1.K) * fp.Bytes * 2 + k_d, _ := goicicle.CudaMalloc(pointsBytesK) + iciclePointsK := icicle.BatchConvertFromG1Affine(pk.G1.K) + goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](k_d, iciclePointsK, pointsBytesK) + + pk.G1Device.K = k_d + + /************************* Z ***************************/ + pointsBytesZ := len(pk.G1.Z) * fp.Bytes * 2 + z_d, _ := goicicle.CudaMalloc(pointsBytesZ) + iciclePointsZ := icicle.BatchConvertFromG1Affine(pk.G1.Z) + goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](z_d, iciclePointsZ, pointsBytesZ) + + pk.G1Device.Z = z_d + /************************* End G1 Device Setup ***************************/ + + /************************* Start G2 Device Setup ***************************/ + pointsBytesB2 := len(pk.G2.B) * fp.Bytes * 4 + b2_d, _ := goicicle.CudaMalloc(pointsBytesB2) + iciclePointsB2 := icicle.BatchConvertFromG2Affine(pk.G2.B) + goicicle.CudaMemCpyHtoD[icicle.G2PointAffine](b2_d, iciclePointsB2, pointsBytesB2) + pk.G2Device.B = b2_d + /************************* End G2 Device Setup ***************************/ +} + +// Precompute sets e, -[δ]₂, -[γ]₂ +// This is meant to be called internally during setup or deserialization. 
+func (vk *VerifyingKey) Precompute() error { + var err error + vk.e, err = curve.Pair([]curve.G1Affine{vk.G1.Alpha}, []curve.G2Affine{vk.G2.Beta}) + if err != nil { + return err + } + vk.G2.deltaNeg.Neg(&vk.G2.Delta) + vk.G2.gammaNeg.Neg(&vk.G2.Gamma) + return nil +} + +func setupABC(r1cs *cs.R1CS, domain *fft.Domain, toxicWaste toxicWaste) (A []fr.Element, B []fr.Element, C []fr.Element) { + + nbWires := r1cs.NbInternalVariables + r1cs.GetNbPublicVariables() + r1cs.GetNbSecretVariables() + + A = make([]fr.Element, nbWires) + B = make([]fr.Element, nbWires) + C = make([]fr.Element, nbWires) + + one := fr.One() + + // first we compute [t-w^i] and its inverse + var w fr.Element + w.Set(&domain.Generator) + wi := fr.One() + t := make([]fr.Element, r1cs.GetNbConstraints()+1) + for i := 0; i < len(t); i++ { + t[i].Sub(&toxicWaste.t, &wi) + wi.Mul(&wi, &w) // TODO this is already pre computed in fft.Domain + } + tInv := fr.BatchInvert(t) + + // evaluation of the i-th lagrange polynomial at t + var L fr.Element + + // L = 1/n*(t^n-1)/(t-1), Li+1 = w*Li*(t-w^i)/(t-w^(i+1)) + + // Setting L0 + L.Exp(toxicWaste.t, new(big.Int).SetUint64(uint64(domain.Cardinality))). + Sub(&L, &one) + L.Mul(&L, &tInv[0]). + Mul(&L, &domain.CardinalityInv) + + accumulate := func(res *fr.Element, t constraint.Term, value *fr.Element) { + cID := t.CoeffID() + switch cID { + case constraint.CoeffIdZero: + return + case constraint.CoeffIdOne: + res.Add(res, value) + case constraint.CoeffIdMinusOne: + res.Sub(res, value) + case constraint.CoeffIdTwo: + var buffer fr.Element + buffer.Double(value) + res.Add(res, &buffer) + default: + var buffer fr.Element + buffer.Mul(&r1cs.Coefficients[cID], value) + res.Add(res, &buffer) + } + } + + // each constraint is in the form + // L * R == O + // L, R and O being linear expressions + // for each term appearing in the linear expression, + // we compute term.Coefficient * L, and cumulate it in + // A, B or C at the index of the variable + + j := 0 + it := r1cs.GetR1CIterator() + for c := it.Next(); c != nil; c = it.Next() { + for _, t := range c.L { + accumulate(&A[t.WireID()], t, &L) + } + for _, t := range c.R { + accumulate(&B[t.WireID()], t, &L) + } + for _, t := range c.O { + accumulate(&C[t.WireID()], t, &L) + } + + // Li+1 = w*Li*(t-w^i)/(t-w^(i+1)) + L.Mul(&L, &w) + L.Mul(&L, &t[j]) + L.Mul(&L, &tInv[j+1]) + + j++ + } + + return + +} + +// toxicWaste toxic waste +type toxicWaste struct { + + // Montgomery form of params + t, alpha, beta, gamma, delta fr.Element + gammaInv, deltaInv fr.Element +} + +func sampleToxicWaste() (toxicWaste, error) { + + res := toxicWaste{} + + for res.t.IsZero() { + if _, err := res.t.SetRandom(); err != nil { + return res, err + } + } + for res.alpha.IsZero() { + if _, err := res.alpha.SetRandom(); err != nil { + return res, err + } + } + for res.beta.IsZero() { + if _, err := res.beta.SetRandom(); err != nil { + return res, err + } + } + for res.gamma.IsZero() { + if _, err := res.gamma.SetRandom(); err != nil { + return res, err + } + } + for res.delta.IsZero() { + if _, err := res.delta.SetRandom(); err != nil { + return res, err + } + } + + res.gammaInv.Inverse(&res.gamma) + res.deltaInv.Inverse(&res.delta) + + return res, nil +} + +// DummySetup fills a random ProvingKey +// used for test or benchmarking purposes +func DummySetup(r1cs *cs.R1CS, pk *ProvingKey) error { + // get R1CS nb constraints, wires and public/private inputs + nbWires := r1cs.NbInternalVariables + r1cs.GetNbPublicVariables() + r1cs.GetNbSecretVariables() + nbConstraints 
:= r1cs.GetNbConstraints() + + // Setting group for fft + domain := fft.NewDomain(uint64(nbConstraints)) + + // count number of infinity points we would have had we a normal setup + // in pk.G1.A, pk.G1.B, and pk.G2.B + nbZeroesA, nbZeroesB := dummyInfinityCount(r1cs) + + // initialize proving key + pk.G1.A = make([]curve.G1Affine, nbWires-nbZeroesA) + pk.G1.B = make([]curve.G1Affine, nbWires-nbZeroesB) + pk.G1.K = make([]curve.G1Affine, nbWires-r1cs.GetNbPublicVariables()) + pk.G1.Z = make([]curve.G1Affine, domain.Cardinality) + pk.G2.B = make([]curve.G2Affine, nbWires-nbZeroesB) + + // set infinity markers + pk.InfinityA = make([]bool, nbWires) + pk.InfinityB = make([]bool, nbWires) + pk.NbInfinityA = uint64(nbZeroesA) + pk.NbInfinityB = uint64(nbZeroesB) + for i := 0; i < nbZeroesA; i++ { + pk.InfinityA[i] = true + } + for i := 0; i < nbZeroesB; i++ { + pk.InfinityB[i] = true + } + + // samples toxic waste + toxicWaste, err := sampleToxicWaste() + if err != nil { + return err + } + + var r1Jac curve.G1Jac + var r1Aff curve.G1Affine + var b big.Int + g1, g2, _, _ := curve.Generators() + r1Jac.ScalarMultiplication(&g1, toxicWaste.alpha.BigInt(&b)) + r1Aff.FromJacobian(&r1Jac) + var r2Jac curve.G2Jac + var r2Aff curve.G2Affine + r2Jac.ScalarMultiplication(&g2, &b) + r2Aff.FromJacobian(&r2Jac) + for i := 0; i < len(pk.G1.A); i++ { + pk.G1.A[i] = r1Aff + } + for i := 0; i < len(pk.G1.B); i++ { + pk.G1.B[i] = r1Aff + } + for i := 0; i < len(pk.G2.B); i++ { + pk.G2.B[i] = r2Aff + } + for i := 0; i < len(pk.G1.Z); i++ { + pk.G1.Z[i] = r1Aff + } + for i := 0; i < len(pk.G1.K); i++ { + pk.G1.K[i] = r1Aff + } + pk.G1.Alpha = r1Aff + pk.G1.Beta = r1Aff + pk.G1.Delta = r1Aff + pk.G2.Beta = r2Aff + pk.G2.Delta = r2Aff + + pk.Domain = *domain + + return nil +} + +// dummyInfinityCount helps us simulate the number of infinity points we have with the given R1CS +// in A and B as it directly impacts prover performance +func dummyInfinityCount(r1cs *cs.R1CS) (nbZeroesA, nbZeroesB int) { + + nbWires := r1cs.NbInternalVariables + r1cs.GetNbPublicVariables() + r1cs.GetNbSecretVariables() + + A := make([]bool, nbWires) + B := make([]bool, nbWires) + + it := r1cs.GetR1CIterator() + for c := it.Next(); c != nil; c = it.Next() { + for _, t := range c.L { + A[t.WireID()] = true + } + for _, t := range c.R { + B[t.WireID()] = true + } + } + + for i := 0; i < nbWires; i++ { + if !A[i] { + nbZeroesA++ + } + if !B[i] { + nbZeroesB++ + } + } + return + +} + +// IsDifferent returns true if provided vk is different than self +// this is used by groth16.Assert to ensure random sampling +func (vk *VerifyingKey) IsDifferent(_other interface{}) bool { + vk2 := _other.(*VerifyingKey) + for i := 0; i < len(vk.G1.K); i++ { + if !vk.G1.K[i].IsInfinity() { + if vk.G1.K[i].Equal(&vk2.G1.K[i]) { + return false + } + } + } + + return true +} + +// IsDifferent returns true if provided pk is different than self +// this is used by groth16.Assert to ensure random sampling +func (pk *ProvingKey) IsDifferent(_other interface{}) bool { + pk2 := _other.(*ProvingKey) + + if pk.G1.Alpha.Equal(&pk2.G1.Alpha) || + pk.G1.Beta.Equal(&pk2.G1.Beta) || + pk.G1.Delta.Equal(&pk2.G1.Delta) { + return false + } + + for i := 0; i < len(pk.G1.K); i++ { + if !pk.G1.K[i].IsInfinity() { + if pk.G1.K[i].Equal(&pk2.G1.K[i]) { + return false + } + } + } + + return true +} + +// CurveID returns the curveID +func (pk *ProvingKey) CurveID() ecc.ID { + return curve.ID +} + +// CurveID returns the curveID +func (vk *VerifyingKey) CurveID() ecc.ID { + return 
curve.ID +} + +// NbPublicWitness returns the number of elements in the expected public witness +func (vk *VerifyingKey) NbPublicWitness() int { + return (len(vk.G1.K) - 1) +} + +// NbG1 returns the number of G1 elements in the VerifyingKey +func (vk *VerifyingKey) NbG1() int { + return 3 + len(vk.G1.K) +} + +// NbG2 returns the number of G2 elements in the VerifyingKey +func (vk *VerifyingKey) NbG2() int { + return 3 +} + +// NbG1 returns the number of G1 elements in the ProvingKey +func (pk *ProvingKey) NbG1() int { + return 3 + len(pk.G1.A) + len(pk.G1.B) + len(pk.G1.Z) + len(pk.G1.K) +} + +// NbG2 returns the number of G2 elements in the ProvingKey +func (pk *ProvingKey) NbG2() int { + return 2 + len(pk.G2.B) +} + +// bitRerverse permutation as in fft.BitReverse , but with []curve.G1Affine +func bitReverse(a []curve.G1Affine) { + n := uint(len(a)) + nn := uint(bits.UintSize - bits.TrailingZeros(n)) + + for i := uint(0); i < n; i++ { + irev := bits.Reverse(i) >> nn + if irev > i { + a[i], a[irev] = a[irev], a[i] + } + } +} diff --git a/go.mod b/go.mod index 1bdd626f74..2f26ee9dfa 100644 --- a/go.mod +++ b/go.mod @@ -10,12 +10,12 @@ require ( github.com/fxamacker/cbor/v2 v2.5.0 github.com/google/go-cmp v0.5.9 github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b + github.com/ingonyama-zk/icicle v0.0.0-20230719184412-d13143506ece github.com/leanovate/gopter v0.2.9 github.com/rs/zerolog v1.30.0 github.com/stretchr/testify v1.8.4 golang.org/x/crypto v0.12.0 golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 - golang.org/x/sys v0.11.0 ) require ( @@ -30,6 +30,7 @@ require ( github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/x448/float16 v0.8.4 // indirect + golang.org/x/sys v0.11.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect ) diff --git a/go.sum b/go.sum index e35a6b6f46..b3244dd4d1 100644 --- a/go.sum +++ b/go.sum @@ -18,6 +18,8 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b h1:h9U78+dx9a4BKdQkBBos92HalKpaGKHrp+3Uo6yTodo= github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/ingonyama-zk/icicle v0.0.0-20230719184412-d13143506ece h1:RHCWrXIoz6iadqKcStdn0k4mDhSgOYLmyPpRM6yMNMo= +github.com/ingonyama-zk/icicle v0.0.0-20230719184412-d13143506ece/go.mod h1:kPvbQqCrBszFYe7cqctnvXVMHzGihiQsIAr/qP3TbNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= From b6efd944d71fdd648ec84e91adc8e3fc96d7fa72 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Thu, 14 Sep 2023 12:11:32 +0300 Subject: [PATCH 02/58] Remove icicle wrapper and update dependency to use iciclegnark repo for NTT and MSM. 
Add GPU memory cleanup using cudaFree Add GPU version of marshal --- backend/groth16/bn254/icicle_wrapper_gpu.go | 105 ------ backend/groth16/bn254/marshal.go | 2 + backend/groth16/bn254/marshal_gpu.go | 366 ++++++++++++++++++++ backend/groth16/bn254/prove_gpu.go | 100 ++++-- backend/groth16/bn254/setup_gpu.go | 131 ++++--- go.mod | 9 +- go.sum | 6 +- 7 files changed, 503 insertions(+), 216 deletions(-) delete mode 100644 backend/groth16/bn254/icicle_wrapper_gpu.go create mode 100644 backend/groth16/bn254/marshal_gpu.go diff --git a/backend/groth16/bn254/icicle_wrapper_gpu.go b/backend/groth16/bn254/icicle_wrapper_gpu.go deleted file mode 100644 index 295688b0ab..0000000000 --- a/backend/groth16/bn254/icicle_wrapper_gpu.go +++ /dev/null @@ -1,105 +0,0 @@ -//go:build gpu - -package groth16 - -import ( - "fmt" - "unsafe" - - curve "github.com/consensys/gnark-crypto/ecc/bn254" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" - cudawrapper "github.com/ingonyama-zk/icicle/goicicle" - icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" -) - -type OnDeviceData struct { - p unsafe.Pointer - size int -} - -func INttOnDevice(scalars_d, twiddles_d, cosetPowers_d unsafe.Pointer, size, sizeBytes int, isCoset bool) unsafe.Pointer { - icicle.ReverseScalars(scalars_d, size) - - scalarsInterp := icicle.Interpolate(scalars_d, twiddles_d, cosetPowers_d, size, isCoset) - - return scalarsInterp -} - - -func NttOnDevice(scalars_out, scalars_d, twiddles_d, coset_powers_d unsafe.Pointer, size, twid_size, size_bytes int, isCoset bool) { - res := icicle.Evaluate(scalars_out, scalars_d, twiddles_d, coset_powers_d, size, twid_size, isCoset) - - if res != 0 { - fmt.Print("Issue evaluating") - } - - icicle.ReverseScalars(scalars_out, size) - - return -} - -func MsmOnDevice(scalars_d, points_d unsafe.Pointer, count int, convert bool) (curve.G1Jac, unsafe.Pointer, error) { - out_d, _ := cudawrapper.CudaMalloc(96) - - icicle.Commit(out_d, scalars_d, points_d, count, 10) - - if convert { - outHost := make([]icicle.PointBN254, 1) - cudawrapper.CudaMemCpyDtoH[icicle.PointBN254](outHost, out_d, 96) - return *outHost[0].ToGnarkJac(), nil, nil - } - - return curve.G1Jac{}, out_d, nil -} - -func MsmG2OnDevice(scalars_d, points_d unsafe.Pointer, count int, convert bool) (curve.G2Jac, unsafe.Pointer, error) { - out_d, _ := cudawrapper.CudaMalloc(192) - - icicle.CommitG2(out_d, scalars_d, points_d, count, 10) - - if convert { - outHost := make([]icicle.G2Point, 1) - cudawrapper.CudaMemCpyDtoH[icicle.G2Point](outHost, out_d, 192) - return *outHost[0].ToGnarkJac(), nil, nil - } - - return curve.G2Jac{}, out_d, nil -} - -func PolyOps(a_d, b_d, c_d, den_d unsafe.Pointer, size int) { - ret := icicle.VecScalarMulMod(a_d, b_d, size) - - if ret != 0 { - fmt.Print("Vector mult a*b issue") - } - ret = icicle.VecScalarSub(a_d, c_d, size) - - if ret != 0 { - fmt.Print("Vector sub issue") - } - ret = icicle.VecScalarMulMod(a_d, den_d, size) - - if ret != 0 { - fmt.Print("Vector mult a*den issue") - } - - return -} - -func MontConvOnDevice(scalars_d unsafe.Pointer, size int, is_into bool) { - if is_into { - icicle.ToMontgomery(scalars_d, size) - } else { - icicle.FromMontgomery(scalars_d, size) - } - - return -} - -func CopyToDevice(scalars []fr.Element, bytes int, copyDone chan unsafe.Pointer) { - devicePtr, _ := cudawrapper.CudaMalloc(bytes) - cudawrapper.CudaMemCpyHtoD[fr.Element](devicePtr, scalars, bytes) - MontConvOnDevice(devicePtr, len(scalars), false) - - copyDone <- devicePtr -} diff --git a/backend/groth16/bn254/marshal.go 
b/backend/groth16/bn254/marshal.go index c6539a7ba2..63932950c7 100644 --- a/backend/groth16/bn254/marshal.go +++ b/backend/groth16/bn254/marshal.go @@ -1,3 +1,5 @@ +//go:build !gpu + // Copyright 2020 ConsenSys Software Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/backend/groth16/bn254/marshal_gpu.go b/backend/groth16/bn254/marshal_gpu.go new file mode 100644 index 0000000000..518f6800af --- /dev/null +++ b/backend/groth16/bn254/marshal_gpu.go @@ -0,0 +1,366 @@ +//go:build gpu +// +build gpu + +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by gnark DO NOT EDIT + +package groth16 + +import ( + curve "github.com/consensys/gnark-crypto/ecc/bn254" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "github.com/consensys/gnark/internal/utils" + "io" +) + +// WriteTo writes binary encoding of the Proof elements to writer +// points are stored in compressed form Ar | Krs | Bs +// use WriteRawTo(...) to encode the proof without point compression +func (proof *Proof) WriteTo(w io.Writer) (n int64, err error) { + return proof.writeTo(w, false) +} + +// WriteRawTo writes binary encoding of the Proof elements to writer +// points are stored in uncompressed form Ar | Krs | Bs +// use WriteTo(...) to encode the proof with point compression +func (proof *Proof) WriteRawTo(w io.Writer) (n int64, err error) { + return proof.writeTo(w, true) +} + +func (proof *Proof) writeTo(w io.Writer, raw bool) (int64, error) { + var enc *curve.Encoder + if raw { + enc = curve.NewEncoder(w, curve.RawEncoding()) + } else { + enc = curve.NewEncoder(w) + } + + if err := enc.Encode(&proof.Ar); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&proof.Bs); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&proof.Krs); err != nil { + return enc.BytesWritten(), err + } + return enc.BytesWritten(), nil +} + +// ReadFrom attempts to decode a Proof from reader +// Proof must be encoded through WriteTo (compressed) or WriteRawTo (uncompressed) +func (proof *Proof) ReadFrom(r io.Reader) (n int64, err error) { + + dec := curve.NewDecoder(r) + + if err := dec.Decode(&proof.Ar); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&proof.Bs); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&proof.Krs); err != nil { + return dec.BytesRead(), err + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of the key elements to writer +// points are compressed +// use WriteRawTo(...) to encode the key without point compression +func (vk *VerifyingKey) WriteTo(w io.Writer) (n int64, err error) { + if n, err = vk.writeTo(w, false); err != nil { + return n, err + } + var m int64 + m, err = vk.CommitmentKey.WriteTo(w) + return m + n, err +} + +// WriteRawTo writes binary encoding of the key elements to writer +// points are not compressed +// use WriteTo(...) 
to encode the key with point compression +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (n int64, err error) { + if n, err = vk.writeTo(w, true); err != nil { + return n, err + } + var m int64 + m, err = vk.CommitmentKey.WriteRawTo(w) + return m + n, err +} + +// writeTo serialization format: +// follows bellman format: +// https://github.com/zkcrypto/bellman/blob/fa9be45588227a8c6ec34957de3f68705f07bd92/src/groth16/mod.rs#L143 +// [α]1,[β]1,[β]2,[γ]2,[δ]1,[δ]2,uint32(len(Kvk)),[Kvk]1 +func (vk *VerifyingKey) writeTo(w io.Writer, raw bool) (int64, error) { + var enc *curve.Encoder + if raw { + enc = curve.NewEncoder(w, curve.RawEncoding()) + } else { + enc = curve.NewEncoder(w) + } + + // [α]1,[β]1,[β]2,[γ]2,[δ]1,[δ]2 + if err := enc.Encode(&vk.G1.Alpha); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&vk.G1.Beta); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&vk.G2.Beta); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&vk.G2.Gamma); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&vk.G1.Delta); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&vk.G2.Delta); err != nil { + return enc.BytesWritten(), err + } + + // uint32(len(Kvk)),[Kvk]1 + if err := enc.Encode(vk.G1.K); err != nil { + return enc.BytesWritten(), err + } + + if vk.PublicAndCommitmentCommitted == nil { + vk.PublicAndCommitmentCommitted = [][]int{} // only matters in tests + } + if err := enc.Encode(utils.IntSliceSliceToUint64SliceSlice(vk.PublicAndCommitmentCommitted)); err != nil { + return enc.BytesWritten(), err + } + + return enc.BytesWritten(), nil +} + +// ReadFrom attempts to decode a VerifyingKey from reader +// VerifyingKey must be encoded through WriteTo (compressed) or WriteRawTo (uncompressed) +// serialization format: +// https://github.com/zkcrypto/bellman/blob/fa9be45588227a8c6ec34957de3f68705f07bd92/src/groth16/mod.rs#L143 +// [α]1,[β]1,[β]2,[γ]2,[δ]1,[δ]2,uint32(len(Kvk)),[Kvk]1 +func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { + n, err := vk.readFrom(r) + if err != nil { + return n, err + } + var m int64 + m, err = vk.CommitmentKey.ReadFrom(r) + return m + n, err +} + +// UnsafeReadFrom has the same behavior as ReadFrom, except that it will not check that decode points +// are on the curve and in the correct subgroup. +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + n, err := vk.readFrom(r, curve.NoSubgroupChecks()) + if err != nil { + return n, err + } + var m int64 + m, err = vk.CommitmentKey.UnsafeReadFrom(r) + return m + n, err +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) 
+ + // [α]1,[β]1,[β]2,[γ]2,[δ]1,[δ]2 + if err := dec.Decode(&vk.G1.Alpha); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&vk.G1.Beta); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&vk.G2.Beta); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&vk.G2.Gamma); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&vk.G1.Delta); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&vk.G2.Delta); err != nil { + return dec.BytesRead(), err + } + + // uint32(len(Kvk)),[Kvk]1 + if err := dec.Decode(&vk.G1.K); err != nil { + return dec.BytesRead(), err + } + var publicCommitted [][]uint64 + if err := dec.Decode(&publicCommitted); err != nil { + return dec.BytesRead(), err + } + vk.PublicAndCommitmentCommitted = utils.Uint64SliceSliceToIntSliceSlice(publicCommitted) + + // recompute vk.e (e(α, β)) and -[δ]2, -[γ]2 + if err := vk.Precompute(); err != nil { + return dec.BytesRead(), err + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of the key elements to writer +// points are compressed +// use WriteRawTo(...) to encode the key without point compression +func (pk *ProvingKey) WriteTo(w io.Writer) (n int64, err error) { + return pk.writeTo(w, false) +} + +// WriteRawTo writes binary encoding of the key elements to writer +// points are not compressed +// use WriteTo(...) to encode the key with point compression +func (pk *ProvingKey) WriteRawTo(w io.Writer) (n int64, err error) { + return pk.writeTo(w, true) +} + +func (pk *ProvingKey) writeTo(w io.Writer, raw bool) (int64, error) { + n, err := pk.Domain.WriteTo(w) + if err != nil { + return n, err + } + + var enc *curve.Encoder + if raw { + enc = curve.NewEncoder(w, curve.RawEncoding()) + } else { + enc = curve.NewEncoder(w) + } + nbWires := uint64(len(pk.InfinityA)) + + toEncode := []interface{}{ + &pk.G1.Alpha, + &pk.G1.Beta, + &pk.G1.Delta, + pk.G1.A, + pk.G1.B, + pk.G1.Z, + pk.G1.K, + &pk.G2.Beta, + &pk.G2.Delta, + pk.G2.B, + nbWires, + pk.NbInfinityA, + pk.NbInfinityB, + pk.InfinityA, + pk.InfinityB, + uint32(len(pk.CommitmentKeys)), + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return n + enc.BytesWritten(), err + } + } + + for i := range pk.CommitmentKeys { + var ( + n2 int64 + err error + ) + if raw { + n2, err = pk.CommitmentKeys[i].WriteRawTo(w) + } else { + n2, err = pk.CommitmentKeys[i].WriteTo(w) + } + + n += n2 + if err != nil { + return n, err + } + } + + return n + enc.BytesWritten(), nil + +} + +// ReadFrom attempts to decode a ProvingKey from reader +// ProvingKey must be encoded through WriteTo (compressed) or WriteRawTo (uncompressed) +// note that we don't check that the points are on the curve or in the correct subgroup at this point +func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { + return pk.readFrom(r) +} + +// UnsafeReadFrom behaves like ReadFrom excepts it doesn't check if the decoded points are on the curve +// or in the correct subgroup +func (pk *ProvingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return pk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (pk *ProvingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + n, err := pk.Domain.ReadFrom(r) + if err != nil { + return n, err + } + + dec := curve.NewDecoder(r, decOptions...) 
+ + var nbWires uint64 + var nbCommitments uint32 + + toDecode := []interface{}{ + &pk.G1.Alpha, + &pk.G1.Beta, + &pk.G1.Delta, + &pk.G1.A, + &pk.G1.B, + &pk.G1.Z, + &pk.G1.K, + &pk.G2.Beta, + &pk.G2.Delta, + &pk.G2.B, + &nbWires, + &pk.NbInfinityA, + &pk.NbInfinityB, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return n + dec.BytesRead(), err + } + } + pk.InfinityA = make([]bool, nbWires) + pk.InfinityB = make([]bool, nbWires) + + if err := dec.Decode(&pk.InfinityA); err != nil { + return n + dec.BytesRead(), err + } + if err := dec.Decode(&pk.InfinityB); err != nil { + return n + dec.BytesRead(), err + } + if err := dec.Decode(&nbCommitments); err != nil { + return n + dec.BytesRead(), err + } + + pk.CommitmentKeys = make([]pedersen.ProvingKey, nbCommitments) + for i := range pk.CommitmentKeys { + n2, err := pk.CommitmentKeys[i].ReadFrom(r) + n += n2 + if err != nil { + return n, err + } + } + + pk.setupDevicePointers() + + return n + dec.BytesRead(), nil +} diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index db953709e4..71635acbf0 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -1,4 +1,5 @@ //go:build gpu +// +build gpu // Copyright 2020 ConsenSys Software Inc. // @@ -19,6 +20,10 @@ package groth16 import ( + "math/big" + "time" + "unsafe" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" @@ -31,11 +36,9 @@ import ( "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" - "math/big" - "time" - "unsafe" goicicle "github.com/ingonyama-zk/icicle/goicicle" icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" + iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" ) // Proof represents a Groth16 proof that was encoded with a ProvingKey and can be verified @@ -129,7 +132,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b // we need to copy and filter the wireValues for each multi exp // as pk.G1.A, pk.G1.B and pk.G2.B may have (a significant) number of point at infinity - var wireValuesADevice, wireValuesBDevice OnDeviceData + var wireValuesADevice, wireValuesBDevice iciclegnark.OnDeviceData chWireValuesA, chWireValuesB := make(chan struct{}, 1), make(chan struct{}, 1) go func() { @@ -142,12 +145,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b j++ } wireValuesASize := len(wireValuesA) - scalarBytes := wireValuesASize*fr.Bytes + scalarBytes := wireValuesASize * fr.Bytes wireValuesADevicePtr, _ := goicicle.CudaMalloc(scalarBytes) goicicle.CudaMemCpyHtoD[fr.Element](wireValuesADevicePtr, wireValuesA, scalarBytes) - MontConvOnDevice(wireValuesADevicePtr, wireValuesASize, false) - wireValuesADevice = OnDeviceData{wireValuesADevicePtr, wireValuesASize} - + iciclegnark.MontConvOnDevice(wireValuesADevicePtr, wireValuesASize, false) + wireValuesADevice = iciclegnark.OnDeviceData{ + P: wireValuesADevicePtr, + Size: wireValuesASize, + } + close(chWireValuesA) }() go func() { @@ -160,11 +166,14 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b j++ } wireValuesBSize := len(wireValuesB) - scalarBytes := wireValuesBSize*fr.Bytes + scalarBytes := wireValuesBSize * fr.Bytes wireValuesBDevicePtr, _ := goicicle.CudaMalloc(scalarBytes) goicicle.CudaMemCpyHtoD[fr.Element](wireValuesBDevicePtr, wireValuesB, 
scalarBytes) - MontConvOnDevice(wireValuesBDevicePtr, wireValuesBSize, false) - wireValuesBDevice = OnDeviceData{wireValuesBDevicePtr, wireValuesBSize} + iciclegnark.MontConvOnDevice(wireValuesBDevicePtr, wireValuesBSize, false) + wireValuesBDevice = iciclegnark.OnDeviceData{ + P: wireValuesBDevicePtr, + Size: wireValuesBSize, + } close(chWireValuesB) }() @@ -191,7 +200,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b computeBS1 := func() { <-chWireValuesB - bs1, _, _ = MsmOnDevice(wireValuesBDevice.p, pk.G1Device.B, wireValuesBDevice.size, true) + bs1, _, _ = iciclegnark.MsmOnDevice(wireValuesBDevice.P, pk.G1Device.B, wireValuesBDevice.Size, true) bs1.AddMixed(&pk.G1.Beta) bs1.AddMixed(&deltas[1]) @@ -200,7 +209,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b computeAR1 := func() { <-chWireValuesA - ar, _, _ = MsmOnDevice(wireValuesADevice.p, pk.G1Device.A, wireValuesADevice.size, true) + ar, _, _ = iciclegnark.MsmOnDevice(wireValuesADevice.P, pk.G1Device.A, wireValuesADevice.Size, true) ar.AddMixed(&pk.G1.Alpha) ar.AddMixed(&deltas[0]) proof.Ar.FromJacobian(&ar) @@ -212,24 +221,26 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b var krs, krs2, p1 curve.G1Jac sizeH := int(pk.Domain.Cardinality - 1) // comes from the fact the deg(H)=(n-1)+(n-1)-n=n-2 - - krs, _, _ = MsmOnDevice(h, pk.G1Device.Z, sizeH, true) + + if len(pk.G1.Z) > 0 { + krs2, _, _ = iciclegnark.MsmOnDevice(h, pk.G1Device.Z, sizeH, true) + } // filter the wire values if needed // TODO Perf @Tabaie worst memory allocation offender toRemove := commitmentInfo.GetPrivateCommitted() toRemove = append(toRemove, commitmentInfo.CommitmentIndexes()) _wireValues := filterHeap(wireValues[r1cs.GetNbPublicVariables():], r1cs.GetNbPublicVariables(), internal.ConcatAll(toRemove...)) - - scalarBytes := len(_wireValues)*fr.Bytes + + scalarBytes := len(_wireValues) * fr.Bytes scalars_d, _ := goicicle.CudaMalloc(scalarBytes) goicicle.CudaMemCpyHtoD[fr.Element](scalars_d, _wireValues, scalarBytes) - MontConvOnDevice(scalars_d, len(_wireValues), false) - - krs2, _, _ = MsmOnDevice(scalars_d, pk.G1Device.K, len(_wireValues), true) + iciclegnark.MontConvOnDevice(scalars_d, len(_wireValues), false) + krs, _, _ = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(_wireValues), true) + goicicle.CudaFree(scalars_d) krs.AddMixed(&deltas[2]) - + krs.AddAssign(&krs2) p1.ScalarMultiplication(&ar, &s) @@ -246,8 +257,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b var Bs, deltaS curve.G2Jac <-chWireValuesB - - Bs, _, _ = MsmG2OnDevice(wireValuesBDevice.p, pk.G2Device.B, wireValuesBDevice.size, true) + Bs, _, _ = iciclegnark.MsmG2OnDevice(wireValuesBDevice.P, pk.G2Device.B, wireValuesBDevice.Size, true) deltaS.FromAffine(&pk.G2.Delta) deltaS.ScalarMultiplication(&deltaS, &s) @@ -271,6 +281,12 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b log.Debug().Dur("took", time.Since(start)).Msg("prover done") + go func() { + goicicle.CudaFree(wireValuesADevice.P) + goicicle.CudaFree(wireValuesBDevice.P) + goicicle.CudaFree(h) + }() + return proof, nil } @@ -322,35 +338,45 @@ func computeH(a, b, c []fr.Element, pk *ProvingKey) unsafe.Pointer { sizeBytes := n * fr.Bytes /*********** Copy a,b,c to Device Start ************/ + // Individual channels are necessary to know which device pointers + // point to which vector copyADone := make(chan unsafe.Pointer, 1) copyBDone := make(chan 
unsafe.Pointer, 1) copyCDone := make(chan unsafe.Pointer, 1) - go CopyToDevice(a, sizeBytes, copyADone) - go CopyToDevice(b, sizeBytes, copyBDone) - go CopyToDevice(c, sizeBytes, copyCDone) + go iciclegnark.CopyToDevice(a, sizeBytes, copyADone) + go iciclegnark.CopyToDevice(b, sizeBytes, copyBDone) + go iciclegnark.CopyToDevice(c, sizeBytes, copyCDone) - a_device := <- copyADone - b_device := <- copyBDone - c_device := <- copyCDone + a_device := <-copyADone + b_device := <-copyBDone + c_device := <-copyCDone /*********** Copy a,b,c to Device End ************/ - + computeInttNttDone := make(chan error, 1) - computeInttNttOnDevice := func (devicePointer unsafe.Pointer) { - a_intt_d := INttOnDevice(devicePointer, pk.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) - NttOnDevice(devicePointer, a_intt_d, pk.DomainDevice.Twiddles, pk.DomainDevice.CosetTable, n, n, sizeBytes, true) + computeInttNttOnDevice := func(devicePointer unsafe.Pointer) { + a_intt_d := iciclegnark.INttOnDevice(devicePointer, pk.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) + iciclegnark.NttOnDevice(devicePointer, a_intt_d, pk.DomainDevice.Twiddles, pk.DomainDevice.CosetTable, n, n, sizeBytes, true) computeInttNttDone <- nil + goicicle.CudaFree(a_intt_d) } go computeInttNttOnDevice(a_device) go computeInttNttOnDevice(b_device) go computeInttNttOnDevice(c_device) - _, _, _ = <- computeInttNttDone, <- computeInttNttDone, <- computeInttNttDone + _, _, _ = <-computeInttNttDone, <-computeInttNttDone, <-computeInttNttDone + + iciclegnark.PolyOps(a_device, b_device, c_device, pk.DenDevice, n) + + h := iciclegnark.INttOnDevice(a_device, pk.DomainDevice.TwiddlesInv, pk.DomainDevice.CosetTableInv, n, sizeBytes, true) - PolyOps(a_device, b_device, c_device, pk.DenDevice, n) + go func() { + goicicle.CudaFree(a_device) + goicicle.CudaFree(b_device) + goicicle.CudaFree(c_device) + }() - h := INttOnDevice(a_device, pk.DomainDevice.TwiddlesInv, pk.DomainDevice.CosetTableInv, n, sizeBytes, true) icicle.ReverseScalars(h, n) - + return h } diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index d11b64a00f..a6a2dad6ee 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -1,4 +1,5 @@ //go:build gpu +// +build gpu // Copyright 2020 ConsenSys Software Inc. 
// @@ -20,22 +21,22 @@ package groth16 import ( "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fp" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/constraint/bn254" - "math/big" + icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" + iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" "math" + "math/big" "math/bits" "unsafe" - "fmt" - icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" - goicicle "github.com/ingonyama-zk/icicle/goicicle" ) // ProvingKey is used by a Groth16 prover to encode a proof of a statement @@ -53,12 +54,12 @@ type ProvingKey struct { } G1Device struct { - A, B, K, Z unsafe.Pointer + A, B, K, Z unsafe.Pointer } DomainDevice struct { - Twiddles, TwiddlesInv unsafe.Pointer - CosetTable, CosetTableInv unsafe.Pointer + Twiddles, TwiddlesInv unsafe.Pointer + CosetTable, CosetTableInv unsafe.Pointer } // [β]₂, [δ]₂, [B(t)]₂ @@ -68,14 +69,15 @@ type ProvingKey struct { } G2Device struct { - B unsafe.Pointer + B unsafe.Pointer } - DenDevice unsafe.Pointer + DenDevice unsafe.Pointer // if InfinityA[i] == true, the point G1.A[i] == infinity InfinityA, InfinityB []bool NbInfinityA, NbInfinityB uint64 + InfinityPointIndicesK []int CommitmentKeys []pedersen.ProvingKey } @@ -363,23 +365,34 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { func (pk *ProvingKey) setupDevicePointers() { n := int(pk.Domain.Cardinality) - sizeBytes := n*fr.Bytes - + sizeBytes := n * fr.Bytes + /************************* Start Domain Device Setup ***************************/ - + copyCosetInvDone := make(chan unsafe.Pointer, 1) + copyCosetDone := make(chan unsafe.Pointer, 1) + copyDenDone := make(chan unsafe.Pointer, 1) /************************* CosetTableInv ***************************/ - cosetPowersInv_d, _ := goicicle.CudaMalloc(sizeBytes) - goicicle.CudaMemCpyHtoD[fr.Element](cosetPowersInv_d, pk.Domain.CosetTableInv, sizeBytes) - MontConvOnDevice(cosetPowersInv_d, len(pk.Domain.CosetTable), false) + go iciclegnark.CopyToDevice(pk.Domain.CosetTableInv, sizeBytes, copyCosetInvDone) - pk.DomainDevice.CosetTableInv = cosetPowersInv_d - /************************* CosetTable ***************************/ - cosetPowers_d, _ := goicicle.CudaMalloc(sizeBytes) - goicicle.CudaMemCpyHtoD[fr.Element](cosetPowers_d, pk.Domain.CosetTable, sizeBytes) - MontConvOnDevice(cosetPowers_d, len(pk.Domain.CosetTable), false) + go iciclegnark.CopyToDevice(pk.Domain.CosetTable, sizeBytes, copyCosetDone) + + /************************* Den ***************************/ + var denI, oneI fr.Element + oneI.SetOne() + denI.Exp(pk.Domain.FrMultiplicativeGen, big.NewInt(int64(pk.Domain.Cardinality))) + denI.Sub(&denI, &oneI).Inverse(&denI) + + log2Size := int(math.Floor(math.Log2(float64(n)))) + denIcicleArr := []fr.Element{denI} + for i := 0; i < log2Size; i++ { + denIcicleArr = append(denIcicleArr, denIcicleArr...) 
+ } + for i := 0; i < (n - int(math.Pow(2, float64(log2Size)))); i++ { + denIcicleArr = append(denIcicleArr, denI) + } - pk.DomainDevice.CosetTable = cosetPowers_d + go iciclegnark.CopyToDevice(denIcicleArr, sizeBytes, copyDenDone) /************************* Twiddles and Twiddles Inv ***************************/ om_selector := int(math.Log(float64(n)) / math.Log(2)) @@ -394,72 +407,56 @@ func (pk *ProvingKey) setupDevicePointers() { fmt.Print(twiddles_d_gen) } + /************************* End Domain Device Setup ***************************/ pk.DomainDevice.Twiddles = twiddles_d_gen pk.DomainDevice.TwiddlesInv = twiddlesInv_d_gen - /************************* Den ***************************/ - var denI, oneI fr.Element - oneI.SetOne() - denI.Exp(pk.Domain.FrMultiplicativeGen, big.NewInt(int64(pk.Domain.Cardinality))) - denI.Sub(&denI, &oneI).Inverse(&denI) + pk.DomainDevice.CosetTableInv = <-copyCosetInvDone + pk.DomainDevice.CosetTable = <-copyCosetDone + pk.DenDevice = <-copyDenDone - den_d, _ := goicicle.CudaMalloc(sizeBytes) - log2Size := int(math.Floor(math.Log2(float64(n)))) - denIcicle := *icicle.NewFieldFromFrGnark[icicle.ScalarField](denI) - denIcicleArr := []icicle.ScalarField{denIcicle} - for i := 0; i < log2Size; i++ { - denIcicleArr = append(denIcicleArr, denIcicleArr...) - } - for i := 0; i < (n - int(math.Pow(2, float64(log2Size)))); i++ { - denIcicleArr = append(denIcicleArr, denIcicle) - } - - goicicle.CudaMemCpyHtoD[icicle.ScalarField](den_d, denIcicleArr, sizeBytes) - - pk.DenDevice = den_d - - /************************* End Domain Device Setup ***************************/ - /************************* Start G1 Device Setup ***************************/ /************************* A ***************************/ pointsBytesA := len(pk.G1.A) * fp.Bytes * 2 - a_d, _ := goicicle.CudaMalloc(pointsBytesA) - iciclePointsA := icicle.BatchConvertFromG1Affine(pk.G1.A) - goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](a_d, iciclePointsA, pointsBytesA) - - pk.G1Device.A = a_d + copyADone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyPointsToDevice(pk.G1.A, pointsBytesA, copyADone) // Make a function for points /************************* B ***************************/ pointsBytesB := len(pk.G1.B) * fp.Bytes * 2 - b_d, _ := goicicle.CudaMalloc(pointsBytesB) - iciclePointsB := icicle.BatchConvertFromG1Affine(pk.G1.B) - goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](b_d, iciclePointsB, pointsBytesB) + copyBDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyPointsToDevice(pk.G1.B, pointsBytesB, copyBDone) // Make a function for points - pk.G1Device.B = b_d - /************************* K ***************************/ - pointsBytesK := len(pk.G1.K) * fp.Bytes * 2 - k_d, _ := goicicle.CudaMalloc(pointsBytesK) - iciclePointsK := icicle.BatchConvertFromG1Affine(pk.G1.K) - goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](k_d, iciclePointsK, pointsBytesK) + var pointsNoInfinity []curve.G1Affine + for i, gnarkPoint := range pk.G1.K { + if gnarkPoint.IsInfinity() { + pk.InfinityPointIndicesK = append(pk.InfinityPointIndicesK, i) + } else { + pointsNoInfinity = append(pointsNoInfinity, gnarkPoint) + } + } - pk.G1Device.K = k_d + pointsBytesK := len(pointsNoInfinity) * fp.Bytes * 2 + copyKDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyPointsToDevice(pointsNoInfinity, pointsBytesK, copyKDone) // Make a function for points /************************* Z ***************************/ pointsBytesZ := len(pk.G1.Z) * fp.Bytes * 2 - z_d, _ := 
goicicle.CudaMalloc(pointsBytesZ) - iciclePointsZ := icicle.BatchConvertFromG1Affine(pk.G1.Z) - goicicle.CudaMemCpyHtoD[icicle.PointAffineNoInfinityBN254](z_d, iciclePointsZ, pointsBytesZ) + copyZDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyPointsToDevice(pk.G1.Z, pointsBytesZ, copyZDone) // Make a function for points - pk.G1Device.Z = z_d /************************* End G1 Device Setup ***************************/ + pk.G1Device.A = <-copyADone + pk.G1Device.B = <-copyBDone + pk.G1Device.K = <-copyKDone + pk.G1Device.Z = <-copyZDone /************************* Start G2 Device Setup ***************************/ pointsBytesB2 := len(pk.G2.B) * fp.Bytes * 4 - b2_d, _ := goicicle.CudaMalloc(pointsBytesB2) - iciclePointsB2 := icicle.BatchConvertFromG2Affine(pk.G2.B) - goicicle.CudaMemCpyHtoD[icicle.G2PointAffine](b2_d, iciclePointsB2, pointsBytesB2) - pk.G2Device.B = b2_d + copyG2BDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyG2PointsToDevice(pk.G2.B, pointsBytesB2, copyG2BDone) // Make a function for points + pk.G2Device.B = <-copyG2BDone + /************************* End G2 Device Setup ***************************/ } diff --git a/go.mod b/go.mod index 2f26ee9dfa..03d855a967 100644 --- a/go.mod +++ b/go.mod @@ -10,7 +10,8 @@ require ( github.com/fxamacker/cbor/v2 v2.5.0 github.com/google/go-cmp v0.5.9 github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b - github.com/ingonyama-zk/icicle v0.0.0-20230719184412-d13143506ece + github.com/ingonyama-zk/icicle v0.0.0-20230907052343-04e5ff5d1af4 + github.com/ingonyama-zk/iciclegnark v0.0.0-20230914093448-b18f425e7e74 github.com/leanovate/gopter v0.2.9 github.com/rs/zerolog v1.30.0 github.com/stretchr/testify v1.8.4 @@ -18,13 +19,11 @@ require ( golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 ) -require ( - github.com/kr/text v0.2.0 // indirect - github.com/rogpeppe/go-internal v1.11.0 // indirect -) +require github.com/rogpeppe/go-internal v1.11.0 // indirect require ( github.com/davecgh/go-spew v1.1.1 // indirect + github.com/kr/text v0.2.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.19 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect diff --git a/go.sum b/go.sum index b3244dd4d1..a1a4fe1c04 100644 --- a/go.sum +++ b/go.sum @@ -18,8 +18,10 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b h1:h9U78+dx9a4BKdQkBBos92HalKpaGKHrp+3Uo6yTodo= github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= -github.com/ingonyama-zk/icicle v0.0.0-20230719184412-d13143506ece h1:RHCWrXIoz6iadqKcStdn0k4mDhSgOYLmyPpRM6yMNMo= -github.com/ingonyama-zk/icicle v0.0.0-20230719184412-d13143506ece/go.mod h1:kPvbQqCrBszFYe7cqctnvXVMHzGihiQsIAr/qP3TbNo= +github.com/ingonyama-zk/icicle v0.0.0-20230907052343-04e5ff5d1af4 h1:3Va/VmQ+KZ0pI8eLRnS1j25eFzTQvjgfh6o85xbChcM= +github.com/ingonyama-zk/icicle v0.0.0-20230907052343-04e5ff5d1af4/go.mod h1:kAK8/EoN7fUEmakzgZIYdWy1a2rBnpCaZLqSHwZWxEk= +github.com/ingonyama-zk/iciclegnark v0.0.0-20230914093448-b18f425e7e74 h1:JuKkY/y6k1iYrudHTiekzQLKQGs12pp5danZa5eGLX0= +github.com/ingonyama-zk/iciclegnark v0.0.0-20230914093448-b18f425e7e74/go.mod h1:bPTeffNOZJtNcDx7SOYN0KQyGo49HNrk8wcIRqs5l80= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/text v0.2.0 
h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= From 0fe1afc833943377c6d562598938706d9423bf7e Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Thu, 21 Sep 2023 16:30:23 +0300 Subject: [PATCH 03/58] Add infinity point and corresponding scalar removals for KRS MSM Update generation to include build tags --- backend/groth16/bn254/marshal.go | 1 + backend/groth16/bn254/prove.go | 1 + backend/groth16/bn254/prove_gpu.go | 15 ++++++++++----- backend/groth16/bn254/setup.go | 1 + internal/generator/backend/main.go | 13 ++++++++++--- 5 files changed, 23 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bn254/marshal.go b/backend/groth16/bn254/marshal.go index 63932950c7..2162ad3b24 100644 --- a/backend/groth16/bn254/marshal.go +++ b/backend/groth16/bn254/marshal.go @@ -1,4 +1,5 @@ //go:build !gpu +// +build !gpu // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index db84bc2991..c78d208a62 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -1,4 +1,5 @@ //go:build !gpu +// +build !gpu // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index 71635acbf0..878fdcf137 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -20,6 +20,7 @@ package groth16 import ( + "fmt" "math/big" "time" "unsafe" @@ -230,13 +231,17 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b // TODO Perf @Tabaie worst memory allocation offender toRemove := commitmentInfo.GetPrivateCommitted() toRemove = append(toRemove, commitmentInfo.CommitmentIndexes()) - _wireValues := filterHeap(wireValues[r1cs.GetNbPublicVariables():], r1cs.GetNbPublicVariables(), internal.ConcatAll(toRemove...)) + scalars := filterHeap(wireValues[r1cs.GetNbPublicVariables():], r1cs.GetNbPublicVariables(), internal.ConcatAll(toRemove...)) - scalarBytes := len(_wireValues) * fr.Bytes + for _, indexToRemove := range pk.InfinityPointIndicesK { + scalars = append(scalars[:indexToRemove], scalars[indexToRemove+1:]...) + } + + scalarBytes := len(scalars) * fr.Bytes scalars_d, _ := goicicle.CudaMalloc(scalarBytes) - goicicle.CudaMemCpyHtoD[fr.Element](scalars_d, _wireValues, scalarBytes) - iciclegnark.MontConvOnDevice(scalars_d, len(_wireValues), false) - krs, _, _ = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(_wireValues), true) + goicicle.CudaMemCpyHtoD[fr.Element](scalars_d, scalars, scalarBytes) + iciclegnark.MontConvOnDevice(scalars_d, len(scalars), false) + krs, _, _ = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(scalars), true) goicicle.CudaFree(scalars_d) krs.AddMixed(&deltas[2]) diff --git a/backend/groth16/bn254/setup.go b/backend/groth16/bn254/setup.go index 3b1a0ee1a9..fa1c56989c 100644 --- a/backend/groth16/bn254/setup.go +++ b/backend/groth16/bn254/setup.go @@ -1,4 +1,5 @@ //go:build !gpu +// +build !gpu // Copyright 2020 ConsenSys Software Inc. 
// diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go index cddd33357c..60ff988fba 100644 --- a/internal/generator/backend/main.go +++ b/internal/generator/backend/main.go @@ -36,6 +36,7 @@ func main() { CSPath: "../../../constraint/bn254/", Curve: "BN254", CurveID: "BN254", + BuildTag: "!gpu", } bw6_761 := templateData{ RootPath: "../../../backend/{?}/bw6-761/", @@ -157,12 +158,17 @@ func main() { panic(err) } + var buildTag string = "" + if d.BuildTag != "" { + buildTag = d.BuildTag + } + entries = []bavard.Entry{ {File: filepath.Join(groth16Dir, "verify.go"), Templates: []string{"groth16/groth16.verify.go.tmpl", importCurve}}, - {File: filepath.Join(groth16Dir, "prove.go"), Templates: []string{"groth16/groth16.prove.go.tmpl", importCurve}}, - {File: filepath.Join(groth16Dir, "setup.go"), Templates: []string{"groth16/groth16.setup.go.tmpl", importCurve}}, + {File: filepath.Join(groth16Dir, "prove.go"), Templates: []string{"groth16/groth16.prove.go.tmpl", importCurve}, BuildTag: buildTag}, + {File: filepath.Join(groth16Dir, "setup.go"), Templates: []string{"groth16/groth16.setup.go.tmpl", importCurve}, BuildTag: buildTag}, {File: filepath.Join(groth16Dir, "commitment.go"), Templates: []string{"groth16/groth16.commitment.go.tmpl", importCurve}}, - {File: filepath.Join(groth16Dir, "marshal.go"), Templates: []string{"groth16/groth16.marshal.go.tmpl", importCurve}}, + {File: filepath.Join(groth16Dir, "marshal.go"), Templates: []string{"groth16/groth16.marshal.go.tmpl", importCurve}, BuildTag: buildTag}, {File: filepath.Join(groth16Dir, "marshal_test.go"), Templates: []string{"groth16/tests/groth16.marshal.go.tmpl", importCurve}}, } if err := bgen.Generate(d, "groth16", "./template/zkpschemes/", entries...); err != nil { @@ -244,5 +250,6 @@ type templateData struct { CSPath string Curve string CurveID string + BuildTag string noBackend bool } From d7d3218d80eee90dbc0a0f0953c17c4a5ac4b551 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Tue, 26 Sep 2023 16:00:29 +0300 Subject: [PATCH 04/58] Fix order of MSMs --- backend/groth16/bn254/prove_gpu.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index 878fdcf137..09df480b6f 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -277,9 +277,9 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b <-chHDone // schedule our proof part computations - computeKRS() computeAR1() computeBS1() + computeKRS() if err := computeBS2(); err != nil { return nil, err } From c0e59943aacf6b4abaa87199a7d9e4f59d3ad5b0 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Tue, 26 Sep 2023 18:00:54 +0300 Subject: [PATCH 05/58] Add hardware debug log tag --- backend/groth16/bn254/prove.go | 2 +- backend/groth16/bn254/prove_gpu.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index c78d208a62..e0759c2315 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -65,7 +65,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", 
"groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index 09df480b6f..c95e2d8843 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -69,7 +69,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "GPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) From 7aec04473e661f8e28693db400bff292cec8a455 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 27 Sep 2023 12:37:21 +0300 Subject: [PATCH 06/58] Update gpu files with changes from upstream --- backend/groth16/bn254/marshal_gpu.go | 13 +++++++++++++ backend/groth16/bn254/prove_gpu.go | 7 +++++++ backend/groth16/bn254/setup_gpu.go | 25 ++++++++++++++++++++++--- 3 files changed, 42 insertions(+), 3 deletions(-) diff --git a/backend/groth16/bn254/marshal_gpu.go b/backend/groth16/bn254/marshal_gpu.go index 518f6800af..0ddfc73bf1 100644 --- a/backend/groth16/bn254/marshal_gpu.go +++ b/backend/groth16/bn254/marshal_gpu.go @@ -58,6 +58,13 @@ func (proof *Proof) writeTo(w io.Writer, raw bool) (int64, error) { if err := enc.Encode(&proof.Krs); err != nil { return enc.BytesWritten(), err } + if err := enc.Encode(proof.Commitments); err != nil { + return enc.BytesWritten(), err + } + if err := enc.Encode(&proof.CommitmentPok); err != nil { + return enc.BytesWritten(), err + } + return enc.BytesWritten(), nil } @@ -76,6 +83,12 @@ func (proof *Proof) ReadFrom(r io.Reader) (n int64, err error) { if err := dec.Decode(&proof.Krs); err != nil { return dec.BytesRead(), err } + if err := dec.Decode(&proof.Commitments); err != nil { + return dec.BytesRead(), err + } + if err := dec.Decode(&proof.CommitmentPok); err != nil { + return dec.BytesRead(), err + } return dec.BytesRead(), nil } diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index c95e2d8843..1a01b6f022 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -101,6 +101,13 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b }(i))) } + if r1cs.GkrInfo.Is() { + var gkrData cs.GkrSolvingData + solverOpts = append(solverOpts, + solver.OverrideHint(r1cs.GkrInfo.SolveHintID, cs.GkrSolveHint(r1cs.GkrInfo, &gkrData)), + solver.OverrideHint(r1cs.GkrInfo.ProveHintID, cs.GkrProveHint(r1cs.GkrInfo.HashName, &gkrData))) + } + _solution, err := r1cs.Solve(fullWitness, solverOpts...) 
if err != nil { return nil, err diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index a6a2dad6ee..1724f38c1d 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -607,6 +607,9 @@ func DummySetup(r1cs *cs.R1CS, pk *ProvingKey) error { // get R1CS nb constraints, wires and public/private inputs nbWires := r1cs.NbInternalVariables + r1cs.GetNbPublicVariables() + r1cs.GetNbSecretVariables() nbConstraints := r1cs.GetNbConstraints() + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + privateCommitted := commitmentInfo.GetPrivateCommitted() + nbPrivateWires := r1cs.GetNbSecretVariables() + r1cs.NbInternalVariables - internal.NbElements(privateCommitted) - len(commitmentInfo) // Setting group for fft domain := fft.NewDomain(uint64(nbConstraints)) @@ -618,8 +621,8 @@ func DummySetup(r1cs *cs.R1CS, pk *ProvingKey) error { // initialize proving key pk.G1.A = make([]curve.G1Affine, nbWires-nbZeroesA) pk.G1.B = make([]curve.G1Affine, nbWires-nbZeroesB) - pk.G1.K = make([]curve.G1Affine, nbWires-r1cs.GetNbPublicVariables()) - pk.G1.Z = make([]curve.G1Affine, domain.Cardinality) + pk.G1.K = make([]curve.G1Affine, nbPrivateWires) + pk.G1.Z = make([]curve.G1Affine, domain.Cardinality-1) pk.G2.B = make([]curve.G2Affine, nbWires-nbZeroesB) // set infinity markers @@ -673,6 +676,22 @@ func DummySetup(r1cs *cs.R1CS, pk *ProvingKey) error { pk.Domain = *domain + // --------------------------------------------------------------------------------------------- + // Commitment setup + commitmentBases := make([][]curve.G1Affine, len(commitmentInfo)) + for i := range commitmentBases { + size := len(privateCommitted[i]) + commitmentBases[i] = make([]curve.G1Affine, size) + for j := range commitmentBases[i] { + commitmentBases[i][j] = r1Aff + } + } + + pk.CommitmentKeys, _, err = pedersen.Setup(commitmentBases...) 
+ if err != nil { + return err + } + return nil } @@ -779,7 +798,7 @@ func (pk *ProvingKey) NbG2() int { return 2 + len(pk.G2.B) } -// bitRerverse permutation as in fft.BitReverse , but with []curve.G1Affine +// bitReverse permutation as in fft.BitReverse , but with []curve.G1Affine func bitReverse(a []curve.G1Affine) { n := uint(len(a)) nn := uint(bits.UintSize - bits.TrailingZeros(n)) From a5041a07ab1598695ef0c52441a70c05c133fc9c Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 27 Sep 2023 13:45:52 +0300 Subject: [PATCH 07/58] Small error handling --- backend/groth16/bn254/marshal_gpu.go | 4 ++- backend/groth16/bn254/prove_gpu.go | 49 +++++++++++++++++++++------- backend/groth16/bn254/setup_gpu.go | 10 +++--- 3 files changed, 45 insertions(+), 18 deletions(-) diff --git a/backend/groth16/bn254/marshal_gpu.go b/backend/groth16/bn254/marshal_gpu.go index 0ddfc73bf1..4a512a71a3 100644 --- a/backend/groth16/bn254/marshal_gpu.go +++ b/backend/groth16/bn254/marshal_gpu.go @@ -373,7 +373,9 @@ func (pk *ProvingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) } } - pk.setupDevicePointers() + if err := pk.setupDevicePointers(); err != nil { + return n + dec.BytesRead(), err + } return n + dec.BytesRead(), nil } diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index 1a01b6f022..fc1ec391d5 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -205,25 +205,34 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b var bs1, ar curve.G1Jac - computeBS1 := func() { + computeBS1 := func() error { <-chWireValuesB - bs1, _, _ = iciclegnark.MsmOnDevice(wireValuesBDevice.P, pk.G1Device.B, wireValuesBDevice.Size, true) + if bs1, _, err = iciclegnark.MsmOnDevice(wireValuesBDevice.P, pk.G1Device.B, wireValuesBDevice.Size, true); err != nil { + return err + } bs1.AddMixed(&pk.G1.Beta) bs1.AddMixed(&deltas[1]) + + return nil } - computeAR1 := func() { + computeAR1 := func() error { <-chWireValuesA - ar, _, _ = iciclegnark.MsmOnDevice(wireValuesADevice.P, pk.G1Device.A, wireValuesADevice.Size, true) + if ar, _, err = iciclegnark.MsmOnDevice(wireValuesADevice.P, pk.G1Device.A, wireValuesADevice.Size, true); err != nil { + return err + } + ar.AddMixed(&pk.G1.Alpha) ar.AddMixed(&deltas[0]) proof.Ar.FromJacobian(&ar) + + return nil } - computeKRS := func() { + computeKRS := func() error { // we could NOT split the Krs multiExp in 2, and just append pk.G1.K and pk.G1.Z // however, having similar lengths for our tasks helps with parallelism @@ -231,7 +240,9 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b sizeH := int(pk.Domain.Cardinality - 1) // comes from the fact the deg(H)=(n-1)+(n-1)-n=n-2 if len(pk.G1.Z) > 0 { - krs2, _, _ = iciclegnark.MsmOnDevice(h, pk.G1Device.Z, sizeH, true) + if krs2, _, err = iciclegnark.MsmOnDevice(h, pk.G1Device.Z, sizeH, true); err != nil { + return err + } } // filter the wire values if needed @@ -248,8 +259,12 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b scalars_d, _ := goicicle.CudaMalloc(scalarBytes) goicicle.CudaMemCpyHtoD[fr.Element](scalars_d, scalars, scalarBytes) iciclegnark.MontConvOnDevice(scalars_d, len(scalars), false) - krs, _, _ = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(scalars), true) + krs, _, err = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(scalars), true) goicicle.CudaFree(scalars_d) + + if err != nil { + return err + } krs.AddMixed(&deltas[2]) @@ 
-262,6 +277,8 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b krs.AddAssign(&p1) proof.Krs.FromJacobian(&krs) + + return nil } computeBS2 := func() error { @@ -269,7 +286,9 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b var Bs, deltaS curve.G2Jac <-chWireValuesB - Bs, _, _ = iciclegnark.MsmG2OnDevice(wireValuesBDevice.P, pk.G2Device.B, wireValuesBDevice.Size, true) + if Bs, _, err = iciclegnark.MsmG2OnDevice(wireValuesBDevice.P, pk.G2Device.B, wireValuesBDevice.Size, true); err != nil { + return err + } deltaS.FromAffine(&pk.G2.Delta) deltaS.ScalarMultiplication(&deltaS, &s) @@ -280,13 +299,19 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil } - // wait for FFT to end, as it uses all our CPUs + // wait for FFT to end <-chHDone // schedule our proof part computations - computeAR1() - computeBS1() - computeKRS() + if err := computeAR1(); err != nil { + return nil, err + } + if err := computeBS1(); err != nil { + return nil, err + } + if err := computeKRS(); err != nil { + return nil, err + } if err := computeBS2(); err != nil { return nil, err } diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index 1724f38c1d..9475018d98 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -21,7 +21,6 @@ package groth16 import ( "errors" - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fp" @@ -358,12 +357,12 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { // set domain pk.Domain = *domain - pk.setupDevicePointers() + err = pk.setupDevicePointers() return nil } -func (pk *ProvingKey) setupDevicePointers() { +func (pk *ProvingKey) setupDevicePointers() error { n := int(pk.Domain.Cardinality) sizeBytes := n * fr.Bytes @@ -399,12 +398,12 @@ func (pk *ProvingKey) setupDevicePointers() { twiddlesInv_d_gen, twddles_err := icicle.GenerateTwiddles(n, om_selector, true) if twddles_err != nil { - fmt.Print(twiddlesInv_d_gen) + return twddles_err } twiddles_d_gen, twddles_err := icicle.GenerateTwiddles(n, om_selector, false) if twddles_err != nil { - fmt.Print(twiddles_d_gen) + return twddles_err } /************************* End Domain Device Setup ***************************/ @@ -458,6 +457,7 @@ func (pk *ProvingKey) setupDevicePointers() { pk.G2Device.B = <-copyG2BDone /************************* End G2 Device Setup ***************************/ + return nil } // Precompute sets e, -[δ]₂, -[γ]₂ From 005bc9d956e90dbe69ab758aca1c266049db64c5 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Mon, 2 Oct 2023 10:12:06 +0300 Subject: [PATCH 08/58] Remove direct icicle dependency and use iciclegnark for all icicle operations Update iciclegnark dependency to v0.1.0 --- backend/groth16/bn254/prove_gpu.go | 48 +++++++++++++++--------------- backend/groth16/bn254/setup_gpu.go | 7 ++--- go.mod | 8 ++--- go.sum | 10 +++---- 4 files changed, 33 insertions(+), 40 deletions(-) diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index fc1ec391d5..5a43fd896b 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -20,7 +20,6 @@ package groth16 import ( - "fmt" "math/big" "time" "unsafe" @@ -37,8 +36,6 @@ import ( "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" - goicicle 
"github.com/ingonyama-zk/icicle/goicicle" - icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" ) @@ -154,9 +151,11 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b } wireValuesASize := len(wireValuesA) scalarBytes := wireValuesASize * fr.Bytes - wireValuesADevicePtr, _ := goicicle.CudaMalloc(scalarBytes) - goicicle.CudaMemCpyHtoD[fr.Element](wireValuesADevicePtr, wireValuesA, scalarBytes) - iciclegnark.MontConvOnDevice(wireValuesADevicePtr, wireValuesASize, false) + + copyDone := make(chan unsafe.Pointer, 1) + iciclegnark.CopyToDevice(wireValuesA, scalarBytes, copyDone) + wireValuesADevicePtr := <-copyDone + wireValuesADevice = iciclegnark.OnDeviceData{ P: wireValuesADevicePtr, Size: wireValuesASize, @@ -175,9 +174,11 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b } wireValuesBSize := len(wireValuesB) scalarBytes := wireValuesBSize * fr.Bytes - wireValuesBDevicePtr, _ := goicicle.CudaMalloc(scalarBytes) - goicicle.CudaMemCpyHtoD[fr.Element](wireValuesBDevicePtr, wireValuesB, scalarBytes) - iciclegnark.MontConvOnDevice(wireValuesBDevicePtr, wireValuesBSize, false) + + copyDone := make(chan unsafe.Pointer, 1) + iciclegnark.CopyToDevice(wireValuesB, scalarBytes, copyDone) + wireValuesBDevicePtr := <-copyDone + wireValuesBDevice = iciclegnark.OnDeviceData{ P: wireValuesBDevicePtr, Size: wireValuesBSize, @@ -233,9 +234,6 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b } computeKRS := func() error { - // we could NOT split the Krs multiExp in 2, and just append pk.G1.K and pk.G1.Z - // however, having similar lengths for our tasks helps with parallelism - var krs, krs2, p1 curve.G1Jac sizeH := int(pk.Domain.Cardinality - 1) // comes from the fact the deg(H)=(n-1)+(n-1)-n=n-2 @@ -256,11 +254,13 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b } scalarBytes := len(scalars) * fr.Bytes - scalars_d, _ := goicicle.CudaMalloc(scalarBytes) - goicicle.CudaMemCpyHtoD[fr.Element](scalars_d, scalars, scalarBytes) - iciclegnark.MontConvOnDevice(scalars_d, len(scalars), false) + + copyDone := make(chan unsafe.Pointer, 1) + iciclegnark.CopyToDevice(scalars, scalarBytes, copyDone) + scalars_d := <-copyDone + krs, _, err = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(scalars), true) - goicicle.CudaFree(scalars_d) + iciclegnark.FreeDevicePointer(scalars_d) if err != nil { return err @@ -319,9 +319,9 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b log.Debug().Dur("took", time.Since(start)).Msg("prover done") go func() { - goicicle.CudaFree(wireValuesADevice.P) - goicicle.CudaFree(wireValuesBDevice.P) - goicicle.CudaFree(h) + iciclegnark.FreeDevicePointer(wireValuesADevice.P) + iciclegnark.FreeDevicePointer(wireValuesBDevice.P) + iciclegnark.FreeDevicePointer(h) }() return proof, nil @@ -395,7 +395,7 @@ func computeH(a, b, c []fr.Element, pk *ProvingKey) unsafe.Pointer { a_intt_d := iciclegnark.INttOnDevice(devicePointer, pk.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) iciclegnark.NttOnDevice(devicePointer, a_intt_d, pk.DomainDevice.Twiddles, pk.DomainDevice.CosetTable, n, n, sizeBytes, true) computeInttNttDone <- nil - goicicle.CudaFree(a_intt_d) + iciclegnark.FreeDevicePointer(a_intt_d) } go computeInttNttOnDevice(a_device) @@ -408,12 +408,12 @@ func computeH(a, b, c []fr.Element, pk *ProvingKey) unsafe.Pointer { h := 
iciclegnark.INttOnDevice(a_device, pk.DomainDevice.TwiddlesInv, pk.DomainDevice.CosetTableInv, n, sizeBytes, true) go func() { - goicicle.CudaFree(a_device) - goicicle.CudaFree(b_device) - goicicle.CudaFree(c_device) + iciclegnark.FreeDevicePointer(a_device) + iciclegnark.FreeDevicePointer(b_device) + iciclegnark.FreeDevicePointer(c_device) }() - icicle.ReverseScalars(h, n) + iciclegnark.ReverseScalars(h, n) return h } diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index 9475018d98..e051890080 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -30,7 +30,6 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/constraint/bn254" - icicle "github.com/ingonyama-zk/icicle/goicicle/curves/bn254" iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" "math" "math/big" @@ -394,14 +393,12 @@ func (pk *ProvingKey) setupDevicePointers() error { go iciclegnark.CopyToDevice(denIcicleArr, sizeBytes, copyDenDone) /************************* Twiddles and Twiddles Inv ***************************/ - om_selector := int(math.Log(float64(n)) / math.Log(2)) - twiddlesInv_d_gen, twddles_err := icicle.GenerateTwiddles(n, om_selector, true) - + twiddlesInv_d_gen, twddles_err := iciclegnark.GenerateTwiddleFactors(n, true) if twddles_err != nil { return twddles_err } - twiddles_d_gen, twddles_err := icicle.GenerateTwiddles(n, om_selector, false) + twiddles_d_gen, twddles_err := iciclegnark.GenerateTwiddleFactors(n, false) if twddles_err != nil { return twddles_err } diff --git a/go.mod b/go.mod index 03d855a967..a1dd955fc9 100644 --- a/go.mod +++ b/go.mod @@ -10,8 +10,7 @@ require ( github.com/fxamacker/cbor/v2 v2.5.0 github.com/google/go-cmp v0.5.9 github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b - github.com/ingonyama-zk/icicle v0.0.0-20230907052343-04e5ff5d1af4 - github.com/ingonyama-zk/iciclegnark v0.0.0-20230914093448-b18f425e7e74 + github.com/ingonyama-zk/iciclegnark v0.1.0 github.com/leanovate/gopter v0.2.9 github.com/rs/zerolog v1.30.0 github.com/stretchr/testify v1.8.4 @@ -19,15 +18,14 @@ require ( golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 ) -require github.com/rogpeppe/go-internal v1.11.0 // indirect - require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/kr/text v0.2.0 // indirect + github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.19 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/x448/float16 v0.8.4 // indirect golang.org/x/sys v0.11.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index a1a4fe1c04..af6fcb193c 100644 --- a/go.sum +++ b/go.sum @@ -7,7 +7,6 @@ github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH github.com/consensys/gnark-crypto v0.11.2 h1:GJjjtWJ+db1xGao7vTsOgAOGgjfPe7eRGPL+xxMX0qE= github.com/consensys/gnark-crypto v0.11.2/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADivE= @@ -18,13 +17,12 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b h1:h9U78+dx9a4BKdQkBBos92HalKpaGKHrp+3Uo6yTodo= github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= -github.com/ingonyama-zk/icicle v0.0.0-20230907052343-04e5ff5d1af4 h1:3Va/VmQ+KZ0pI8eLRnS1j25eFzTQvjgfh6o85xbChcM= -github.com/ingonyama-zk/icicle v0.0.0-20230907052343-04e5ff5d1af4/go.mod h1:kAK8/EoN7fUEmakzgZIYdWy1a2rBnpCaZLqSHwZWxEk= -github.com/ingonyama-zk/iciclegnark v0.0.0-20230914093448-b18f425e7e74 h1:JuKkY/y6k1iYrudHTiekzQLKQGs12pp5danZa5eGLX0= -github.com/ingonyama-zk/iciclegnark v0.0.0-20230914093448-b18f425e7e74/go.mod h1:bPTeffNOZJtNcDx7SOYN0KQyGo49HNrk8wcIRqs5l80= +github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71 h1:YxI1RTPzpFJ3MBmxPl3Bo0F7ume7CmQEC1M9jL6CT94= +github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71/go.mod h1:kAK8/EoN7fUEmakzgZIYdWy1a2rBnpCaZLqSHwZWxEk= +github.com/ingonyama-zk/iciclegnark v0.1.0 h1:88MkEghzjQBMjrYRJFxZ9oR9CTIpB8NG2zLeCJSvXKQ= +github.com/ingonyama-zk/iciclegnark v0.1.0/go.mod h1:wz6+IpyHKs6UhMMoQpNqz1VY+ddfKqC/gRwR/64W6WU= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= From ba152dccd9174d3d68540775161d64e33dfa2b82 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Mon, 2 Oct 2023 11:05:29 +0300 Subject: [PATCH 09/58] Fix generator prove file to include hardware log attribute --- backend/groth16/bls12-377/prove.go | 2 +- backend/groth16/bls12-381/prove.go | 2 +- backend/groth16/bls24-315/prove.go | 2 +- backend/groth16/bls24-317/prove.go | 2 +- backend/groth16/bn254/prove_gpu.go | 4 ++-- backend/groth16/bw6-633/prove.go | 2 +- backend/groth16/bw6-761/prove.go | 2 +- .../backend/template/zkpschemes/groth16/groth16.prove.go.tmpl | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/backend/groth16/bls12-377/prove.go b/backend/groth16/bls12-377/prove.go index ed7124a557..68f0133367 100644 --- a/backend/groth16/bls12-377/prove.go +++ b/backend/groth16/bls12-377/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bls12-381/prove.go b/backend/groth16/bls12-381/prove.go index 6e4c0a5227..0112677e3e 100644 --- a/backend/groth16/bls12-381/prove.go +++ b/backend/groth16/bls12-381/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk 
*ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bls24-315/prove.go b/backend/groth16/bls24-315/prove.go index c464544ad0..83f45d4a46 100644 --- a/backend/groth16/bls24-315/prove.go +++ b/backend/groth16/bls24-315/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bls24-317/prove.go b/backend/groth16/bls24-317/prove.go index 10f38c5f77..82fbc62e6a 100644 --- a/backend/groth16/bls24-317/prove.go +++ b/backend/groth16/bls24-317/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index 5a43fd896b..f6c5952d99 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -254,14 +254,14 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b } scalarBytes := len(scalars) * fr.Bytes - + copyDone := make(chan unsafe.Pointer, 1) iciclegnark.CopyToDevice(scalars, scalarBytes, copyDone) scalars_d := <-copyDone krs, _, err = iciclegnark.MsmOnDevice(scalars_d, pk.G1Device.K, len(scalars), true) iciclegnark.FreeDevicePointer(scalars_d) - + if err != nil { return err } diff --git a/backend/groth16/bw6-633/prove.go b/backend/groth16/bw6-633/prove.go index b92dbb6943..8a68b87eeb 100644 --- a/backend/groth16/bw6-633/prove.go +++ b/backend/groth16/bw6-633/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bw6-761/prove.go b/backend/groth16/bw6-761/prove.go index 3ee6b9ad0f..8b6df934c7 100644 --- a/backend/groth16/bw6-761/prove.go +++ b/backend/groth16/bw6-761/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := 
logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl index fa72568220..0b7693bfa6 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl @@ -45,7 +45,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return nil, err } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) From f5f9b694fe3c62db155817dddad9b4cec1ebb5be Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Sun, 15 Oct 2023 09:03:41 +0300 Subject: [PATCH 10/58] Add additional comments on code --- backend/groth16/bn254/prove_gpu.go | 6 ++++++ backend/groth16/bn254/setup_gpu.go | 1 + 2 files changed, 7 insertions(+) diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index f6c5952d99..e067c0d450 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -152,6 +152,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b wireValuesASize := len(wireValuesA) scalarBytes := wireValuesASize * fr.Bytes + // Copy scalars to the device and retain ptr to them copyDone := make(chan unsafe.Pointer, 1) iciclegnark.CopyToDevice(wireValuesA, scalarBytes, copyDone) wireValuesADevicePtr := <-copyDone @@ -175,6 +176,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b wireValuesBSize := len(wireValuesB) scalarBytes := wireValuesBSize * fr.Bytes + // Copy scalars to the device and retain ptr to them copyDone := make(chan unsafe.Pointer, 1) iciclegnark.CopyToDevice(wireValuesB, scalarBytes, copyDone) wireValuesBDevicePtr := <-copyDone @@ -237,6 +239,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b var krs, krs2, p1 curve.G1Jac sizeH := int(pk.Domain.Cardinality - 1) // comes from the fact the deg(H)=(n-1)+(n-1)-n=n-2 + // check for small circuits as iciclegnark doesn't handle zero sizes well if len(pk.G1.Z) > 0 { if krs2, _, err = iciclegnark.MsmOnDevice(h, pk.G1Device.Z, sizeH, true); err != nil { return err @@ -249,6 +252,8 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b toRemove = append(toRemove, commitmentInfo.CommitmentIndexes()) scalars := filterHeap(wireValues[r1cs.GetNbPublicVariables():], r1cs.GetNbPublicVariables(), internal.ConcatAll(toRemove...)) + // filter zero/infinity points since icicle doesn't handle them + // See https://github.com/ingonyama-zk/icicle/issues/169 for more info for _, indexToRemove := range pk.InfinityPointIndicesK { scalars = append(scalars[:indexToRemove], scalars[indexToRemove+1:]...) 
} @@ -318,6 +323,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b log.Debug().Dur("took", time.Since(start)).Msg("prover done") + // free device/GPU memory that is not needed for future proofs (scalars/hpoly) go func() { iciclegnark.FreeDevicePointer(wireValuesADevice.P) iciclegnark.FreeDevicePointer(wireValuesBDevice.P) diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index e051890080..7826d04ff2 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -356,6 +356,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { // set domain pk.Domain = *domain + // Move static values (points, domain, hpoly denom) to the device/GPU err = pk.setupDevicePointers() return nil From e32162b96ca8a2dad7723e9d126f675494728a8a Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Sun, 15 Oct 2023 09:12:03 +0300 Subject: [PATCH 11/58] Add documentation in README for using icicle gpu library --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index c43caee473..1bd3fb645e 100644 --- a/README.md +++ b/README.md @@ -116,6 +116,20 @@ func main() { ``` +### GPU Support + +The following schemes and curves support experimental use of Ingomyama's Icicle GPU library for low level zk-SNARK primitives such as MSM, NTT, and polynomial operations: + +- [x] [Groth16](https://eprint.iacr.org/2016/260) + +instantiated with the following curve(s) + +- [x] BN254 + +To use GPUs, add the `gpu` buildtag to your build/run commands, e.g. `go run -tags=gpu main.go`. + +For more information about prerequisites see the [Icicle repo](https://github.com/ingonyama-zk/icicle). + ## Citing If you use `gnark` in your research a citation would be appreciated. 
From 943d9595aeb540bc67fc5959266bba0fbf166810 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 1 Nov 2023 14:05:02 +0200 Subject: [PATCH 12/58] Update with upstream --- .github/workflows/push.yml | 6 +- backend/backend.go | 114 +- backend/groth16/bls12-377/commitment.go | 29 - backend/groth16/bls12-377/prove.go | 16 +- backend/groth16/bls12-377/verify.go | 31 +- backend/groth16/bls12-381/commitment.go | 29 - backend/groth16/bls12-381/prove.go | 16 +- backend/groth16/bls12-381/verify.go | 31 +- backend/groth16/bls24-315/commitment.go | 29 - backend/groth16/bls24-315/prove.go | 16 +- backend/groth16/bls24-315/verify.go | 31 +- backend/groth16/bls24-317/commitment.go | 29 - backend/groth16/bls24-317/prove.go | 16 +- backend/groth16/bls24-317/verify.go | 31 +- backend/groth16/bn254/commitment.go | 29 - backend/groth16/bn254/prove.go | 16 +- backend/groth16/bn254/verify.go | 33 +- backend/groth16/bw6-633/commitment.go | 29 - backend/groth16/bw6-633/prove.go | 16 +- backend/groth16/bw6-633/verify.go | 31 +- backend/groth16/bw6-761/commitment.go | 29 - backend/groth16/bw6-761/prove.go | 16 +- backend/groth16/bw6-761/verify.go | 31 +- backend/groth16/groth16.go | 16 +- backend/groth16/groth16_test.go | 60 + backend/plonk/bls12-377/prove.go | 62 +- backend/plonk/bls12-377/verify.go | 49 +- backend/plonk/bls12-381/prove.go | 62 +- backend/plonk/bls12-381/verify.go | 49 +- backend/plonk/bls24-315/prove.go | 62 +- backend/plonk/bls24-315/verify.go | 49 +- backend/plonk/bls24-317/prove.go | 62 +- backend/plonk/bls24-317/verify.go | 49 +- backend/plonk/bn254/prove.go | 62 +- backend/plonk/bn254/solidity.go | 1090 +++++++++-------- backend/plonk/bn254/verify.go | 52 +- backend/plonk/bw6-633/prove.go | 62 +- backend/plonk/bw6-633/verify.go | 49 +- backend/plonk/bw6-761/prove.go | 62 +- backend/plonk/bw6-761/verify.go | 49 +- backend/plonk/plonk.go | 16 +- backend/plonk/plonk_test.go | 156 +++ backend/plonkfri/bls12-377/prove.go | 6 +- backend/plonkfri/bls12-377/verify.go | 15 +- backend/plonkfri/bls12-381/prove.go | 6 +- backend/plonkfri/bls12-381/verify.go | 15 +- backend/plonkfri/bls24-315/prove.go | 6 +- backend/plonkfri/bls24-315/verify.go | 15 +- backend/plonkfri/bls24-317/prove.go | 6 +- backend/plonkfri/bls24-317/verify.go | 15 +- backend/plonkfri/bn254/prove.go | 6 +- backend/plonkfri/bn254/verify.go | 15 +- backend/plonkfri/bw6-633/prove.go | 6 +- backend/plonkfri/bw6-633/verify.go | 15 +- backend/plonkfri/bw6-761/prove.go | 6 +- backend/plonkfri/bw6-761/verify.go | 15 +- backend/plonkfri/plonkfri.go | 16 +- go.mod | 2 +- go.sum | 4 +- internal/generator/backend/main.go | 1 - .../backend/template/imports.go.tmpl | 4 + .../groth16/groth16.commitment.go.tmpl | 11 - .../zkpschemes/groth16/groth16.prove.go.tmpl | 23 +- .../zkpschemes/groth16/groth16.verify.go.tmpl | 38 +- .../zkpschemes/plonk/plonk.prove.go.tmpl | 74 +- .../zkpschemes/plonk/plonk.verify.go.tmpl | 56 +- .../zkpschemes/plonkfri/plonk.prove.go.tmpl | 6 +- .../zkpschemes/plonkfri/plonk.verify.go.tmpl | 13 +- internal/stats/latest.stats | Bin 2817 -> 2817 bytes std/algebra/defaults.go | 87 ++ std/algebra/doc.go | 5 +- .../emulated/fields_bls12381/e12_pairing.go | 19 +- .../emulated/fields_bn254/e12_pairing.go | 39 +- std/algebra/emulated/fields_bw6761/doc.go | 6 + std/algebra/emulated/fields_bw6761/e3.go | 408 ++++++ std/algebra/emulated/fields_bw6761/e3_test.go | 363 ++++++ std/algebra/emulated/fields_bw6761/e6.go | 419 +++++++ .../emulated/fields_bw6761/e6_pairing.go | 241 ++++ std/algebra/emulated/fields_bw6761/e6_test.go | 404 
++++++ std/algebra/emulated/fields_bw6761/hints.go | 124 ++ std/algebra/emulated/sw_bls12381/doc_test.go | 12 - std/algebra/emulated/sw_bls12381/g1.go | 14 + std/algebra/emulated/sw_bls12381/pairing.go | 133 +- std/algebra/emulated/sw_bn254/doc_test.go | 12 - std/algebra/emulated/sw_bn254/g1.go | 13 + std/algebra/emulated/sw_bn254/pairing.go | 288 +++-- std/algebra/emulated/sw_bw6761/doc.go | 7 + std/algebra/emulated/sw_bw6761/doc_test.go | 91 ++ std/algebra/emulated/sw_bw6761/g1.go | 29 + std/algebra/emulated/sw_bw6761/g2.go | 16 + std/algebra/emulated/sw_bw6761/pairing.go | 771 ++++++++++++ .../emulated/sw_bw6761/pairing_test.go | 321 +++++ .../emulated/sw_bw6761/precomputations.go | 669 ++++++++++ std/algebra/emulated/sw_emulated/doc_test.go | 13 +- std/algebra/emulated/sw_emulated/params.go | 19 + .../emulated/sw_emulated/params_compute.go | 27 + std/algebra/emulated/sw_emulated/point.go | 63 + .../emulated/sw_emulated/point_test.go | 135 +- std/algebra/interfaces.go | 67 + std/algebra/native/fields_bls12377/e12.go | 42 +- std/algebra/native/fields_bls12377/e2.go | 5 + std/algebra/native/fields_bls12377/e6.go | 10 + std/algebra/native/fields_bls24315/e2.go | 5 + std/algebra/native/fields_bls24315/e24.go | 25 +- std/algebra/native/fields_bls24315/e4.go | 5 + std/algebra/native/sw_bls12377/g1_test.go | 71 ++ std/algebra/native/sw_bls12377/pairing.go | 16 + std/algebra/native/sw_bls12377/pairing2.go | 252 ++++ .../native/sw_bls12377/pairing_test.go | 44 + std/algebra/native/sw_bls24315/g1_test.go | 73 ++ std/algebra/native/sw_bls24315/pairing.go | 16 + std/algebra/native/sw_bls24315/pairing2.go | 300 +++++ .../native/sw_bls24315/pairing_test.go | 44 + std/commitments/kzg/native_doc_test.go | 128 ++ std/commitments/kzg/nonnative_doc_test.go | 151 +++ std/commitments/kzg/verifier.go | 264 ++++ std/commitments/kzg/verifier_test.go | 413 +++++++ std/commitments/kzg_bls12377/verifier.go | 82 -- std/commitments/kzg_bls12377/verifier_test.go | 165 --- std/commitments/kzg_bls24315/verifier.go | 82 -- std/commitments/kzg_bls24315/verifier_test.go | 184 --- std/groth16_bls12377/verifier.go | 139 --- std/groth16_bls12377/verifier_test.go | 206 ---- std/groth16_bls24315/verifier.go | 139 --- std/groth16_bls24315/verifier_test.go | 206 ---- std/lookup/logderivlookup/doc_test.go | 21 +- std/math/cmp/doc_isless_test.go | 23 +- std/math/emulated/doc_example_field_test.go | 20 +- std/math/emulated/emparams/emparams.go | 34 + std/math/emulated/field_hint_test.go | 20 +- std/math/emulated/params.go | 2 + std/multicommit/doc_test.go | 21 +- std/recursion/doc.go | 2 + std/recursion/groth16/doc.go | 2 + std/recursion/groth16/native_doc_test.go | 85 ++ std/recursion/groth16/nonnative_doc_test.go | 166 +++ std/recursion/groth16/verifier.go | 303 +++++ std/recursion/groth16/verifier_test.go | 311 +++++ std/recursion/wrapped_hash.go | 240 ++++ std/recursion/wrapped_hash_test.go | 259 ++++ std/selector/doc_map_test.go | 20 +- std/selector/doc_mux_test.go | 20 +- std/selector/doc_partition_test.go | 23 +- test/assert_checkcircuit.go | 16 +- test/assert_options.go | 16 +- 145 files changed, 9793 insertions(+), 2860 deletions(-) delete mode 100644 backend/groth16/bls12-377/commitment.go delete mode 100644 backend/groth16/bls12-381/commitment.go delete mode 100644 backend/groth16/bls24-315/commitment.go delete mode 100644 backend/groth16/bls24-317/commitment.go delete mode 100644 backend/groth16/bn254/commitment.go delete mode 100644 backend/groth16/bw6-633/commitment.go delete mode 100644 
backend/groth16/bw6-761/commitment.go delete mode 100644 internal/generator/backend/template/zkpschemes/groth16/groth16.commitment.go.tmpl create mode 100644 std/algebra/defaults.go create mode 100644 std/algebra/emulated/fields_bw6761/doc.go create mode 100644 std/algebra/emulated/fields_bw6761/e3.go create mode 100644 std/algebra/emulated/fields_bw6761/e3_test.go create mode 100644 std/algebra/emulated/fields_bw6761/e6.go create mode 100644 std/algebra/emulated/fields_bw6761/e6_pairing.go create mode 100644 std/algebra/emulated/fields_bw6761/e6_test.go create mode 100644 std/algebra/emulated/fields_bw6761/hints.go create mode 100644 std/algebra/emulated/sw_bw6761/doc.go create mode 100644 std/algebra/emulated/sw_bw6761/doc_test.go create mode 100644 std/algebra/emulated/sw_bw6761/g1.go create mode 100644 std/algebra/emulated/sw_bw6761/g2.go create mode 100644 std/algebra/emulated/sw_bw6761/pairing.go create mode 100644 std/algebra/emulated/sw_bw6761/pairing_test.go create mode 100644 std/algebra/emulated/sw_bw6761/precomputations.go create mode 100644 std/algebra/interfaces.go create mode 100644 std/algebra/native/sw_bls12377/pairing2.go create mode 100644 std/algebra/native/sw_bls24315/pairing2.go create mode 100644 std/commitments/kzg/native_doc_test.go create mode 100644 std/commitments/kzg/nonnative_doc_test.go create mode 100644 std/commitments/kzg/verifier.go create mode 100644 std/commitments/kzg/verifier_test.go delete mode 100644 std/commitments/kzg_bls12377/verifier.go delete mode 100644 std/commitments/kzg_bls12377/verifier_test.go delete mode 100644 std/commitments/kzg_bls24315/verifier.go delete mode 100644 std/commitments/kzg_bls24315/verifier_test.go delete mode 100644 std/groth16_bls12377/verifier.go delete mode 100644 std/groth16_bls12377/verifier_test.go delete mode 100644 std/groth16_bls24315/verifier.go delete mode 100644 std/groth16_bls24315/verifier_test.go create mode 100644 std/recursion/doc.go create mode 100644 std/recursion/groth16/doc.go create mode 100644 std/recursion/groth16/native_doc_test.go create mode 100644 std/recursion/groth16/nonnative_doc_test.go create mode 100644 std/recursion/groth16/verifier.go create mode 100644 std/recursion/groth16/verifier_test.go create mode 100644 std/recursion/wrapped_hash.go create mode 100644 std/recursion/wrapped_hash_test.go diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 0dc7ea8a10..52d34002c2 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -71,9 +71,9 @@ jobs: if: startsWith(matrix.os, 'ubuntu') == false run: | go test -tags=release_checks -v -timeout=60m . - go test -tags=release_checks -v -timeout=60m ./frontend/... - go test -tags=release_checks -v -timeout=60m ./backend/... - go test -short -v -timeout=60m ./... + # go test -tags=release_checks -v -timeout=60m ./frontend/... + # go test -tags=release_checks -v -timeout=60m ./backend/... + # go test -short -v -timeout=60m ./... - name: Test (ubuntu - race and solc) if: startsWith(matrix.os, 'ubuntu') == true run: | diff --git a/backend/backend.go b/backend/backend.go index 84a05271ab..aab30f2df1 100644 --- a/backend/backend.go +++ b/backend/backend.go @@ -15,7 +15,12 @@ // Package backend implements Zero Knowledge Proof systems: it consumes circuit compiled with gnark/frontend. 
package backend -import "github.com/consensys/gnark/constraint/solver" +import ( + "crypto/sha256" + "hash" + + "github.com/consensys/gnark/constraint/solver" +) // ID represent a unique ID for a proving scheme type ID uint16 @@ -53,13 +58,21 @@ type ProverOption func(*ProverConfig) error // ProverConfig is the configuration for the prover with the options applied. type ProverConfig struct { - SolverOpts []solver.Option + SolverOpts []solver.Option + HashToFieldFn hash.Hash + ChallengeHash hash.Hash + KZGFoldingHash hash.Hash } // NewProverConfig returns a default ProverConfig with given prover options opts // applied. func NewProverConfig(opts ...ProverOption) (ProverConfig, error) { - opt := ProverConfig{} + opt := ProverConfig{ + // we cannot initialize HashToFieldFn here as we use different domain + // separation tags for PLONK and Groth16 + ChallengeHash: sha256.New(), + KZGFoldingHash: sha256.New(), + } for _, option := range opts { if err := option(&opt); err != nil { return ProverConfig{}, err @@ -75,3 +88,98 @@ func WithSolverOptions(solverOpts ...solver.Option) ProverOption { return nil } } + +// WithProverHashToFieldFunction changes the hash function used for hashing +// bytes to field. If not set then the default hash function based on RFC 9380 +// is used. Used mainly for compatibility between different systems and +// efficient recursion. +func WithProverHashToFieldFunction(hFunc hash.Hash) ProverOption { + return func(cfg *ProverConfig) error { + cfg.HashToFieldFn = hFunc + return nil + } +} + +// WithProverChallengeHashFunction sets the hash function used for computing +// non-interactive challenges in Fiat-Shamir heuristic. If not set then by +// default SHA2-256 is used. Used mainly for compatibility between different +// systems and efficient recursion. +func WithProverChallengeHashFunction(hFunc hash.Hash) ProverOption { + return func(pc *ProverConfig) error { + pc.ChallengeHash = hFunc + return nil + } +} + +// WithProverKZGFoldingHashFunction sets the hash function used for computing +// the challenge when folding the KZG opening proofs. If not set then by default +// SHA2-256 is used. Used mainly for compatibility between different systems and +// efficient recursion. +func WithProverKZGFoldingHashFunction(hFunc hash.Hash) ProverOption { + return func(pc *ProverConfig) error { + pc.KZGFoldingHash = hFunc + return nil + } +} + +// VerifierOption defines option for altering the behavior of the verifier. See +// the descriptions of functions returning instances of this type for +// implemented options. +type VerifierOption func(*VerifierConfig) error + +// VerifierConfig is the configuration for the verifier with the options applied. +type VerifierConfig struct { + HashToFieldFn hash.Hash + ChallengeHash hash.Hash + KZGFoldingHash hash.Hash +} + +// NewVerifierConfig returns a default [VerifierConfig] with given verifier +// options applied. +func NewVerifierConfig(opts ...VerifierOption) (VerifierConfig, error) { + opt := VerifierConfig{ + // we cannot initialize HashToFieldFn here as we use different domain + // separation tags for PLONK and Groth16 + ChallengeHash: sha256.New(), + KZGFoldingHash: sha256.New(), + } + for _, option := range opts { + if err := option(&opt); err != nil { + return VerifierConfig{}, err + } + } + return opt, nil +} + +// WithVerifierHashToFieldFunction changes the hash function used for hashing +// bytes to field. If not set then the default hash function based on RFC 9380 +// is used. 
Used mainly for compatibility between different systems and +// efficient recursion. +func WithVerifierHashToFieldFunction(hFunc hash.Hash) VerifierOption { + return func(cfg *VerifierConfig) error { + cfg.HashToFieldFn = hFunc + return nil + } +} + +// WithVerifierChallengeHashFunction sets the hash function used for computing +// non-interactive challenges in Fiat-Shamir heuristic. If not set then by +// default SHA2-256 is used. Used mainly for compatibility between different +// systems and efficient recursion. +func WithVerifierChallengeHashFunction(hFunc hash.Hash) VerifierOption { + return func(pc *VerifierConfig) error { + pc.ChallengeHash = hFunc + return nil + } +} + +// WithVerifierKZGFoldingHashFunction sets the hash function used for computing +// the challenge when folding the KZG opening proofs. If not set then by default +// SHA2-256 is used. Used mainly for compatibility between different systems and +// efficient recursion. +func WithVerifierKZGFoldingHashFunction(hFunc hash.Hash) VerifierOption { + return func(pc *VerifierConfig) error { + pc.KZGFoldingHash = hFunc + return nil + } +} diff --git a/backend/groth16/bls12-377/commitment.go b/backend/groth16/bls12-377/commitment.go deleted file mode 100644 index c267e8a99b..0000000000 --- a/backend/groth16/bls12-377/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bls12-377" - "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bls12-377/prove.go b/backend/groth16/bls12-377/prove.go index 68f0133367..5dd2cf24d5 100644 --- a/backend/groth16/bls12-377/prove.go +++ b/backend/groth16/bls12-377/prove.go @@ -17,10 +17,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -59,7 +61,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
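// Editor's note (illustrative sketch, not part of the patch): the new ProverOption /
// VerifierOption hash settings introduced in backend.go above are meant to be used in
// matching pairs, as the new groth16 tests later in this patch do. A minimal example,
// assuming a compiled constraint system, a Groth16 key pair and a full witness already
// exist; the function name proveAndVerifyWithCustomHashToField is hypothetical.
package example

import (
	"crypto/sha256"

	"github.com/consensys/gnark/backend"
	"github.com/consensys/gnark/backend/groth16"
	"github.com/consensys/gnark/backend/witness"
	"github.com/consensys/gnark/constraint"
)

func proveAndVerifyWithCustomHashToField(ccs constraint.ConstraintSystem, pk groth16.ProvingKey, vk groth16.VerifyingKey, w witness.Witness) error {
	// override the RFC 9380 based default with SHA2-256, e.g. for cheaper
	// in-circuit recursion of the verifier
	proof, err := groth16.Prove(ccs, pk, w, backend.WithProverHashToFieldFunction(sha256.New()))
	if err != nil {
		return err
	}
	pub, err := w.Public()
	if err != nil {
		return err
	}
	// the verifier must use the same hash-to-field function, otherwise the
	// commitment challenges recomputed during verification will not match
	return groth16.Verify(proof, vk, pub, backend.WithVerifierHashToFieldFunction(sha256.New()))
}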
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -86,8 +91,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bls12-377/verify.go b/backend/groth16/bls12-377/verify.go index 3da51fcaee..867ce56708 100644 --- a/backend/groth16/bls12-377/verify.go +++ b/backend/groth16/bls12-377/verify.go @@ -19,15 +19,18 @@ package groth16 import ( "errors" "fmt" + "io" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "time" ) var ( @@ -36,7 +39,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -75,12 +85,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/bls12-381/commitment.go b/backend/groth16/bls12-381/commitment.go deleted file mode 100644 index 6fcc533b5b..0000000000 --- a/backend/groth16/bls12-381/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. 
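// Editor's note (illustrative sketch, not part of the patch): the change above, repeated
// verbatim for every supported curve below, replaces the deleted solveCommitmentWire
// helper with an inlined hash-to-field step. Condensed into a standalone helper (the
// name commitmentToField is hypothetical), the shared logic is:
package example

import (
	"hash"

	"github.com/consensys/gnark-crypto/ecc/bn254/fr" // every curve's fr package behaves the same way
)

// commitmentToField hashes a serialized commitment with the configured hash.Hash and
// maps the digest to a field element. When the hash output is shorter than a field
// element (e.g. the 3-byte constantHash used in the new groth16 tests), only the
// available bytes are interpreted.
func commitmentToField(h hash.Hash, serializedCommitment []byte) fr.Element {
	h.Reset()
	h.Write(serializedCommitment)
	digest := h.Sum(nil)

	n := fr.Bytes
	if h.Size() < fr.Bytes {
		n = h.Size()
	}

	var res fr.Element
	res.SetBytes(digest[:n]) // big-endian bytes, reduced modulo the field order
	return res
}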
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bls12-381" - "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bls12-381/prove.go b/backend/groth16/bls12-381/prove.go index 0112677e3e..3a18d7ae1a 100644 --- a/backend/groth16/bls12-381/prove.go +++ b/backend/groth16/bls12-381/prove.go @@ -17,10 +17,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -59,7 +61,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -86,8 +91,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bls12-381/verify.go b/backend/groth16/bls12-381/verify.go index 646e052f42..0bf293f1d3 100644 --- a/backend/groth16/bls12-381/verify.go +++ b/backend/groth16/bls12-381/verify.go @@ -19,15 +19,18 @@ package groth16 import ( "errors" "fmt" + "io" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "time" ) var ( @@ -36,7 +39,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -75,12 +85,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/bls24-315/commitment.go b/backend/groth16/bls24-315/commitment.go deleted file mode 100644 index fc1a3def96..0000000000 --- a/backend/groth16/bls24-315/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bls24-315" - "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bls24-315/prove.go b/backend/groth16/bls24-315/prove.go index 83f45d4a46..e32091db68 100644 --- a/backend/groth16/bls24-315/prove.go +++ b/backend/groth16/bls24-315/prove.go @@ -17,10 +17,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -59,7 +61,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -86,8 +91,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bls24-315/verify.go b/backend/groth16/bls24-315/verify.go index 6e85b70ecf..2c95a54d0d 100644 --- a/backend/groth16/bls24-315/verify.go +++ b/backend/groth16/bls24-315/verify.go @@ -19,15 +19,18 @@ package groth16 import ( "errors" "fmt" + "io" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "time" ) var ( @@ -36,7 +39,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -75,12 +85,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/bls24-317/commitment.go b/backend/groth16/bls24-317/commitment.go deleted file mode 100644 index 05d71ba172..0000000000 --- a/backend/groth16/bls24-317/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bls24-317" - "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bls24-317/prove.go b/backend/groth16/bls24-317/prove.go index 82fbc62e6a..6eb04ef675 100644 --- a/backend/groth16/bls24-317/prove.go +++ b/backend/groth16/bls24-317/prove.go @@ -17,10 +17,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -59,7 +61,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -86,8 +91,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bls24-317/verify.go b/backend/groth16/bls24-317/verify.go index 3affc23113..f4c92dc687 100644 --- a/backend/groth16/bls24-317/verify.go +++ b/backend/groth16/bls24-317/verify.go @@ -19,15 +19,18 @@ package groth16 import ( "errors" "fmt" + "io" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "time" ) var ( @@ -36,7 +39,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -75,12 +85,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/bn254/commitment.go b/backend/groth16/bn254/commitment.go deleted file mode 100644 index 435a7c058c..0000000000 --- a/backend/groth16/bn254/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bn254" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index e0759c2315..00809bc822 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -20,10 +20,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -62,7 +64,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -89,8 +94,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bn254/verify.go b/backend/groth16/bn254/verify.go index 7d68d68a17..14eda65ed6 100644 --- a/backend/groth16/bn254/verify.go +++ b/backend/groth16/bn254/verify.go @@ -19,16 +19,19 @@ package groth16 import ( "errors" "fmt" + "io" + "text/template" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "text/template" - "time" ) var ( @@ -37,7 +40,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -76,12 +86,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/bw6-633/commitment.go b/backend/groth16/bw6-633/commitment.go deleted file mode 100644 index f8af92e4fc..0000000000 --- a/backend/groth16/bw6-633/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bw6-633" - "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bw6-633/prove.go b/backend/groth16/bw6-633/prove.go index 8a68b87eeb..6cac809b96 100644 --- a/backend/groth16/bw6-633/prove.go +++ b/backend/groth16/bw6-633/prove.go @@ -17,10 +17,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -59,7 +61,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -86,8 +91,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bw6-633/verify.go b/backend/groth16/bw6-633/verify.go index c32a71e6a6..3bfaaffd39 100644 --- a/backend/groth16/bw6-633/verify.go +++ b/backend/groth16/bw6-633/verify.go @@ -19,15 +19,18 @@ package groth16 import ( "errors" "fmt" + "io" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "time" ) var ( @@ -36,7 +39,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -75,12 +85,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/bw6-761/commitment.go b/backend/groth16/bw6-761/commitment.go deleted file mode 100644 index 5c357c24ad..0000000000 --- a/backend/groth16/bw6-761/commitment.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2020 ConsenSys Software Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 - -import ( - curve "github.com/consensys/gnark-crypto/ecc/bw6-761" - "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} diff --git a/backend/groth16/bw6-761/prove.go b/backend/groth16/bw6-761/prove.go index 8b6df934c7..78e149be9f 100644 --- a/backend/groth16/bw6-761/prove.go +++ b/backend/groth16/bw6-761/prove.go @@ -17,10 +17,12 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16/internal" @@ -59,7 +61,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -86,8 +91,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/backend/groth16/bw6-761/verify.go b/backend/groth16/bw6-761/verify.go index ca49685a5e..f08d631d62 100644 --- a/backend/groth16/bw6-761/verify.go +++ b/backend/groth16/bw6-761/verify.go @@ -19,15 +19,18 @@ package groth16 import ( "errors" "fmt" + "io" + "time" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen" "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" - "io" - "time" ) var ( @@ -36,7 +39,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) 
+ if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -75,12 +85,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/backend/groth16/groth16.go b/backend/groth16/groth16.go index 41e0f63c7e..823e7c3f4b 100644 --- a/backend/groth16/groth16.go +++ b/backend/groth16/groth16.go @@ -109,7 +109,7 @@ type VerifyingKey interface { } // Verify runs the groth16.Verify algorithm on provided proof with given witness -func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness) error { +func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness, opts ...backend.VerifierOption) error { switch _proof := proof.(type) { case *groth16_bls12377.Proof: @@ -117,43 +117,43 @@ func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness) error { if !ok { return witness.ErrInvalidWitness } - return groth16_bls12377.Verify(_proof, vk.(*groth16_bls12377.VerifyingKey), w) + return groth16_bls12377.Verify(_proof, vk.(*groth16_bls12377.VerifyingKey), w, opts...) case *groth16_bls12381.Proof: w, ok := publicWitness.Vector().(fr_bls12381.Vector) if !ok { return witness.ErrInvalidWitness } - return groth16_bls12381.Verify(_proof, vk.(*groth16_bls12381.VerifyingKey), w) + return groth16_bls12381.Verify(_proof, vk.(*groth16_bls12381.VerifyingKey), w, opts...) case *groth16_bn254.Proof: w, ok := publicWitness.Vector().(fr_bn254.Vector) if !ok { return witness.ErrInvalidWitness } - return groth16_bn254.Verify(_proof, vk.(*groth16_bn254.VerifyingKey), w) + return groth16_bn254.Verify(_proof, vk.(*groth16_bn254.VerifyingKey), w, opts...) case *groth16_bw6761.Proof: w, ok := publicWitness.Vector().(fr_bw6761.Vector) if !ok { return witness.ErrInvalidWitness } - return groth16_bw6761.Verify(_proof, vk.(*groth16_bw6761.VerifyingKey), w) + return groth16_bw6761.Verify(_proof, vk.(*groth16_bw6761.VerifyingKey), w, opts...) case *groth16_bls24317.Proof: w, ok := publicWitness.Vector().(fr_bls24317.Vector) if !ok { return witness.ErrInvalidWitness } - return groth16_bls24317.Verify(_proof, vk.(*groth16_bls24317.VerifyingKey), w) + return groth16_bls24317.Verify(_proof, vk.(*groth16_bls24317.VerifyingKey), w, opts...) case *groth16_bls24315.Proof: w, ok := publicWitness.Vector().(fr_bls24315.Vector) if !ok { return witness.ErrInvalidWitness } - return groth16_bls24315.Verify(_proof, vk.(*groth16_bls24315.VerifyingKey), w) + return groth16_bls24315.Verify(_proof, vk.(*groth16_bls24315.VerifyingKey), w, opts...) 
case *groth16_bw6633.Proof: w, ok := publicWitness.Vector().(fr_bw6633.Vector) if !ok { return witness.ErrInvalidWitness } - return groth16_bw6633.Verify(_proof, vk.(*groth16_bw6633.VerifyingKey), w) + return groth16_bw6633.Verify(_proof, vk.(*groth16_bw6633.VerifyingKey), w, opts...) default: panic("unrecognized R1CS curve type") } diff --git a/backend/groth16/groth16_test.go b/backend/groth16/groth16_test.go index 24fca03aed..027dc388d2 100644 --- a/backend/groth16/groth16_test.go +++ b/backend/groth16/groth16_test.go @@ -1,17 +1,56 @@ package groth16_test import ( + "fmt" "math/big" "testing" "github.com/consensys/gnark" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/test" ) +func TestCustomHashToField(t *testing.T) { + assert := test.NewAssert(t) + assignment := &commitmentCircuit{X: 1} + for _, curve := range getCurves() { + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(curve.ScalarField(), r1cs.NewBuilder, &commitmentCircuit{}) + assert.NoError(err) + pk, vk, err := groth16.Setup(ccs) + assert.NoError(err) + witness, err := frontend.NewWitness(assignment, curve.ScalarField()) + assert.NoError(err) + assert.Run(func(assert *test.Assert) { + proof, err := groth16.Prove(ccs, pk, witness, backend.WithProverHashToFieldFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = groth16.Verify(proof, vk, pubWitness, backend.WithVerifierHashToFieldFunction(constantHash{})) + assert.NoError(err) + }, "custom success") + assert.Run(func(assert *test.Assert) { + proof, err := groth16.Prove(ccs, pk, witness, backend.WithProverHashToFieldFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = groth16.Verify(proof, vk, pubWitness) + assert.Error(err) + }, "prover_only") + assert.Run(func(assert *test.Assert) { + proof, err := groth16.Prove(ccs, pk, witness) + assert.Error(err) + _ = proof + }, "verifier_only") + }, curve.String()) + } +} + //--------------------// // benches // //--------------------// @@ -116,6 +155,27 @@ func referenceCircuit(curve ecc.ID) (constraint.ConstraintSystem, frontend.Circu return r1cs, &good } +type commitmentCircuit struct { + X frontend.Variable +} + +func (c *commitmentCircuit) Define(api frontend.API) error { + cmt, err := api.(frontend.Committer).Commit(c.X) + if err != nil { + return fmt.Errorf("commit: %w", err) + } + api.AssertIsEqual(cmt, "0xaabbcc") + return nil +} + +type constantHash struct{} + +func (h constantHash) Write(p []byte) (n int, err error) { return len(p), nil } +func (h constantHash) Sum(b []byte) []byte { return []byte{0xaa, 0xbb, 0xcc} } +func (h constantHash) Reset() {} +func (h constantHash) Size() int { return 3 } +func (h constantHash) BlockSize() int { return 32 } + func getCurves() []ecc.ID { if testing.Short() { return []ecc.ID{ecc.BN254} diff --git a/backend/plonk/bls12-377/prove.go b/backend/plonk/bls12-377/prove.go index 2b53a9c58c..46346775c5 100644 --- a/backend/plonk/bls12-377/prove.go +++ b/backend/plonk/bls12-377/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + 
"golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" - curve "github.com/consensys/gnark-crypto/ecc/bls12-377" - "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/iop" - cs "github.com/consensys/gnark/constraint/bls12-377" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bls12-377" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := 
&s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bls12-377/verify.go b/backend/plonk/bls12-377/verify.go index cb87b4745d..8b7fbdf3a4 100644 --- a/backend/plonk/bls12-377/verify.go +++ b/backend/plonk/bls12-377/verify.go @@ -17,20 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" - "time" - "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "time" - "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -38,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bls12-377").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
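Usage-wise, the new options surface to callers as backend.WithProverHashToFieldFunction and backend.WithVerifierHashToFieldFunction, as exercised by TestCustomHashToField earlier in this patch. Below is a minimal, self-contained sketch of a caller wiring the same custom hash into both sides; the circuit and the constant hash mirror commitmentCircuit / constantHash from the test, and error handling is elided, so this is illustrative only and not part of the patch.

```go
// Sketch only: mirrors TestCustomHashToField above; error handling elided.
package main

import (
	"fmt"

	"github.com/consensys/gnark-crypto/ecc"
	"github.com/consensys/gnark/backend"
	"github.com/consensys/gnark/backend/groth16"
	"github.com/consensys/gnark/frontend"
	"github.com/consensys/gnark/frontend/cs/r1cs"
)

// circuit with a single BSB22 commitment, as in commitmentCircuit above.
type circuit struct{ X frontend.Variable }

func (c *circuit) Define(api frontend.API) error {
	cmt, err := api.(frontend.Committer).Commit(c.X)
	if err != nil {
		return err
	}
	api.AssertIsEqual(cmt, "0xaabbcc")
	return nil
}

// constHash is a stand-in hash-to-field function, same shape as constantHash above.
type constHash struct{}

func (constHash) Write(p []byte) (int, error) { return len(p), nil }
func (constHash) Sum([]byte) []byte           { return []byte{0xaa, 0xbb, 0xcc} }
func (constHash) Reset()                      {}
func (constHash) Size() int                   { return 3 }
func (constHash) BlockSize() int              { return 32 }

func main() {
	ccs, _ := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit{})
	pk, vk, _ := groth16.Setup(ccs)
	w, _ := frontend.NewWitness(&circuit{X: 1}, ecc.BN254.ScalarField())
	pub, _ := w.Public()

	// the prover and the verifier must be given the same hash-to-field function
	proof, _ := groth16.Prove(ccs, pk, w, backend.WithProverHashToFieldFunction(constHash{}))
	err := groth16.Verify(proof, vk, pub, backend.WithVerifierHashToFieldFunction(constHash{}))
	fmt.Println("verified:", err == nil)
}
```

As the prover_only and verifier_only sub-tests above check, dropping the option on either side breaks the run, because the default hash-to-field function then no longer matches the one used on the other side.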
@@ -119,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -136,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -250,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/bls12-381/prove.go b/backend/plonk/bls12-381/prove.go index e08ca1a87d..cd6282fcdf 100644 --- a/backend/plonk/bls12-381/prove.go +++ b/backend/plonk/bls12-381/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + "golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" - curve "github.com/consensys/gnark-crypto/ecc/bls12-381" - "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/iop" - cs "github.com/consensys/gnark/constraint/bls12-381" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bls12-381" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bls12-381/verify.go b/backend/plonk/bls12-381/verify.go index 2bb88574a6..2b3eeef6c1 100644 --- a/backend/plonk/bls12-381/verify.go +++ b/backend/plonk/bls12-381/verify.go @@ -17,20 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" - "time" - "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "time" - "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -38,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bls12-381").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
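When no option is passed, newInstance falls back to hash_to_field.New([]byte("BSB22-Plonk")), and bsb22Hint and Verify map the marshalled BSB22 commitment to a field element with the truncation shown in the hunks above. A small stand-alone sketch of that mapping follows; the curve (bls12-381) and the input point are arbitrary illustrations, only the hashing calls mirrored from the hunks are load-bearing.

```go
// Sketch of the default hash-to-field mapping used by the patched prover/verifier.
package main

import (
	"fmt"

	curve "github.com/consensys/gnark-crypto/ecc/bls12-381"
	"github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
	"github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field"
)

// hashCommitment mirrors the replaced fr.Hash(..., []byte("BSB22-Plonk"), 1) path:
// hash the marshalled commitment, then keep at most fr.Bytes bytes of the digest.
func hashCommitment(cmt curve.G1Affine) fr.Element {
	htf := hash_to_field.New([]byte("BSB22-Plonk")) // default when no option is given

	htf.Write(cmt.Marshal())
	hashBts := htf.Sum(nil)
	htf.Reset()

	nbBuf := fr.Bytes
	if htf.Size() < fr.Bytes {
		nbBuf = htf.Size() // shorter custom hashes are truncated, as in the hunks above
	}

	var res fr.Element
	res.SetBytes(hashBts[:nbBuf])
	return res
}

func main() {
	_, _, g1, _ := curve.Generators() // any G1 point stands in for a Bsb22 commitment
	fmt.Println(hashCommitment(g1).String())
}
```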
@@ -119,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -136,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -250,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/bls24-315/prove.go b/backend/plonk/bls24-315/prove.go index 7ae6f81136..09f0643a47 100644 --- a/backend/plonk/bls24-315/prove.go +++ b/backend/plonk/bls24-315/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + "golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" - curve "github.com/consensys/gnark-crypto/ecc/bls24-315" - "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/iop" - cs "github.com/consensys/gnark/constraint/bls24-315" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bls24-315" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bls24-315/verify.go b/backend/plonk/bls24-315/verify.go index a0a3b3e296..446c95ff42 100644 --- a/backend/plonk/bls24-315/verify.go +++ b/backend/plonk/bls24-315/verify.go @@ -17,20 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" - "time" - "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "time" - "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -38,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bls24-315").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
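On the verifier side (see the Verify hunks above and below), the hashed commitment enters the public-input term through the Lagrange basis polynomial at the commitment's constraint index. In the notation of the inline comments, with ω the generator of the evaluation domain of size n and i the commitment constraint index:

$$
L_i(\zeta) \;=\; \frac{\omega^{i}}{n}\cdot\frac{\zeta^{n}-1}{\zeta-\omega^{i}},
\qquad
\mathrm{PI} \;\mathrel{+}=\; \mathrm{HashToField}\big(\mathrm{Bsb22Commitment}_i\big)\cdot L_i(\zeta),
$$

where HashToField is cfg.HashToFieldFn (hash_to_field.New([]byte("BSB22-Plonk")) by default). Only the hash changes in this patch; the Lagrange factor is computed exactly as before.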
@@ -119,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -136,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -250,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/bls24-317/prove.go b/backend/plonk/bls24-317/prove.go index 359e4c7199..49412d739a 100644 --- a/backend/plonk/bls24-317/prove.go +++ b/backend/plonk/bls24-317/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + "golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" - curve "github.com/consensys/gnark-crypto/ecc/bls24-317" - "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/iop" - cs "github.com/consensys/gnark/constraint/bls24-317" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bls24-317" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bls24-317/verify.go b/backend/plonk/bls24-317/verify.go index a2cb47d212..ab64624d7c 100644 --- a/backend/plonk/bls24-317/verify.go +++ b/backend/plonk/bls24-317/verify.go @@ -17,20 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" - "time" - "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "time" - "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -38,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bls24-317").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
@@ -119,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -136,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -250,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/bn254/prove.go b/backend/plonk/bn254/prove.go index acaaf3383e..1a237418d6 100644 --- a/backend/plonk/bn254/prove.go +++ b/backend/plonk/bn254/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + "golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" - curve "github.com/consensys/gnark-crypto/ecc/bn254" - "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bn254/fr/iop" - cs "github.com/consensys/gnark/constraint/bn254" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bn254" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bn254/solidity.go b/backend/plonk/bn254/solidity.go index 5a6c5e9eab..dc8205dc45 100644 --- a/backend/plonk/bn254/solidity.go +++ b/backend/plonk/bn254/solidity.go @@ -22,150 +22,162 @@ pragma solidity ^0.8.19; contract PlonkVerifier { - uint256 private constant r_mod = 21888242871839275222246405745257275088548364400416034343698204186575808495617; - uint256 private constant p_mod = 21888242871839275222246405745257275088696311157297823662689037894645226208583; + uint256 private constant R_MOD = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + uint256 private constant P_MOD = 21888242871839275222246405745257275088696311157297823662689037894645226208583; {{ range $index, $element := .Kzg.G2 }} - uint256 private constant g2_srs_{{ $index }}_x_0 = {{ (fpstr $element.X.A1) }}; - uint256 private constant g2_srs_{{ $index }}_x_1 = {{ (fpstr $element.X.A0) }}; - uint256 private constant g2_srs_{{ $index }}_y_0 = {{ (fpstr $element.Y.A1) }}; - uint256 private constant g2_srs_{{ $index }}_y_1 = {{ (fpstr $element.Y.A0) }}; + uint256 private constant G2_SRS_{{ $index }}_X_0 = {{ (fpstr $element.X.A1) }}; + uint256 private constant G2_SRS_{{ $index }}_X_1 = {{ (fpstr $element.X.A0) }}; + uint256 private constant G2_SRS_{{ $index }}_Y_0 = {{ (fpstr $element.Y.A1) }}; + uint256 private constant G2_SRS_{{ $index }}_Y_1 = {{ (fpstr $element.Y.A0) }}; {{ end }} + uint256 private constant G1_SRS_X = {{ fpstr .Kzg.G1.X }}; + uint256 private constant G1_SRS_Y = {{ fpstr .Kzg.G1.Y }}; + // ----------------------- vk --------------------- - uint256 private constant vk_domain_size = {{ .Size }}; - uint256 private constant vk_inv_domain_size = {{ (frstr .SizeInv) }}; - uint256 private constant vk_omega = {{ (frstr .Generator) }}; - uint256 private constant vk_ql_com_x = {{ (fpstr .Ql.X) }}; - uint256 private constant vk_ql_com_y = {{ (fpstr .Ql.Y) }}; - uint256 private constant vk_qr_com_x = {{ (fpstr .Qr.X) }}; - uint256 private constant vk_qr_com_y = {{ (fpstr .Qr.Y) }}; - uint256 private constant vk_qm_com_x = {{ (fpstr .Qm.X) }}; - uint256 private constant vk_qm_com_y = {{ (fpstr .Qm.Y) }}; - uint256 private constant vk_qo_com_x = {{ (fpstr .Qo.X) }}; - uint256 private constant vk_qo_com_y = {{ (fpstr .Qo.Y) }}; - uint256 private constant vk_qk_com_x = {{ (fpstr .Qk.X) }}; - uint256 private constant vk_qk_com_y = {{ (fpstr .Qk.Y) }}; + uint256 private constant VK_NB_PUBLIC_INPUTS = {{ .NbPublicVariables }}; + uint256 private constant VK_DOMAIN_SIZE = {{ .Size }}; + uint256 
private constant VK_INV_DOMAIN_SIZE = {{ (frstr .SizeInv) }}; + uint256 private constant VK_OMEGA = {{ (frstr .Generator) }}; + uint256 private constant VK_QL_COM_X = {{ (fpstr .Ql.X) }}; + uint256 private constant VK_QL_COM_Y = {{ (fpstr .Ql.Y) }}; + uint256 private constant VK_QR_COM_X = {{ (fpstr .Qr.X) }}; + uint256 private constant VK_QR_COM_Y = {{ (fpstr .Qr.Y) }}; + uint256 private constant VK_QM_COM_X = {{ (fpstr .Qm.X) }}; + uint256 private constant VK_QM_COM_Y = {{ (fpstr .Qm.Y) }}; + uint256 private constant VK_QO_COM_X = {{ (fpstr .Qo.X) }}; + uint256 private constant VK_QO_COM_Y = {{ (fpstr .Qo.Y) }}; + uint256 private constant VK_QK_COM_X = {{ (fpstr .Qk.X) }}; + uint256 private constant VK_QK_COM_Y = {{ (fpstr .Qk.Y) }}; {{ range $index, $element := .S }} - uint256 private constant vk_s{{ inc $index }}_com_x = {{ (fpstr $element.X) }}; - uint256 private constant vk_s{{ inc $index }}_com_y = {{ (fpstr $element.Y) }}; + uint256 private constant VK_S{{ inc $index }}_COM_X = {{ (fpstr $element.X) }}; + uint256 private constant VK_S{{ inc $index }}_COM_Y = {{ (fpstr $element.Y) }}; {{ end }} - uint256 private constant vk_coset_shift = 5; + uint256 private constant VK_COSET_SHIFT = 5; + {{ range $index, $element := .Qcp}} - uint256 private constant vk_qc_{{ $index }}_x = {{ (fpstr $element.X) }}; - uint256 private constant vk_qc_{{ $index }}_y = {{ (fpstr $element.Y) }}; + uint256 private constant VK_QCP_{{ $index }}_X = {{ (fpstr $element.X) }}; + uint256 private constant VK_QCP_{{ $index }}_Y = {{ (fpstr $element.Y) }}; {{ end }} + {{ range $index, $element := .CommitmentConstraintIndexes -}} - uint256 private constant vk_index_commit_api_{{ $index }} = {{ $element }}; - {{ end }} - uint256 private constant vk_nb_custom_gates = {{ len .CommitmentConstraintIndexes }}; + uint256 private constant VK_INDEX_COMMIT_API{{ $index }} = {{ $element }}; + {{ end -}} + uint256 private constant VK_NB_CUSTOM_GATES = {{ len .CommitmentConstraintIndexes }}; // ------------------------------------------------ // offset proof - uint256 private constant proof_l_com_x = 0x00; - uint256 private constant proof_l_com_y = 0x20; - uint256 private constant proof_r_com_x = 0x40; - uint256 private constant proof_r_com_y = 0x60; - uint256 private constant proof_o_com_x = 0x80; - uint256 private constant proof_o_com_y = 0xa0; + uint256 private constant PROOF_L_COM_X = 0x00; + uint256 private constant PROOF_L_COM_Y = 0x20; + uint256 private constant PROOF_R_COM_X = 0x40; + uint256 private constant PROOF_R_COM_Y = 0x60; + uint256 private constant PROOF_O_COM_X = 0x80; + uint256 private constant PROOF_O_COM_Y = 0xa0; // h = h_0 + x^{n+2}h_1 + x^{2(n+2)}h_2 - uint256 private constant proof_h_0_x = 0xc0; - uint256 private constant proof_h_0_y = 0xe0; - uint256 private constant proof_h_1_x = 0x100; - uint256 private constant proof_h_1_y = 0x120; - uint256 private constant proof_h_2_x = 0x140; - uint256 private constant proof_h_2_y = 0x160; + uint256 private constant PROOF_H_0_X = 0xc0; + uint256 private constant PROOF_H_0_Y = 0xe0; + uint256 private constant PROOF_H_1_X = 0x100; + uint256 private constant PROOF_H_1_Y = 0x120; + uint256 private constant PROOF_H_2_X = 0x140; + uint256 private constant PROOF_H_2_Y = 0x160; // wire values at zeta - uint256 private constant proof_l_at_zeta = 0x180; - uint256 private constant proof_r_at_zeta = 0x1a0; - uint256 private constant proof_o_at_zeta = 0x1c0; + uint256 private constant PROOF_L_AT_ZETA = 0x180; + uint256 private constant PROOF_R_AT_ZETA = 0x1a0; + uint256 
private constant PROOF_O_AT_ZETA = 0x1c0; //uint256[STATE_WIDTH-1] permutation_polynomials_at_zeta; // Sσ1(zeta),Sσ2(zeta) - uint256 private constant proof_s1_at_zeta = 0x1e0; // Sσ1(zeta) - uint256 private constant proof_s2_at_zeta = 0x200; // Sσ2(zeta) + uint256 private constant PROOF_S1_AT_ZETA = 0x1e0; // Sσ1(zeta) + uint256 private constant PROOF_S2_AT_ZETA = 0x200; // Sσ2(zeta) //Bn254.G1Point grand_product_commitment; // [z(x)] - uint256 private constant proof_grand_product_commitment_x = 0x220; - uint256 private constant proof_grand_product_commitment_y = 0x240; + uint256 private constant PROOF_GRAND_PRODUCT_COMMITMENT_X = 0x220; + uint256 private constant PROOF_GRAND_PRODUCT_COMMITMENT_Y = 0x240; - uint256 private constant proof_grand_product_at_zeta_omega = 0x260; // z(w*zeta) - uint256 private constant proof_quotient_polynomial_at_zeta = 0x280; // t(zeta) - uint256 private constant proof_linearised_polynomial_at_zeta = 0x2a0; // r(zeta) + uint256 private constant PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA = 0x260; // z(w*zeta) + uint256 private constant PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA = 0x280; // t(zeta) + uint256 private constant PROOF_LINEARISED_POLYNOMIAL_AT_ZETA = 0x2a0; // r(zeta) // Folded proof for the opening of H, linearised poly, l, r, o, s_1, s_2, qcp - uint256 private constant proof_batch_opening_at_zeta_x = 0x2c0; // [Wzeta] - uint256 private constant proof_batch_opening_at_zeta_y = 0x2e0; + uint256 private constant PROOF_BATCH_OPENING_AT_ZETA_X = 0x2c0; // [Wzeta] + uint256 private constant PROOF_BATCH_OPENING_AT_ZETA_Y = 0x2e0; - //Bn254.G1Point opening_at_zeta_omega_proof; // [Wzeta*omega] - uint256 private constant proof_opening_at_zeta_omega_x = 0x300; - uint256 private constant proof_opening_at_zeta_omega_y = 0x320; + uint256 private constant PROOF_OPENING_AT_ZETA_OMEGA_X = 0x300; + uint256 private constant PROOF_OPENING_AT_ZETA_OMEGA_Y = 0x320; + + uint256 private constant PROOF_OPENING_QCP_AT_ZETA = 0x340; + uint256 private constant PROOF_COMMITMENTS_WIRES_CUSTOM_GATES = {{ hex (add 832 (mul (len .CommitmentConstraintIndexes) 32 ) )}}; - uint256 private constant proof_openings_qci_at_zeta = 0x340; // -> next part of proof is // [ openings_selector_commits || commitments_wires_commit_api] // -------- offset state // challenges to check the claimed quotient - uint256 private constant state_alpha = 0x00; - uint256 private constant state_beta = 0x20; - uint256 private constant state_gamma = 0x40; - uint256 private constant state_zeta = 0x60; + uint256 private constant STATE_ALPHA = 0x00; + uint256 private constant STATE_BETA = 0x20; + uint256 private constant STATE_GAMMA = 0x40; + uint256 private constant STATE_ZETA = 0x60; // reusable value - uint256 private constant state_alpha_square_lagrange_0 = 0x80; + uint256 private constant STATE_ALPHA_SQUARE_LAGRANGE_0 = 0x80; // commitment to H - uint256 private constant state_folded_h_x = 0xa0; - uint256 private constant state_folded_h_y = 0xc0; + uint256 private constant STATE_FOLDED_H_X = 0xa0; + uint256 private constant STATE_FOLDED_H_Y = 0xc0; // commitment to the linearised polynomial - uint256 private constant state_linearised_polynomial_x = 0xe0; - uint256 private constant state_linearised_polynomial_y = 0x100; + uint256 private constant STATE_LINEARISED_POLYNOMIAL_X = 0xe0; + uint256 private constant STATE_LINEARISED_POLYNOMIAL_Y = 0x100; // Folded proof for the opening of H, linearised poly, l, r, o, s_1, s_2, qcp - uint256 private constant state_folded_claimed_values = 0x120; + uint256 private constant 
STATE_FOLDED_CLAIMED_VALUES = 0x120; // folded digests of H, linearised poly, l, r, o, s_1, s_2, qcp - // Bn254.G1Point folded_digests; - uint256 private constant state_folded_digests_x = 0x140; - uint256 private constant state_folded_digests_y = 0x160; + uint256 private constant STATE_FOLDED_DIGESTS_X = 0x140; + uint256 private constant STATE_FOLDED_DIGESTS_Y = 0x160; - uint256 private constant state_pi = 0x180; + uint256 private constant STATE_PI = 0x180; - uint256 private constant state_zeta_power_n_minus_one = 0x1a0; + uint256 private constant STATE_ZETA_POWER_N_MINUS_ONE = 0x1a0; - uint256 private constant state_gamma_kzg = 0x1c0; + uint256 private constant STATE_GAMMA_KZG = 0x1c0; - uint256 private constant state_success = 0x1e0; - uint256 private constant state_check_var = 0x200; // /!\ this slot is used for debugging only + uint256 private constant STATE_SUCCESS = 0x1e0; + uint256 private constant STATE_CHECK_VAR = 0x200; // /!\ this slot is used for debugging only - uint256 private constant state_last_mem = 0x220; + uint256 private constant STATE_LAST_MEM = 0x220; // -------- errors - uint256 private constant error_string_id = 0x08c379a000000000000000000000000000000000000000000000000000000000; // selector for function Error(string) + uint256 private constant ERROR_STRING_ID = 0x08c379a000000000000000000000000000000000000000000000000000000000; // selector for function Error(string) {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} // -------- utils (for hash_fr) - uint256 private constant bb = 340282366920938463463374607431768211456; // 2**128 - uint256 private constant zero_uint256 = 0; + uint256 private constant HASH_FR_BB = 340282366920938463463374607431768211456; // 2**128 + uint256 private constant HASH_FR_ZERO_UINT256 = 0; - uint8 private constant lenInBytes = 48; - uint8 private constant sizeDomain = 11; - uint8 private constant one = 1; - uint8 private constant two = 2; + uint8 private constant HASH_FR_LEN_IN_BYTES = 48; + uint8 private constant HASH_FR_SIZE_DOMAIN = 11; + uint8 private constant HASH_FR_ONE = 1; + uint8 private constant HASH_FR_TWO = 2; {{ end }} + /// Verify a Plonk proof. + /// Reverts if the proof or the public inputs are malformed. 
+ /// @param proof serialised plonk proof (using gnark's MarshalSolidity) + /// @param public_inputs (must be reduced) + /// @return success true if the proof passes false otherwise function Verify(bytes calldata proof, uint256[] calldata public_inputs) public view returns(bool success) { assembly { let mem := mload(0x40) - let freeMem := add(mem, state_last_mem) + let freeMem := add(mem, STATE_LAST_MEM) // sanity checks + check_number_of_public_inputs(public_inputs.length) check_inputs_size(public_inputs.length, public_inputs.offset) check_proof_size(proof.length) check_proof_openings_size(proof.offset) @@ -178,17 +190,17 @@ contract PlonkVerifier { derive_zeta(proof.offset, prev_challenge_non_reduced) // evaluation of Z=Xⁿ-1 at ζ, we save this value - let zeta := mload(add(mem, state_zeta)) - let zeta_power_n_minus_one := addmod(pow(zeta, vk_domain_size, freeMem), sub(r_mod, 1), r_mod) - mstore(add(mem, state_zeta_power_n_minus_one), zeta_power_n_minus_one) + let zeta := mload(add(mem, STATE_ZETA)) + let zeta_power_n_minus_one := addmod(pow(zeta, VK_DOMAIN_SIZE, freeMem), sub(R_MOD, 1), R_MOD) + mstore(add(mem, STATE_ZETA_POWER_N_MINUS_ONE), zeta_power_n_minus_one) // public inputs contribution let l_pi := sum_pi_wo_api_commit(public_inputs.offset, public_inputs.length, freeMem) {{ if (gt (len .CommitmentConstraintIndexes) 0 ) -}} let l_wocommit := sum_pi_commit(proof.offset, public_inputs.length, freeMem) - l_pi := addmod(l_wocommit, l_pi, r_mod) + l_pi := addmod(l_wocommit, l_pi, R_MOD) {{ end -}} - mstore(add(mem, state_pi), l_pi) + mstore(add(mem, STATE_PI), l_pi) compute_alpha_square_lagrange_0() verify_quotient_poly_eval_at_zeta(proof.offset) @@ -198,39 +210,57 @@ contract PlonkVerifier { fold_state(proof.offset) batch_verify_multi_points(proof.offset) - success := mload(add(mem, state_success)) + success := mload(add(mem, STATE_SUCCESS)) // Beginning errors ------------------------------------------------- + + function error_nb_public_inputs() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x1d) + mstore(add(ptError, 0x44), "wrong number of public inputs") + revert(ptError, 0x64) + } + + /// Called when an operation on Bn254 fails + /// @dev for instance when calling EcMul on a point not on Bn254. function error_ec_op() { let ptError := mload(0x40) - mstore(ptError, error_string_id) // selector for function Error(string) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) mstore(add(ptError, 0x4), 0x20) mstore(add(ptError, 0x24), 0x12) mstore(add(ptError, 0x44), "error ec operation") revert(ptError, 0x64) } + /// Called when one of the public inputs is not reduced. 
function error_inputs_size() { let ptError := mload(0x40) - mstore(ptError, error_string_id) // selector for function Error(string) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) mstore(add(ptError, 0x4), 0x20) mstore(add(ptError, 0x24), 0x18) mstore(add(ptError, 0x44), "inputs are bigger than r") revert(ptError, 0x64) } + /// Called when the proof size is not as expected + /// @dev for instance, to avoid overflow attacks function error_proof_size() { let ptError := mload(0x40) - mstore(ptError, error_string_id) // selector for function Error(string) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) mstore(add(ptError, 0x4), 0x20) mstore(add(ptError, 0x24), 0x10) mstore(add(ptError, 0x44), "wrong proof size") revert(ptError, 0x64) } + /// Called when one of the openings is bigger than r + /// The openings are the claimed evaluations of a polynomial + /// in a KZG proof. function error_proof_openings_size() { let ptError := mload(0x40) - mstore(ptError, error_string_id) // selector for function Error(string) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) mstore(add(ptError, 0x4), 0x20) mstore(add(ptError, 0x24), 0x16) mstore(add(ptError, 0x44), "openings bigger than r") @@ -239,22 +269,40 @@ contract PlonkVerifier { function error_verify() { let ptError := mload(0x40) - mstore(ptError, error_string_id) // selector for function Error(string) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) mstore(add(ptError, 0x4), 0x20) mstore(add(ptError, 0x24), 0xc) mstore(add(ptError, 0x44), "error verify") revert(ptError, 0x64) } + + function error_random_generation() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x14) + mstore(add(ptError, 0x44), "error random gen kzg") + revert(ptError, 0x64) + } // end errors ------------------------------------------------- // Beginning checks ------------------------------------------------- + + /// @param s actual number of public inputs + function check_number_of_public_inputs(s) { + if iszero(eq(s, VK_NB_PUBLIC_INPUTS)) { + error_nb_public_inputs() + } + } - // s number of public inputs, p pointer the public inputs + /// Checks that the public inputs are < R_MOD. + /// @param s number of public inputs + /// @param p pointer to the public inputs array function check_inputs_size(s, p) { let input_checks := 1 for {let i} lt(i, s) {i:=add(i,1)} { - input_checks := and(input_checks,lt(calldataload(p), r_mod)) + input_checks := and(input_checks,lt(calldataload(p), R_MOD)) p := add(p, 0x20) } if iszero(input_checks) { @@ -262,55 +310,60 @@ contract PlonkVerifier { } } + /// Checks if the proof is of the correct size + /// @param actual_proof_size size of the proof (not the expected size) function check_proof_size(actual_proof_size) { - let expected_proof_size := add(0x340, mul(vk_nb_custom_gates,0x60)) + let expected_proof_size := add(0x340, mul(VK_NB_CUSTOM_GATES,0x60)) if iszero(eq(actual_proof_size, expected_proof_size)) { error_proof_size() } } + /// Checks if the multiple openings of the polynomials are < R_MOD. 
+ /// @param aproof pointer to the beginning of the proof + /// @dev the 'a' prepending proof is to have a local name function check_proof_openings_size(aproof) { let openings_check := 1 // linearised polynomial at zeta - let p := add(aproof, proof_linearised_polynomial_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + let p := add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) // quotient polynomial at zeta - p := add(aproof, proof_quotient_polynomial_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + p := add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_l_at_zeta - p := add(aproof, proof_l_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + // PROOF_L_AT_ZETA + p := add(aproof, PROOF_L_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_r_at_zeta - p := add(aproof, proof_r_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + // PROOF_R_AT_ZETA + p := add(aproof, PROOF_R_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_o_at_zeta - p := add(aproof, proof_o_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + // PROOF_O_AT_ZETA + p := add(aproof, PROOF_O_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_s1_at_zeta - p := add(aproof, proof_s1_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + // PROOF_S1_AT_ZETA + p := add(aproof, PROOF_S1_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_s2_at_zeta - p := add(aproof, proof_s2_at_zeta) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + // PROOF_S2_AT_ZETA + p := add(aproof, PROOF_S2_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_grand_product_at_zeta_omega - p := add(aproof, proof_grand_product_at_zeta_omega) - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + // PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA + p := add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) - // proof_openings_qci_at_zeta + // PROOF_OPENING_QCP_AT_ZETA - p := add(aproof, proof_openings_qci_at_zeta) - for {let i:=0} lt(i, vk_nb_custom_gates) {i:=add(i,1)} + p := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + for {let i:=0} lt(i, VK_NB_CUSTOM_GATES) {i:=add(i,1)} { - openings_check := and(openings_check, lt(calldataload(p), r_mod)) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) p := add(p, 0x20) } @@ -323,47 +376,51 @@ contract PlonkVerifier { // Beginning challenges ------------------------------------------------- - // Derive gamma as Sha256() - // where transcript is the concatenation (in this order) of: - // * the word "gamma" in ascii, equal to [0x67,0x61,0x6d, 0x6d, 0x61] and encoded as a uint256. - // * the commitments to the permutation polynomials S1, S2, S3, where we concatenate the coordinates of those points - // * the commitments of Ql, Qr, Qm, Qo, Qk , Qc_i (in case of custom gate) - // * the public inputs - // * the commitments of the wires related to the custom gates (commitments_wires_commit_api) - // * commitments to L, R, O (proof__com_) - // The data described above is written starting at mPtr. 
"gamma" lies on 5 bytes, - // and is encoded as a uint256 number n. In basis b = 256, the number looks like this - // [0 0 0 .. 0x67 0x61 0x6d, 0x6d, 0x61]. The first non zero entry is at position 27=0x1b - // nb_pi, pi respectively number of public inputs and public inputs + /// Derive gamma as Sha256() + /// @param aproof pointer to the proof + /// @param nb_pi number of public inputs + /// @param pi pointer to the array of public inputs + /// @return the challenge gamma, not reduced + /// @notice The transcript is the concatenation (in this order) of: + /// * the word "gamma" in ascii, equal to [0x67,0x61,0x6d, 0x6d, 0x61] and encoded as a uint256. + /// * the commitments to the permutation polynomials S1, S2, S3, where we concatenate the coordinates of those points + /// * the commitments of Ql, Qr, Qm, Qo, Qk + /// * the public inputs + /// * the commitments of the wires related to the custom gates (commitments_wires_commit_api) + /// * commitments to L, R, O (proof__com_) + /// The data described above is written starting at mPtr. "gamma" lies on 5 bytes, + /// and is encoded as a uint256 number n. In basis b = 256, the number looks like this + /// [0 0 0 .. 0x67 0x61 0x6d, 0x6d, 0x61]. The first non zero entry is at position 27=0x1b + /// Gamma reduced (the actual challenge) is stored at add(state, state_gamma) function derive_gamma(aproof, nb_pi, pi)->gamma_not_reduced { let state := mload(0x40) - let mPtr := add(state, state_last_mem) + let mPtr := add(state, STATE_LAST_MEM) // gamma // gamma in ascii is [0x67,0x61,0x6d, 0x6d, 0x61] // (same for alpha, beta, zeta) mstore(mPtr, 0x67616d6d61) // "gamma" - mstore(add(mPtr, 0x20), vk_s1_com_x) - mstore(add(mPtr, 0x40), vk_s1_com_y) - mstore(add(mPtr, 0x60), vk_s2_com_x) - mstore(add(mPtr, 0x80), vk_s2_com_y) - mstore(add(mPtr, 0xa0), vk_s3_com_x) - mstore(add(mPtr, 0xc0), vk_s3_com_y) - mstore(add(mPtr, 0xe0), vk_ql_com_x) - mstore(add(mPtr, 0x100), vk_ql_com_y) - mstore(add(mPtr, 0x120), vk_qr_com_x) - mstore(add(mPtr, 0x140), vk_qr_com_y) - mstore(add(mPtr, 0x160), vk_qm_com_x) - mstore(add(mPtr, 0x180), vk_qm_com_y) - mstore(add(mPtr, 0x1a0), vk_qo_com_x) - mstore(add(mPtr, 0x1c0), vk_qo_com_y) - mstore(add(mPtr, 0x1e0), vk_qk_com_x) - mstore(add(mPtr, 0x200), vk_qk_com_y) + mstore(add(mPtr, 0x20), VK_S1_COM_X) + mstore(add(mPtr, 0x40), VK_S1_COM_Y) + mstore(add(mPtr, 0x60), VK_S2_COM_X) + mstore(add(mPtr, 0x80), VK_S2_COM_Y) + mstore(add(mPtr, 0xa0), VK_S3_COM_X) + mstore(add(mPtr, 0xc0), VK_S3_COM_Y) + mstore(add(mPtr, 0xe0), VK_QL_COM_X) + mstore(add(mPtr, 0x100), VK_QL_COM_Y) + mstore(add(mPtr, 0x120), VK_QR_COM_X) + mstore(add(mPtr, 0x140), VK_QR_COM_Y) + mstore(add(mPtr, 0x160), VK_QM_COM_X) + mstore(add(mPtr, 0x180), VK_QM_COM_Y) + mstore(add(mPtr, 0x1a0), VK_QO_COM_X) + mstore(add(mPtr, 0x1c0), VK_QO_COM_Y) + mstore(add(mPtr, 0x1e0), VK_QK_COM_X) + mstore(add(mPtr, 0x200), VK_QK_COM_Y) {{ range $index, $element := .CommitmentConstraintIndexes}} - mstore(add(mPtr, {{ hex (add 544 (mul $index 64)) }}), vk_qc_{{ $index }}_x) - mstore(add(mPtr, {{ hex (add 576 (mul $index 64)) }}), vk_qc_{{ $index }}_y) + mstore(add(mPtr, {{ hex (add 544 (mul $index 64)) }}), VK_QCP_{{ $index }}_X) + mstore(add(mPtr, {{ hex (add 576 (mul $index 64)) }}), VK_QCP_{{ $index }}_Y) {{ end }} // public inputs let _mPtr := add(mPtr, {{ hex (add (mul (len .CommitmentConstraintIndexes) 64) 544) }}) @@ -382,20 +439,25 @@ contract PlonkVerifier { // + nb_custom gates*0x40 let size := add(0x2c5, size_pi_in_bytes) {{ if (gt (len 
.CommitmentConstraintIndexes) 0 )}} - size := add(size, mul(vk_nb_custom_gates, 0x40)) + size := add(size, mul(VK_NB_CUSTOM_GATES, 0x40)) {{ end -}} let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1b), size, mPtr, 0x20) //0x1b -> 000.."gamma" if iszero(l_success) { error_verify() } gamma_not_reduced := mload(mPtr) - mstore(add(state, state_gamma), mod(gamma_not_reduced, r_mod)) + mstore(add(state, STATE_GAMMA), mod(gamma_not_reduced, R_MOD)) } + /// derive beta as Sha256 + /// @param gamma_not_reduced the previous challenge (gamma) not reduced + /// @return beta_not_reduced the next challenge, beta, not reduced + /// @notice the transcript consists of the previous challenge only. + /// The reduced version of beta is stored at add(state, state_beta) function derive_beta(gamma_not_reduced)->beta_not_reduced{ let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) + let mPtr := add(mload(0x40), STATE_LAST_MEM) // beta mstore(mPtr, 0x62657461) // "beta" @@ -405,14 +467,20 @@ contract PlonkVerifier { error_verify() } beta_not_reduced := mload(mPtr) - mstore(add(state, state_beta), mod(beta_not_reduced, r_mod)) + mstore(add(state, STATE_BETA), mod(beta_not_reduced, R_MOD)) } - // alpha depends on the previous challenge (beta) and on the commitment to the grand product polynomial + /// derive alpha as sha256 + /// @param aproof pointer to the proof object + /// @param beta_not_reduced the previous challenge (beta) not reduced + /// @return alpha_not_reduced the next challenge, alpha, not reduced + /// @notice the transcript consists of the previous challenge (beta) + /// not reduced, the commitments to the wires associated to the QCP_i, + /// and the commitment to the grand product polynomial function derive_alpha(aproof, beta_not_reduced)->alpha_not_reduced { let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) + let mPtr := add(mload(0x40), STATE_LAST_MEM) let full_size := 0x65 // size("alpha") + 0x20 (previous challenge) // alpha @@ -422,52 +490,59 @@ contract PlonkVerifier { _mPtr := add(_mPtr, 0x20) {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} // Bsb22Commitments - let proof_bsb_commitments := add(aproof, proof_openings_qci_at_zeta) - proof_bsb_commitments := add(proof_bsb_commitments, mul(0x20, vk_nb_custom_gates)) - let size_bsb_commitments := mul(0x40, vk_nb_custom_gates) + let proof_bsb_commitments := add(aproof, PROOF_COMMITMENTS_WIRES_CUSTOM_GATES) + let size_bsb_commitments := mul(0x40, VK_NB_CUSTOM_GATES) calldatacopy(_mPtr, proof_bsb_commitments, size_bsb_commitments) _mPtr := add(_mPtr, size_bsb_commitments) full_size := add(full_size, size_bsb_commitments) {{ end }} // [Z], the commitment to the grand product polynomial - calldatacopy(_mPtr, add(aproof, proof_grand_product_commitment_x), 0x40) + calldatacopy(_mPtr, add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X), 0x40) let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1b), full_size, mPtr, 0x20) if iszero(l_success) { error_verify() } alpha_not_reduced := mload(mPtr) - mstore(add(state, state_alpha), mod(alpha_not_reduced, r_mod)) + mstore(add(state, STATE_ALPHA), mod(alpha_not_reduced, R_MOD)) } - // zeta depends on the previous challenge (alpha) and on the commitment to the quotient polynomial + /// derive zeta as sha256 + /// @param aproof pointer to the proof object + /// @param alpha_not_reduced the previous challenge (alpha) not reduced + /// The transcript consists of the previous challenge and the commitment to + /// the quotient polynomial h. 
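+ /// The reduced version of zeta is stored at add(state, STATE_ZETA)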
function derive_zeta(aproof, alpha_not_reduced) { let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) + let mPtr := add(mload(0x40), STATE_LAST_MEM) // zeta mstore(mPtr, 0x7a657461) // "zeta" mstore(add(mPtr, 0x20), alpha_not_reduced) - calldatacopy(add(mPtr, 0x40), add(aproof, proof_h_0_x), 0xc0) + calldatacopy(add(mPtr, 0x40), add(aproof, PROOF_H_0_X), 0xc0) let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1c), 0xe4, mPtr, 0x20) if iszero(l_success) { error_verify() } let zeta_not_reduced := mload(mPtr) - mstore(add(state, state_zeta), mod(zeta_not_reduced, r_mod)) + mstore(add(state, STATE_ZETA), mod(zeta_not_reduced, R_MOD)) } // END challenges ------------------------------------------------- // BEGINNING compute_pi ------------------------------------------------- - // public input (not coming from the commit api) contribution - // ins, n are the public inputs and number of public inputs respectively + /// sum_pi_wo_api_commit computes the public inputs contributions, + /// except for the public inputs coming from the custom gate + /// @param ins pointer to the public inputs + /// @param n number of public inputs + /// @param mPtr free memory + /// @return pi_wo_commit public inputs contribution (except the public inputs coming from the custom gate) function sum_pi_wo_api_commit(ins, n, mPtr)->pi_wo_commit { let state := mload(0x40) - let z := mload(add(state, state_zeta)) - let zpnmo := mload(add(state, state_zeta_power_n_minus_one)) + let z := mload(add(state, STATE_ZETA)) + let zpnmo := mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)) let li := mPtr batch_compute_lagranges_at_z(z, zpnmo, n, li) @@ -475,31 +550,29 @@ contract PlonkVerifier { let tmp := 0 for {let i:=0} lt(i,n) {i:=add(i,1)} { - tmp := mulmod(mload(li), calldataload(ins), r_mod) - pi_wo_commit := addmod(pi_wo_commit, tmp, r_mod) + tmp := mulmod(mload(li), calldataload(ins), R_MOD) + pi_wo_commit := addmod(pi_wo_commit, tmp, R_MOD) li := add(li, 0x20) ins := add(ins, 0x20) } } - // mPtr <- [L_0(z), .., L_{n-1}(z)] - // - // Here L_i(zeta) = ωⁱ/n * (ζⁿ-1)/(ζ-ωⁱ) where: - // * n = vk_domain_size - // * ω = vk_omega (generator of the multiplicative cyclic group of order n in (ℤ/rℤ)*) - // * ζ = z (challenge derived with Fiat Shamir) - // * zpnmo = 'zeta power n minus one' (ζⁿ-1) which has been precomputed + /// batch_compute_lagranges_at_z computes [L_0(z), .., L_{n-1}(z)] + /// @param z point at which the Lagranges are evaluated + /// @param zpnmo ζⁿ-1 + /// @param n number of public inputs (number of Lagranges to compute) + /// @param mPtr pointer to which the results are stored function batch_compute_lagranges_at_z(z, zpnmo, n, mPtr) { - let zn := mulmod(zpnmo, vk_inv_domain_size, r_mod) // 1/n * (ζⁿ - 1) + let zn := mulmod(zpnmo, VK_INV_DOMAIN_SIZE, R_MOD) // 1/n * (ζⁿ - 1) let _w := 1 let _mPtr := mPtr for {let i:=0} lt(i,n) {i:=add(i,1)} { - mstore(_mPtr, addmod(z,sub(r_mod, _w), r_mod)) - _w := mulmod(_w, vk_omega, r_mod) + mstore(_mPtr, addmod(z,sub(R_MOD, _w), R_MOD)) + _w := mulmod(_w, VK_OMEGA, R_MOD) _mPtr := add(_mPtr, 0x20) } batch_invert(mPtr, n, _mPtr) @@ -507,13 +580,16 @@ contract PlonkVerifier { _w := 1 for {let i:=0} lt(i,n) {i:=add(i,1)} { - mstore(_mPtr, mulmod(mulmod(mload(_mPtr), zn , r_mod), _w, r_mod)) + mstore(_mPtr, mulmod(mulmod(mload(_mPtr), zn , R_MOD), _w, R_MOD)) _mPtr := add(_mPtr, 0x20) - _w := mulmod(_w, vk_omega, r_mod) + _w := mulmod(_w, VK_OMEGA, R_MOD) } } - // batch invert (modulo r) in place the nb_ins uint256 inputs starting at ins. 
+ /// @notice Montgomery trick for batch inversion mod R_MOD + /// @param ins pointer to the data to batch invert + /// @param number of elements to batch invert + /// @param mPtr free memory function batch_invert(ins, nb_ins, mPtr) { mstore(mPtr, 1) let offset := 0 @@ -521,72 +597,79 @@ contract PlonkVerifier { { let prev := mload(add(mPtr, offset)) let cur := mload(add(ins, offset)) - cur := mulmod(prev, cur, r_mod) + cur := mulmod(prev, cur, R_MOD) offset := add(offset, 0x20) mstore(add(mPtr, offset), cur) } ins := add(ins, sub(offset, 0x20)) mPtr := add(mPtr, offset) - let inv := pow(mload(mPtr), sub(r_mod,2), add(mPtr, 0x20)) + let inv := pow(mload(mPtr), sub(R_MOD,2), add(mPtr, 0x20)) for {let i:=0} lt(i, nb_ins) {i:=add(i,1)} { mPtr := sub(mPtr, 0x20) let tmp := mload(ins) - let cur := mulmod(inv, mload(mPtr), r_mod) + let cur := mulmod(inv, mload(mPtr), R_MOD) mstore(ins, cur) - inv := mulmod(inv, tmp, r_mod) + inv := mulmod(inv, tmp, R_MOD) ins := sub(ins, 0x20) } } {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} - // mPtr free memory. Computes the public input contribution related to the commit + /// Public inputs (the ones coming from the custom gate) contribution + /// @param aproof pointer to the proof + /// @param nb_public_inputs number of public inputs + /// @param mPtr pointer to free memory + /// @return pi_commit custom gate public inputs contribution function sum_pi_commit(aproof, nb_public_inputs, mPtr)->pi_commit { let state := mload(0x40) - let z := mload(add(state, state_zeta)) - let zpnmo := mload(add(state, state_zeta_power_n_minus_one)) + let z := mload(add(state, STATE_ZETA)) + let zpnmo := mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)) - let p := add(aproof, proof_openings_qci_at_zeta) - p := add(p, mul(vk_nb_custom_gates, 0x20)) // p points now to the wire commitments + let p := add(aproof, PROOF_COMMITMENTS_WIRES_CUSTOM_GATES) let h_fr, ith_lagrange {{ range $index, $element := .CommitmentConstraintIndexes}} h_fr := hash_fr(calldataload(p), calldataload(add(p, 0x20)), mPtr) - ith_lagrange := compute_ith_lagrange_at_z(z, zpnmo, add(nb_public_inputs, vk_index_commit_api_{{ $index }}), mPtr) - pi_commit := addmod(pi_commit, mulmod(h_fr, ith_lagrange, r_mod), r_mod) + ith_lagrange := compute_ith_lagrange_at_z(z, zpnmo, add(nb_public_inputs, VK_INDEX_COMMIT_API{{ $index }}), mPtr) + pi_commit := addmod(pi_commit, mulmod(h_fr, ith_lagrange, R_MOD), R_MOD) p := add(p, 0x40) {{ end }} } - // z zeta - // zpmno ζⁿ-1 - // i i-th lagrange - // mPtr free memory - // Computes L_i(zeta) = ωⁱ/n * (ζⁿ-1)/(ζ-ωⁱ) where: + /// Computes L_i(zeta) = ωⁱ/n * (ζⁿ-1)/(ζ-ωⁱ) where: + /// @param z zeta + /// @param zpmno ζⁿ-1 + /// @param i i-th lagrange + /// @param mPtr free memory + /// @return res = ωⁱ/n * (ζⁿ-1)/(ζ-ωⁱ) function compute_ith_lagrange_at_z(z, zpnmo, i, mPtr)->res { - let w := pow(vk_omega, i, mPtr) // w**i - i := addmod(z, sub(r_mod, w), r_mod) // z-w**i - w := mulmod(w, vk_inv_domain_size, r_mod) // w**i/n - i := pow(i, sub(r_mod,2), mPtr) // (z-w**i)**-1 - w := mulmod(w, i, r_mod) // w**i/n*(z-w)**-1 - res := mulmod(w, zpnmo, r_mod) + let w := pow(VK_OMEGA, i, mPtr) // w**i + i := addmod(z, sub(R_MOD, w), R_MOD) // z-w**i + w := mulmod(w, VK_INV_DOMAIN_SIZE, R_MOD) // w**i/n + i := pow(i, sub(R_MOD,2), mPtr) // (z-w**i)**-1 + w := mulmod(w, i, R_MOD) // w**i/n*(z-w)**-1 + res := mulmod(w, zpnmo, R_MOD) } - // (x, y) point on bn254, both on 32bytes - // mPtr free memory + /// @dev https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-06#section-5.2 + /// 
@param x x coordinate of a point on Bn254(𝔽_p) + /// @param y y coordinate of a point on Bn254(𝔽_p) + /// @param mPtr free memory + /// @return res an element mod R_MOD function hash_fr(x, y, mPtr)->res { - // [0x00, .. , 0x00 || x, y, || 0, 48, 0, dst, sizeDomain] + // [0x00, .. , 0x00 || x, y, || 0, 48, 0, dst, HASH_FR_SIZE_DOMAIN] // <- 64 bytes -> <-64b -> <- 1 bytes each -> // [0x00, .., 0x00] 64 bytes of zero - mstore(mPtr, zero_uint256) - mstore(add(mPtr, 0x20), zero_uint256) + mstore(mPtr, HASH_FR_ZERO_UINT256) + mstore(add(mPtr, 0x20), HASH_FR_ZERO_UINT256) // msg = x || y , both on 32 bytes mstore(add(mPtr, 0x40), x) @@ -594,7 +677,7 @@ contract PlonkVerifier { // 0 || 48 || 0 all on 1 byte mstore8(add(mPtr, 0x80), 0) - mstore8(add(mPtr, 0x81), lenInBytes) + mstore8(add(mPtr, 0x81), HASH_FR_LEN_IN_BYTES) mstore8(add(mPtr, 0x82), 0) // "BSB22-Plonk" = [42, 53, 42, 32, 32, 2d, 50, 6c, 6f, 6e, 6b,] @@ -611,7 +694,7 @@ contract PlonkVerifier { mstore8(add(mPtr, 0x8d), 0x6b) // size domain - mstore8(add(mPtr, 0x8e), sizeDomain) + mstore8(add(mPtr, 0x8e), HASH_FR_SIZE_DOMAIN) let l_success := staticcall(gas(), 0x2, mPtr, 0x8f, mPtr, 0x20) if iszero(l_success) { @@ -620,9 +703,9 @@ contract PlonkVerifier { let b0 := mload(mPtr) - // [b0 || one || dst || sizeDomain] + // [b0 || one || dst || HASH_FR_SIZE_DOMAIN] // <-64bytes -> <- 1 byte each -> - mstore8(add(mPtr, 0x20), one) // 1 + mstore8(add(mPtr, 0x20), HASH_FR_ONE) // 1 mstore8(add(mPtr, 0x21), 0x42) // dst mstore8(add(mPtr, 0x22), 0x53) @@ -636,7 +719,7 @@ contract PlonkVerifier { mstore8(add(mPtr, 0x2a), 0x6e) mstore8(add(mPtr, 0x2b), 0x6b) - mstore8(add(mPtr, 0x2c), sizeDomain) // size domain + mstore8(add(mPtr, 0x2c), HASH_FR_SIZE_DOMAIN) // size domain l_success := staticcall(gas(), 0x2, mPtr, 0x2d, mPtr, 0x20) if iszero(l_success) { error_verify() @@ -644,10 +727,10 @@ contract PlonkVerifier { // b1 is located at mPtr. We store b2 at add(mPtr, 0x20) - // [b0^b1 || two || dst || sizeDomain] + // [b0^b1 || two || dst || HASH_FR_SIZE_DOMAIN] // <-64bytes -> <- 1 byte each -> mstore(add(mPtr, 0x20), xor(mload(mPtr), b0)) - mstore8(add(mPtr, 0x40), two) + mstore8(add(mPtr, 0x40), HASH_FR_TWO) mstore8(add(mPtr, 0x41), 0x42) // dst mstore8(add(mPtr, 0x42), 0x53) @@ -661,7 +744,7 @@ contract PlonkVerifier { mstore8(add(mPtr, 0x4a), 0x6e) mstore8(add(mPtr, 0x4b), 0x6b) - mstore8(add(mPtr, 0x4c), sizeDomain) // size domain + mstore8(add(mPtr, 0x4c), HASH_FR_SIZE_DOMAIN) // size domain let offset := add(mPtr, 0x20) l_success := staticcall(gas(), 0x2, offset, 0x2d, offset, 0x20) @@ -672,237 +755,251 @@ contract PlonkVerifier { // at this point we have mPtr = [ b1 || b2] where b1 is on 32byes and b2 in 16bytes. // we interpret it as a big integer mod r in big endian (similar to regular decimal notation) // the result is then 2**(8*16)*mPtr[32:] + mPtr[32:48] - res := mulmod(mload(mPtr), bb, r_mod) // <- res = 2**128 * mPtr[:32] - offset := add(mPtr, 0x10) - for {let i:=0} lt(i, 0x10) {i:=add(i,1)} // mPtr <- [xx, xx, .., | 0, 0, .. 
0 || b2 ] - { - mstore8(offset, 0x00) - offset := add(offset, 0x1) - } - let b1 := mload(add(mPtr, 0x10)) // b1 <- [0, 0, .., 0 || b2[:16] ] - res := addmod(res, b1, r_mod) + res := mulmod(mload(mPtr), HASH_FR_BB, R_MOD) // <- res = 2**128 * mPtr[:32] + let b1 := shr(128, mload(add(mPtr, 0x20))) // b1 <- [0, 0, .., 0 || b2[:16] ] + res := addmod(res, b1, R_MOD) } {{ end }} // END compute_pi ------------------------------------------------- - // compute α² * 1/n * (ζ{n}-1)/(ζ - 1) where - // * α = challenge derived in derive_gamma_beta_alpha_zeta - // * n = vk_domain_size - // * ω = vk_omega (generator of the multiplicative cyclic group of order n in (ℤ/rℤ)*) - // * ζ = zeta (challenge derived with Fiat Shamir) + /// @notice compute α² * 1/n * (ζ{n}-1)/(ζ - 1) where + /// * α = challenge derived in derive_gamma_beta_alpha_zeta + /// * n = vk_domain_size + /// * ω = vk_omega (generator of the multiplicative cyclic group of order n in (ℤ/rℤ)*) + /// * ζ = zeta (challenge derived with Fiat Shamir) function compute_alpha_square_lagrange_0() { let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) - - let res := mload(add(state, state_zeta_power_n_minus_one)) - let den := addmod(mload(add(state, state_zeta)), sub(r_mod, 1), r_mod) - den := pow(den, sub(r_mod, 2), mPtr) - den := mulmod(den, vk_inv_domain_size, r_mod) - res := mulmod(den, res, r_mod) - - let l_alpha := mload(add(state, state_alpha)) - res := mulmod(res, l_alpha, r_mod) - res := mulmod(res, l_alpha, r_mod) - mstore(add(state, state_alpha_square_lagrange_0), res) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + + let res := mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)) + let den := addmod(mload(add(state, STATE_ZETA)), sub(R_MOD, 1), R_MOD) + den := pow(den, sub(R_MOD, 2), mPtr) + den := mulmod(den, VK_INV_DOMAIN_SIZE, R_MOD) + res := mulmod(den, res, R_MOD) + + let l_alpha := mload(add(state, STATE_ALPHA)) + res := mulmod(res, l_alpha, R_MOD) + res := mulmod(res, l_alpha, R_MOD) + mstore(add(state, STATE_ALPHA_SQUARE_LAGRANGE_0), res) } - // follows alg. p.13 of https://eprint.iacr.org/2019/953.pdf - // with t₁ = t₂ = 1, and the proofs are ([digest] + [quotient] +purported evaluation): - // * [state_folded_state_digests], [proof_batch_opening_at_zeta_x], state_folded_evals - // * [proof_grand_product_commitment], [proof_opening_at_zeta_omega_x], [proof_grand_product_at_zeta_omega] + /// @notice follows alg. p.13 of https://eprint.iacr.org/2019/953.pdf + /// with t₁ = t₂ = 1, and the proofs are ([digest] + [quotient] +purported evaluation): + /// * [state_folded_state_digests], [proof_batch_opening_at_zeta_x], state_folded_evals + /// * [proof_grand_product_commitment], [proof_opening_at_zeta_omega_x], [proof_grand_product_at_zeta_omega] + /// @param aproof pointer to the proof function batch_verify_multi_points(aproof) { let state := mload(0x40) - let mPtr := add(state, state_last_mem) - - // here the random is not a challenge, hence no need to use Fiat Shamir, we just - // need an unpredictable result. - let random := mod(keccak256(state, 0x20), r_mod) + let mPtr := add(state, STATE_LAST_MEM) + + // derive a random number. As there is no random generator, we + // do an FS like challenge derivation, depending on both digests and + // ζ to ensure that the prover cannot control the random numger. + // Note: adding the other point ζω is not needed, as ω is known beforehand. 
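+ // The 0x140 bytes hashed below are, in this order: the folded digest (x,y), the batch
+ // opening proof at ζ (x,y), the commitment to Z (x,y), the opening proof at ζω (x,y),
+ // ζ and the kzg challenge γ, hashed with the sha256 precompile (0x2).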
+ mstore(mPtr, mload(add(state, STATE_FOLDED_DIGESTS_X))) + mstore(add(mPtr, 0x20), mload(add(state, STATE_FOLDED_DIGESTS_Y))) + mstore(add(mPtr, 0x40), calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_X))) + mstore(add(mPtr, 0x60), calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_Y))) + mstore(add(mPtr, 0x80), calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X))) + mstore(add(mPtr, 0xa0), calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_Y))) + mstore(add(mPtr, 0xc0), calldataload(add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_X))) + mstore(add(mPtr, 0xe0), calldataload(add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_Y))) + mstore(add(mPtr, 0x100), mload(add(state, STATE_ZETA))) + mstore(add(mPtr, 0x120), mload(add(state, STATE_GAMMA_KZG))) + let random := staticcall(gas(), 0x2, mPtr, 0x140, mPtr, 0x20) + if iszero(random){ + error_random_generation() + } + random := mod(mload(mPtr), R_MOD) // use the same variable as we are one variable away from getting stack-too-deep error... let folded_quotients := mPtr mPtr := add(folded_quotients, 0x40) - mstore(folded_quotients, calldataload(add(aproof, proof_batch_opening_at_zeta_x))) - mstore(add(folded_quotients, 0x20), calldataload(add(aproof, proof_batch_opening_at_zeta_y))) - point_acc_mul_calldata(folded_quotients, add(aproof, proof_opening_at_zeta_omega_x), random, mPtr) + mstore(folded_quotients, calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_X))) + mstore(add(folded_quotients, 0x20), calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_Y))) + point_acc_mul_calldata(folded_quotients, add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_X), random, mPtr) - let folded_digests := add(state, state_folded_digests_x) - point_acc_mul_calldata(folded_digests, add(aproof, proof_grand_product_commitment_x), random, mPtr) + let folded_digests := add(state, STATE_FOLDED_DIGESTS_X) + point_acc_mul_calldata(folded_digests, add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X), random, mPtr) - let folded_evals := add(state, state_folded_claimed_values) - fr_acc_mul_calldata(folded_evals, add(aproof, proof_grand_product_at_zeta_omega), random) + let folded_evals := add(state, STATE_FOLDED_CLAIMED_VALUES) + fr_acc_mul_calldata(folded_evals, add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA), random) let folded_evals_commit := mPtr mPtr := add(folded_evals_commit, 0x40) - mstore(folded_evals_commit, {{ fpstr .Kzg.G1.X }}) - mstore(add(folded_evals_commit, 0x20), {{ fpstr .Kzg.G1.Y }}) + mstore(folded_evals_commit, G1_SRS_X) + mstore(add(folded_evals_commit, 0x20), G1_SRS_Y) mstore(add(folded_evals_commit, 0x40), mload(folded_evals)) let check_staticcall := staticcall(gas(), 7, folded_evals_commit, 0x60, folded_evals_commit, 0x40) - if eq(check_staticcall, 0) { + if iszero(check_staticcall) { error_verify() } let folded_evals_commit_y := add(folded_evals_commit, 0x20) - mstore(folded_evals_commit_y, sub(p_mod, mload(folded_evals_commit_y))) + mstore(folded_evals_commit_y, sub(P_MOD, mload(folded_evals_commit_y))) point_add(folded_digests, folded_digests, folded_evals_commit, mPtr) let folded_points_quotients := mPtr mPtr := add(mPtr, 0x40) point_mul_calldata( folded_points_quotients, - add(aproof, proof_batch_opening_at_zeta_x), - mload(add(state, state_zeta)), + add(aproof, PROOF_BATCH_OPENING_AT_ZETA_X), + mload(add(state, STATE_ZETA)), mPtr ) - let zeta_omega := mulmod(mload(add(state, state_zeta)), vk_omega, r_mod) - random := mulmod(random, zeta_omega, r_mod) - point_acc_mul_calldata(folded_points_quotients, add(aproof, proof_opening_at_zeta_omega_x), random, mPtr) 
+ let zeta_omega := mulmod(mload(add(state, STATE_ZETA)), VK_OMEGA, R_MOD) + random := mulmod(random, zeta_omega, R_MOD) + point_acc_mul_calldata(folded_points_quotients, add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_X), random, mPtr) point_add(folded_digests, folded_digests, folded_points_quotients, mPtr) let folded_quotients_y := add(folded_quotients, 0x20) - mstore(folded_quotients_y, sub(p_mod, mload(folded_quotients_y))) + mstore(folded_quotients_y, sub(P_MOD, mload(folded_quotients_y))) mstore(mPtr, mload(folded_digests)) mstore(add(mPtr, 0x20), mload(add(folded_digests, 0x20))) - mstore(add(mPtr, 0x40), g2_srs_0_x_0) // the 4 lines are the canonical G2 point on BN254 - mstore(add(mPtr, 0x60), g2_srs_0_x_1) - mstore(add(mPtr, 0x80), g2_srs_0_y_0) - mstore(add(mPtr, 0xa0), g2_srs_0_y_1) + mstore(add(mPtr, 0x40), G2_SRS_0_X_0) // the 4 lines are the canonical G2 point on BN254 + mstore(add(mPtr, 0x60), G2_SRS_0_X_1) + mstore(add(mPtr, 0x80), G2_SRS_0_Y_0) + mstore(add(mPtr, 0xa0), G2_SRS_0_Y_1) mstore(add(mPtr, 0xc0), mload(folded_quotients)) mstore(add(mPtr, 0xe0), mload(add(folded_quotients, 0x20))) - mstore(add(mPtr, 0x100), g2_srs_1_x_0) - mstore(add(mPtr, 0x120), g2_srs_1_x_1) - mstore(add(mPtr, 0x140), g2_srs_1_y_0) - mstore(add(mPtr, 0x160), g2_srs_1_y_1) + mstore(add(mPtr, 0x100), G2_SRS_1_X_0) + mstore(add(mPtr, 0x120), G2_SRS_1_X_1) + mstore(add(mPtr, 0x140), G2_SRS_1_Y_0) + mstore(add(mPtr, 0x160), G2_SRS_1_Y_1) check_pairing_kzg(mPtr) } - // check_pairing_kzg checks the result of the final pairing product of the batched - // kzg verification. The purpose of this function is too avoid exhausting the stack - // in the function batch_verify_multi_points. - // mPtr: pointer storing the tuple of pairs + /// @notice check_pairing_kzg checks the result of the final pairing product of the batched + /// kzg verification. The purpose of this function is to avoid exhausting the stack + /// in the function batch_verify_multi_points. 
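+ /// @dev the 0x180 bytes at mPtr hold two (G1, G2) pairs fed to the pairing precompile (0x8);
+ /// its result is ANDed into the flag stored at add(state, STATE_SUCCESS).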
+ /// @param mPtr pointer storing the tuple of pairs function check_pairing_kzg(mPtr) { let state := mload(0x40) // TODO test the staticcall using the method from audit_4-5 let l_success := staticcall(gas(), 8, mPtr, 0x180, 0x00, 0x20) let res_pairing := mload(0x00) - let s_success := mload(add(state, state_success)) + let s_success := mload(add(state, STATE_SUCCESS)) res_pairing := and(and(res_pairing, l_success), s_success) - mstore(add(state, state_success), res_pairing) + mstore(add(state, STATE_SUCCESS), res_pairing) } - // Fold the opening proofs at ζ: - // * at state+state_folded_digest we store: [H] + γ[Linearised_polynomial]+γ²[L] + γ³[R] + γ⁴[O] + γ⁵[S₁] +γ⁶[S₂] + ∑ᵢγ⁶⁺ⁱ[Pi_{i}] - // * at state+state_folded_claimed_values we store: H(ζ) + γLinearised_polynomial(ζ)+γ²L(ζ) + γ³R(ζ)+ γ⁴O(ζ) + γ⁵S₁(ζ) +γ⁶S₂(ζ) + ∑ᵢγ⁶⁺ⁱPi_{i}(ζ) - // acc_gamma stores the γⁱ + /// @notice Fold the opening proofs at ζ: + /// * at state+state_folded_digest we store: [H] + γ[Linearised_polynomial]+γ²[L] + γ³[R] + γ⁴[O] + γ⁵[S₁] +γ⁶[S₂] + ∑ᵢγ⁶⁺ⁱ[Pi_{i}] + /// * at state+state_folded_claimed_values we store: H(ζ) + γLinearised_polynomial(ζ)+γ²L(ζ) + γ³R(ζ)+ γ⁴O(ζ) + γ⁵S₁(ζ) +γ⁶S₂(ζ) + ∑ᵢγ⁶⁺ⁱPi_{i}(ζ) + /// @param aproof pointer to the proof + /// acc_gamma stores the γⁱ function fold_state(aproof) { let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) + let mPtr := add(mload(0x40), STATE_LAST_MEM) let mPtr20 := add(mPtr, 0x20) let mPtr40 := add(mPtr, 0x40) - let l_gamma_kzg := mload(add(state, state_gamma_kzg)) + let l_gamma_kzg := mload(add(state, STATE_GAMMA_KZG)) let acc_gamma := l_gamma_kzg - let state_folded_digests := add(state, state_folded_digests_x) + let state_folded_digests := add(state, STATE_FOLDED_DIGESTS_X) - mstore(add(state, state_folded_digests_x), mload(add(state, state_folded_h_x))) - mstore(add(state, state_folded_digests_y), mload(add(state, state_folded_h_y))) - mstore(add(state, state_folded_claimed_values), calldataload(add(aproof, proof_quotient_polynomial_at_zeta))) + mstore(add(state, STATE_FOLDED_DIGESTS_X), mload(add(state, STATE_FOLDED_H_X))) + mstore(add(state, STATE_FOLDED_DIGESTS_Y), mload(add(state, STATE_FOLDED_H_Y))) + mstore(add(state, STATE_FOLDED_CLAIMED_VALUES), calldataload(add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA))) - point_acc_mul(state_folded_digests, add(state, state_linearised_polynomial_x), acc_gamma, mPtr) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), add(aproof, proof_linearised_polynomial_at_zeta), acc_gamma) + point_acc_mul(state_folded_digests, add(state, STATE_LINEARISED_POLYNOMIAL_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA), acc_gamma) - acc_gamma := mulmod(acc_gamma, l_gamma_kzg, r_mod) - point_acc_mul_calldata(add(state, state_folded_digests_x), add(aproof, proof_l_com_x), acc_gamma, mPtr) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), add(aproof, proof_l_at_zeta), acc_gamma) + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + point_acc_mul_calldata(add(state, STATE_FOLDED_DIGESTS_X), add(aproof, PROOF_L_COM_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_L_AT_ZETA), acc_gamma) - acc_gamma := mulmod(acc_gamma, l_gamma_kzg, r_mod) - point_acc_mul_calldata(state_folded_digests, add(aproof, proof_r_com_x), acc_gamma, mPtr) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), add(aproof, proof_r_at_zeta), acc_gamma) + acc_gamma := 
mulmod(acc_gamma, l_gamma_kzg, R_MOD) + point_acc_mul_calldata(state_folded_digests, add(aproof, PROOF_R_COM_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_R_AT_ZETA), acc_gamma) - acc_gamma := mulmod(acc_gamma, l_gamma_kzg, r_mod) - point_acc_mul_calldata(state_folded_digests, add(aproof, proof_o_com_x), acc_gamma, mPtr) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), add(aproof, proof_o_at_zeta), acc_gamma) + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + point_acc_mul_calldata(state_folded_digests, add(aproof, PROOF_O_COM_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_O_AT_ZETA), acc_gamma) - acc_gamma := mulmod(acc_gamma, l_gamma_kzg, r_mod) - mstore(mPtr, vk_s1_com_x) - mstore(mPtr20, vk_s1_com_y) + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + mstore(mPtr, VK_S1_COM_X) + mstore(mPtr20, VK_S1_COM_Y) point_acc_mul(state_folded_digests, mPtr, acc_gamma, mPtr40) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), add(aproof, proof_s1_at_zeta), acc_gamma) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_S1_AT_ZETA), acc_gamma) - acc_gamma := mulmod(acc_gamma, l_gamma_kzg, r_mod) - mstore(mPtr, vk_s2_com_x) - mstore(mPtr20, vk_s2_com_y) + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + mstore(mPtr, VK_S2_COM_X) + mstore(mPtr20, VK_S2_COM_Y) point_acc_mul(state_folded_digests, mPtr, acc_gamma, mPtr40) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), add(aproof, proof_s2_at_zeta), acc_gamma) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_S2_AT_ZETA), acc_gamma) {{- if (gt (len .CommitmentConstraintIndexes) 0 ) }} - let poscaz := add(aproof, proof_openings_qci_at_zeta) + let poscaz := add(aproof, PROOF_OPENING_QCP_AT_ZETA) {{ end -}} {{ range $index, $element := .CommitmentConstraintIndexes }} - acc_gamma := mulmod(acc_gamma, l_gamma_kzg, r_mod) - mstore(mPtr, vk_qc_{{ $index }}_x) - mstore(mPtr20, vk_qc_{{ $index }}_y) + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + mstore(mPtr, VK_QCP_{{ $index }}_X) + mstore(mPtr20, VK_QCP_{{ $index }}_Y) point_acc_mul(state_folded_digests, mPtr, acc_gamma, mPtr40) - fr_acc_mul_calldata(add(state, state_folded_claimed_values), poscaz, acc_gamma) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), poscaz, acc_gamma) poscaz := add(poscaz, 0x20) {{ end }} } - // generate the challenge (using Fiat Shamir) to fold the opening proofs - // at ζ. - // The process for deriving γ is the same as in derive_gamma but this time the inputs are - // in this order (the [] means it's a commitment): - // * ζ - // * [H] ( = H₁ + ζᵐ⁺²*H₂ + ζ²⁽ᵐ⁺²⁾*H₃ ) - // * [Linearised polynomial] - // * [L], [R], [O] - // * [S₁] [S₂] - // * [Pi_{i}] (wires associated to custom gates) - // Then there are the purported evaluations of the previous committed polynomials: - // * H(ζ) - // * Linearised_polynomial(ζ) - // * L(ζ), R(ζ), O(ζ), S₁(ζ), S₂(ζ) - // * Pi_{i}(ζ) + /// @notice generate the challenge (using Fiat Shamir) to fold the opening proofs + /// at ζ. 
+ /// The process for deriving γ is the same as in derive_gamma but this time the inputs are + /// in this order (the [] means it's a commitment): + /// * ζ + /// * [H] ( = H₁ + ζᵐ⁺²*H₂ + ζ²⁽ᵐ⁺²⁾*H₃ ) + /// * [Linearised polynomial] + /// * [L], [R], [O] + /// * [S₁] [S₂] + /// * [Pi_{i}] (wires associated to custom gates) + /// Then there are the purported evaluations of the previous committed polynomials: + /// * H(ζ) + /// * Linearised_polynomial(ζ) + /// * L(ζ), R(ζ), O(ζ), S₁(ζ), S₂(ζ) + /// * Pi_{i}(ζ) + /// * Z(ζω) + /// @param aproof pointer to the proof function compute_gamma_kzg(aproof) { let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) + let mPtr := add(mload(0x40), STATE_LAST_MEM) mstore(mPtr, 0x67616d6d61) // "gamma" - mstore(add(mPtr, 0x20), mload(add(state, state_zeta))) - mstore(add(mPtr,0x40), mload(add(state, state_folded_h_x))) - mstore(add(mPtr,0x60), mload(add(state, state_folded_h_y))) - mstore(add(mPtr,0x80), mload(add(state, state_linearised_polynomial_x))) - mstore(add(mPtr,0xa0), mload(add(state, state_linearised_polynomial_y))) - calldatacopy(add(mPtr, 0xc0), add(aproof, proof_l_com_x), 0xc0) - mstore(add(mPtr,0x180), vk_s1_com_x) - mstore(add(mPtr,0x1a0), vk_s1_com_y) - mstore(add(mPtr,0x1c0), vk_s2_com_x) - mstore(add(mPtr,0x1e0), vk_s2_com_y) + mstore(add(mPtr, 0x20), mload(add(state, STATE_ZETA))) + mstore(add(mPtr,0x40), mload(add(state, STATE_FOLDED_H_X))) + mstore(add(mPtr,0x60), mload(add(state, STATE_FOLDED_H_Y))) + mstore(add(mPtr,0x80), mload(add(state, STATE_LINEARISED_POLYNOMIAL_X))) + mstore(add(mPtr,0xa0), mload(add(state, STATE_LINEARISED_POLYNOMIAL_Y))) + calldatacopy(add(mPtr, 0xc0), add(aproof, PROOF_L_COM_X), 0xc0) + mstore(add(mPtr,0x180), VK_S1_COM_X) + mstore(add(mPtr,0x1a0), VK_S1_COM_Y) + mstore(add(mPtr,0x1c0), VK_S2_COM_X) + mstore(add(mPtr,0x1e0), VK_S2_COM_Y) let offset := 0x200 {{ range $index, $element := .CommitmentConstraintIndexes }} - mstore(add(mPtr,offset), vk_qc_{{ $index }}_x) - mstore(add(mPtr,add(offset, 0x20)), vk_qc_{{ $index }}_y) + mstore(add(mPtr,offset), VK_QCP_{{ $index }}_X) + mstore(add(mPtr,add(offset, 0x20)), VK_QCP_{{ $index }}_Y) offset := add(offset, 0x40) {{ end }} - mstore(add(mPtr, offset), calldataload(add(aproof, proof_quotient_polynomial_at_zeta))) - mstore(add(mPtr, add(offset, 0x20)), calldataload(add(aproof, proof_linearised_polynomial_at_zeta))) - mstore(add(mPtr, add(offset, 0x40)), calldataload(add(aproof, proof_l_at_zeta))) - mstore(add(mPtr, add(offset, 0x60)), calldataload(add(aproof, proof_r_at_zeta))) - mstore(add(mPtr, add(offset, 0x80)), calldataload(add(aproof, proof_o_at_zeta))) - mstore(add(mPtr, add(offset, 0xa0)), calldataload(add(aproof, proof_s1_at_zeta))) - mstore(add(mPtr, add(offset, 0xc0)), calldataload(add(aproof, proof_s2_at_zeta))) + mstore(add(mPtr, offset), calldataload(add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x20)), calldataload(add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x40)), calldataload(add(aproof, PROOF_L_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x60)), calldataload(add(aproof, PROOF_R_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x80)), calldataload(add(aproof, PROOF_O_AT_ZETA))) + mstore(add(mPtr, add(offset, 0xa0)), calldataload(add(aproof, PROOF_S1_AT_ZETA))) + mstore(add(mPtr, add(offset, 0xc0)), calldataload(add(aproof, PROOF_S2_AT_ZETA))) - {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} let _mPtr := add(mPtr, add(offset, 0xe0)) - let _poscaz := add(aproof, 
proof_openings_qci_at_zeta) - for {let i:=0} lt(i, vk_nb_custom_gates) {i:=add(i,1)} + {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} + let _poscaz := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + for {let i:=0} lt(i, VK_NB_CUSTOM_GATES) {i:=add(i,1)} { mstore(_mPtr, calldataload(_poscaz)) _poscaz := add(_poscaz, 0x20) @@ -910,75 +1007,74 @@ contract PlonkVerifier { } {{ end }} + mstore(_mPtr, calldataload(add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA))) + let start_input := 0x1b // 00.."gamma" - let size_input := add(0x16, mul(vk_nb_custom_gates,3)) // number of 32bytes elmts = 0x16 (zeta+2*7+7 for the digests+openings) + 2*vk_nb_custom_gates (for the commitments of the selectors) + vk_nb_custom_gates (for the openings of the selectors) + let size_input := add(0x17, mul(VK_NB_CUSTOM_GATES,3)) // number of 32bytes elmts = 0x17 (zeta+2*7+7 for the digests+openings) + 2*VK_NB_CUSTOM_GATES (for the commitments of the selectors) + VK_NB_CUSTOM_GATES (for the openings of the selectors) size_input := add(0x5, mul(size_input, 0x20)) // size in bytes: 15*32 bytes + 5 bytes for gamma - let check_staticcall := staticcall(gas(), 0x2, add(mPtr,start_input), size_input, add(state, state_gamma_kzg), 0x20) - if eq(check_staticcall, 0) { + let check_staticcall := staticcall(gas(), 0x2, add(mPtr,start_input), size_input, add(state, STATE_GAMMA_KZG), 0x20) + if iszero(check_staticcall) { error_verify() } - mstore(add(state, state_gamma_kzg), mod(mload(add(state, state_gamma_kzg)), r_mod)) + mstore(add(state, STATE_GAMMA_KZG), mod(mload(add(state, STATE_GAMMA_KZG)), R_MOD)) } function compute_commitment_linearised_polynomial_ec(aproof, s1, s2) { let state := mload(0x40) - let mPtr := add(mload(0x40), state_last_mem) + let mPtr := add(mload(0x40), STATE_LAST_MEM) - mstore(mPtr, vk_ql_com_x) - mstore(add(mPtr, 0x20), vk_ql_com_y) + mstore(mPtr, VK_QL_COM_X) + mstore(add(mPtr, 0x20), VK_QL_COM_Y) point_mul( - add(state, state_linearised_polynomial_x), + add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, - calldataload(add(aproof, proof_l_at_zeta)), + calldataload(add(aproof, PROOF_L_AT_ZETA)), add(mPtr, 0x40) ) - mstore(mPtr, vk_qr_com_x) - mstore(add(mPtr, 0x20), vk_qr_com_y) + mstore(mPtr, VK_QR_COM_X) + mstore(add(mPtr, 0x20), VK_QR_COM_Y) point_acc_mul( - add(state, state_linearised_polynomial_x), + add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, - calldataload(add(aproof, proof_r_at_zeta)), + calldataload(add(aproof, PROOF_R_AT_ZETA)), add(mPtr, 0x40) ) - let rl := mulmod(calldataload(add(aproof, proof_l_at_zeta)), calldataload(add(aproof, proof_r_at_zeta)), r_mod) - mstore(mPtr, vk_qm_com_x) - mstore(add(mPtr, 0x20), vk_qm_com_y) - point_acc_mul(add(state, state_linearised_polynomial_x), mPtr, rl, add(mPtr, 0x40)) + let rl := mulmod(calldataload(add(aproof, PROOF_L_AT_ZETA)), calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD) + mstore(mPtr, VK_QM_COM_X) + mstore(add(mPtr, 0x20), VK_QM_COM_Y) + point_acc_mul(add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, rl, add(mPtr, 0x40)) - mstore(mPtr, vk_qo_com_x) - mstore(add(mPtr, 0x20), vk_qo_com_y) + mstore(mPtr, VK_QO_COM_X) + mstore(add(mPtr, 0x20), VK_QO_COM_Y) point_acc_mul( - add(state, state_linearised_polynomial_x), + add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, - calldataload(add(aproof, proof_o_at_zeta)), + calldataload(add(aproof, PROOF_O_AT_ZETA)), add(mPtr, 0x40) ) - mstore(mPtr, vk_qk_com_x) - mstore(add(mPtr, 0x20), vk_qk_com_y) + mstore(mPtr, VK_QK_COM_X) + mstore(add(mPtr, 0x20), VK_QK_COM_Y) point_add( - add(state, 
state_linearised_polynomial_x), - add(state, state_linearised_polynomial_x), + add(state, STATE_LINEARISED_POLYNOMIAL_X), + add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, add(mPtr, 0x40) ) - let commits_api_at_zeta := add(aproof, proof_openings_qci_at_zeta) - let commits_api := add( - aproof, - add(proof_openings_qci_at_zeta, mul(vk_nb_custom_gates, 0x20)) - ) + let commits_api_at_zeta := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + let commits_api := add(aproof, PROOF_COMMITMENTS_WIRES_CUSTOM_GATES) for { let i := 0 - } lt(i, vk_nb_custom_gates) { + } lt(i, VK_NB_CUSTOM_GATES) { i := add(i, 1) } { mstore(mPtr, calldataload(commits_api)) mstore(add(mPtr, 0x20), calldataload(add(commits_api, 0x20))) point_acc_mul( - add(state, state_linearised_polynomial_x), + add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, calldataload(commits_api_at_zeta), add(mPtr, 0x40) @@ -987,60 +1083,61 @@ contract PlonkVerifier { commits_api := add(commits_api, 0x40) } - mstore(mPtr, vk_s3_com_x) - mstore(add(mPtr, 0x20), vk_s3_com_y) - point_acc_mul(add(state, state_linearised_polynomial_x), mPtr, s1, add(mPtr, 0x40)) + mstore(mPtr, VK_S3_COM_X) + mstore(add(mPtr, 0x20), VK_S3_COM_Y) + point_acc_mul(add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, s1, add(mPtr, 0x40)) - mstore(mPtr, calldataload(add(aproof, proof_grand_product_commitment_x))) - mstore(add(mPtr, 0x20), calldataload(add(aproof, proof_grand_product_commitment_y))) - point_acc_mul(add(state, state_linearised_polynomial_x), mPtr, s2, add(mPtr, 0x40)) + mstore(mPtr, calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X))) + mstore(add(mPtr, 0x20), calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_Y))) + point_acc_mul(add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, s2, add(mPtr, 0x40)) } - // Compute the commitment to the linearized polynomial equal to - // L(ζ)[Qₗ]+r(ζ)[Qᵣ]+R(ζ)L(ζ)[Qₘ]+O(ζ)[Qₒ]+[Qₖ]+Σᵢqc'ᵢ(ζ)[BsbCommitmentᵢ] + - // α*( Z(μζ)(L(ζ)+β*S₁(ζ)+γ)*(R(ζ)+β*S₂(ζ)+γ)[S₃]-[Z](L(ζ)+β*id_{1}(ζ)+γ)*(R(ζ)+β*id_{2(ζ)+γ)*(O(ζ)+β*id_{3}(ζ)+γ) ) + - // α²*L₁(ζ)[Z] - // where - // * id_1 = id, id_2 = vk_coset_shift*id, id_3 = vk_coset_shift^{2}*id - // * the [] means that it's a commitment (i.e. a point on Bn254(F_p)) + /// @notice Compute the commitment to the linearized polynomial equal to + /// L(ζ)[Qₗ]+r(ζ)[Qᵣ]+R(ζ)L(ζ)[Qₘ]+O(ζ)[Qₒ]+[Qₖ]+Σᵢqc'ᵢ(ζ)[BsbCommitmentᵢ] + + /// α*( Z(μζ)(L(ζ)+β*S₁(ζ)+γ)*(R(ζ)+β*S₂(ζ)+γ)[S₃]-[Z](L(ζ)+β*id_{1}(ζ)+γ)*(R(ζ)+β*id_{2(ζ)+γ)*(O(ζ)+β*id_{3}(ζ)+γ) ) + + /// α²*L₁(ζ)[Z] + /// where + /// * id_1 = id, id_2 = vk_coset_shift*id, id_3 = vk_coset_shift^{2}*id + /// * the [] means that it's a commitment (i.e. 
a point on Bn254(F_p)) + /// @param aproof pointer to the proof function compute_commitment_linearised_polynomial(aproof) { let state := mload(0x40) - let l_beta := mload(add(state, state_beta)) - let l_gamma := mload(add(state, state_gamma)) - let l_zeta := mload(add(state, state_zeta)) - let l_alpha := mload(add(state, state_alpha)) - - let u := mulmod(calldataload(add(aproof, proof_grand_product_at_zeta_omega)), l_beta, r_mod) - let v := mulmod(l_beta, calldataload(add(aproof, proof_s1_at_zeta)), r_mod) - v := addmod(v, calldataload(add(aproof, proof_l_at_zeta)), r_mod) - v := addmod(v, l_gamma, r_mod) - - let w := mulmod(l_beta, calldataload(add(aproof, proof_s2_at_zeta)), r_mod) - w := addmod(w, calldataload(add(aproof, proof_r_at_zeta)), r_mod) - w := addmod(w, l_gamma, r_mod) - - let s1 := mulmod(u, v, r_mod) - s1 := mulmod(s1, w, r_mod) - s1 := mulmod(s1, l_alpha, r_mod) - - let coset_square := mulmod(vk_coset_shift, vk_coset_shift, r_mod) - let betazeta := mulmod(l_beta, l_zeta, r_mod) - u := addmod(betazeta, calldataload(add(aproof, proof_l_at_zeta)), r_mod) - u := addmod(u, l_gamma, r_mod) - - v := mulmod(betazeta, vk_coset_shift, r_mod) - v := addmod(v, calldataload(add(aproof, proof_r_at_zeta)), r_mod) - v := addmod(v, l_gamma, r_mod) - - w := mulmod(betazeta, coset_square, r_mod) - w := addmod(w, calldataload(add(aproof, proof_o_at_zeta)), r_mod) - w := addmod(w, l_gamma, r_mod) - - let s2 := mulmod(u, v, r_mod) - s2 := mulmod(s2, w, r_mod) - s2 := sub(r_mod, s2) - s2 := mulmod(s2, l_alpha, r_mod) - s2 := addmod(s2, mload(add(state, state_alpha_square_lagrange_0)), r_mod) + let l_beta := mload(add(state, STATE_BETA)) + let l_gamma := mload(add(state, STATE_GAMMA)) + let l_zeta := mload(add(state, STATE_ZETA)) + let l_alpha := mload(add(state, STATE_ALPHA)) + + let u := mulmod(calldataload(add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA)), l_beta, R_MOD) + let v := mulmod(l_beta, calldataload(add(aproof, PROOF_S1_AT_ZETA)), R_MOD) + v := addmod(v, calldataload(add(aproof, PROOF_L_AT_ZETA)), R_MOD) + v := addmod(v, l_gamma, R_MOD) + + let w := mulmod(l_beta, calldataload(add(aproof, PROOF_S2_AT_ZETA)), R_MOD) + w := addmod(w, calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD) + w := addmod(w, l_gamma, R_MOD) + + let s1 := mulmod(u, v, R_MOD) + s1 := mulmod(s1, w, R_MOD) + s1 := mulmod(s1, l_alpha, R_MOD) + + let coset_square := mulmod(VK_COSET_SHIFT, VK_COSET_SHIFT, R_MOD) + let betazeta := mulmod(l_beta, l_zeta, R_MOD) + u := addmod(betazeta, calldataload(add(aproof, PROOF_L_AT_ZETA)), R_MOD) + u := addmod(u, l_gamma, R_MOD) + + v := mulmod(betazeta, VK_COSET_SHIFT, R_MOD) + v := addmod(v, calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD) + v := addmod(v, l_gamma, R_MOD) + + w := mulmod(betazeta, coset_square, R_MOD) + w := addmod(w, calldataload(add(aproof, PROOF_O_AT_ZETA)), R_MOD) + w := addmod(w, l_gamma, R_MOD) + + let s2 := mulmod(u, v, R_MOD) + s2 := mulmod(s2, w, R_MOD) + s2 := sub(R_MOD, s2) + s2 := mulmod(s2, l_alpha, R_MOD) + s2 := addmod(s2, mload(add(state, STATE_ALPHA_SQUARE_LAGRANGE_0)), R_MOD) // at this stage: // * s₁ = α*Z(μζ)(l(ζ)+β*s₁(ζ)+γ)*(r(ζ)+β*s₂(ζ)+γ)*β @@ -1049,62 +1146,69 @@ contract PlonkVerifier { compute_commitment_linearised_polynomial_ec(aproof, s1, s2) } - // compute H₁ + ζᵐ⁺²*H₂ + ζ²⁽ᵐ⁺²⁾*H₃ and store the result at - // state + state_folded_h + /// @notice compute H₁ + ζᵐ⁺²*H₂ + ζ²⁽ᵐ⁺²⁾*H₃ and store the result at + /// state + state_folded_h + /// @param aproof pointer to the proof function fold_h(aproof) { let state := mload(0x40) - let 
n_plus_two := add(vk_domain_size, 2) - let mPtr := add(mload(0x40), state_last_mem) - let zeta_power_n_plus_two := pow(mload(add(state, state_zeta)), n_plus_two, mPtr) - point_mul_calldata(add(state, state_folded_h_x), add(aproof, proof_h_2_x), zeta_power_n_plus_two, mPtr) - point_add_calldata(add(state, state_folded_h_x), add(state, state_folded_h_x), add(aproof, proof_h_1_x), mPtr) - point_mul(add(state, state_folded_h_x), add(state, state_folded_h_x), zeta_power_n_plus_two, mPtr) - point_add_calldata(add(state, state_folded_h_x), add(state, state_folded_h_x), add(aproof, proof_h_0_x), mPtr) + let n_plus_two := add(VK_DOMAIN_SIZE, 2) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + let zeta_power_n_plus_two := pow(mload(add(state, STATE_ZETA)), n_plus_two, mPtr) + point_mul_calldata(add(state, STATE_FOLDED_H_X), add(aproof, PROOF_H_2_X), zeta_power_n_plus_two, mPtr) + point_add_calldata(add(state, STATE_FOLDED_H_X), add(state, STATE_FOLDED_H_X), add(aproof, PROOF_H_1_X), mPtr) + point_mul(add(state, STATE_FOLDED_H_X), add(state, STATE_FOLDED_H_X), zeta_power_n_plus_two, mPtr) + point_add_calldata(add(state, STATE_FOLDED_H_X), add(state, STATE_FOLDED_H_X), add(aproof, PROOF_H_0_X), mPtr) } - // check that - // L(ζ)Qₗ(ζ)+r(ζ)Qᵣ(ζ)+R(ζ)L(ζ)Qₘ(ζ)+O(ζ)Qₒ(ζ)+Qₖ(ζ)+Σᵢqc'ᵢ(ζ)BsbCommitmentᵢ(ζ) + - // α*( Z(μζ)(l(ζ)+β*s₁(ζ)+γ)*(r(ζ)+β*s₂(ζ)+γ)*β*s₃(X)-Z(X)(l(ζ)+β*id_1(ζ)+γ)*(r(ζ)+β*id_2(ζ)+γ)*(o(ζ)+β*id_3(ζ)+γ) ) ) - // + α²*L₁(ζ) = - // (ζⁿ-1)H(ζ) + /// @notice check that + /// L(ζ)Qₗ(ζ)+r(ζ)Qᵣ(ζ)+R(ζ)L(ζ)Qₘ(ζ)+O(ζ)Qₒ(ζ)+Qₖ(ζ)+Σᵢqc'ᵢ(ζ)BsbCommitmentᵢ(ζ) + + /// α*( Z(μζ)(l(ζ)+β*s₁(ζ)+γ)*(r(ζ)+β*s₂(ζ)+γ)*β*s₃(X)-Z(X)(l(ζ)+β*id_1(ζ)+γ)*(r(ζ)+β*id_2(ζ)+γ)*(o(ζ)+β*id_3(ζ)+γ) ) ) + /// + α²*L₁(ζ) = + /// (ζⁿ-1)H(ζ) + /// @param aproof pointer to the proof function verify_quotient_poly_eval_at_zeta(aproof) { let state := mload(0x40) // (l(ζ)+β*s1(ζ)+γ) - let s1 := add(mload(0x40), state_last_mem) - mstore(s1, mulmod(calldataload(add(aproof, proof_s1_at_zeta)), mload(add(state, state_beta)), r_mod)) - mstore(s1, addmod(mload(s1), mload(add(state, state_gamma)), r_mod)) - mstore(s1, addmod(mload(s1), calldataload(add(aproof, proof_l_at_zeta)), r_mod)) + let s1 := add(mload(0x40), STATE_LAST_MEM) + mstore(s1, mulmod(calldataload(add(aproof, PROOF_S1_AT_ZETA)), mload(add(state, STATE_BETA)), R_MOD)) + mstore(s1, addmod(mload(s1), mload(add(state, STATE_GAMMA)), R_MOD)) + mstore(s1, addmod(mload(s1), calldataload(add(aproof, PROOF_L_AT_ZETA)), R_MOD)) // (r(ζ)+β*s2(ζ)+γ) let s2 := add(s1, 0x20) - mstore(s2, mulmod(calldataload(add(aproof, proof_s2_at_zeta)), mload(add(state, state_beta)), r_mod)) - mstore(s2, addmod(mload(s2), mload(add(state, state_gamma)), r_mod)) - mstore(s2, addmod(mload(s2), calldataload(add(aproof, proof_r_at_zeta)), r_mod)) + mstore(s2, mulmod(calldataload(add(aproof, PROOF_S2_AT_ZETA)), mload(add(state, STATE_BETA)), R_MOD)) + mstore(s2, addmod(mload(s2), mload(add(state, STATE_GAMMA)), R_MOD)) + mstore(s2, addmod(mload(s2), calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD)) // _s2 := mload(s2) // (o(ζ)+γ) let o := add(s1, 0x40) - mstore(o, addmod(calldataload(add(aproof, proof_o_at_zeta)), mload(add(state, state_gamma)), r_mod)) + mstore(o, addmod(calldataload(add(aproof, PROOF_O_AT_ZETA)), mload(add(state, STATE_GAMMA)), R_MOD)) // α*(Z(μζ))*(l(ζ)+β*s1(ζ)+γ)*(r(ζ)+β*s2(ζ)+γ)*(o(ζ)+γ) - mstore(s1, mulmod(mload(s1), mload(s2), r_mod)) - mstore(s1, mulmod(mload(s1), mload(o), r_mod)) - mstore(s1, mulmod(mload(s1), mload(add(state, state_alpha)), r_mod)) - mstore(s1, 
mulmod(mload(s1), calldataload(add(aproof, proof_grand_product_at_zeta_omega)), r_mod)) + mstore(s1, mulmod(mload(s1), mload(s2), R_MOD)) + mstore(s1, mulmod(mload(s1), mload(o), R_MOD)) + mstore(s1, mulmod(mload(s1), mload(add(state, STATE_ALPHA)), R_MOD)) + mstore(s1, mulmod(mload(s1), calldataload(add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA)), R_MOD)) let computed_quotient := add(s1, 0x60) // linearizedpolynomial + pi(zeta) - mstore(computed_quotient,addmod(calldataload(add(aproof, proof_linearised_polynomial_at_zeta)), mload(add(state, state_pi)), r_mod)) - mstore(computed_quotient, addmod(mload(computed_quotient), mload(s1), r_mod)) - mstore(computed_quotient,addmod(mload(computed_quotient), sub(r_mod, mload(add(state, state_alpha_square_lagrange_0))), r_mod)) - mstore(s2,mulmod(calldataload(add(aproof, proof_quotient_polynomial_at_zeta)),mload(add(state, state_zeta_power_n_minus_one)),r_mod)) + mstore(computed_quotient,addmod(calldataload(add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA)), mload(add(state, STATE_PI)), R_MOD)) + mstore(computed_quotient, addmod(mload(computed_quotient), mload(s1), R_MOD)) + mstore(computed_quotient,addmod(mload(computed_quotient), sub(R_MOD, mload(add(state, STATE_ALPHA_SQUARE_LAGRANGE_0))), R_MOD)) + mstore(s2,mulmod(calldataload(add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA)),mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)),R_MOD)) - mstore(add(state, state_success), eq(mload(computed_quotient), mload(s2))) + mstore(add(state, STATE_SUCCESS), eq(mload(computed_quotient), mload(s2))) } // BEGINNING utils math functions ------------------------------------------------- + + /// @param dst pointer storing the result + /// @param p pointer to the first point + /// @param q pointer to the second point + /// @param mPtr pointer to free memory function point_add(dst, p, q, mPtr) { let state := mload(0x40) mstore(mPtr, mload(p)) @@ -1117,6 +1221,10 @@ contract PlonkVerifier { } } + /// @param dst pointer storing the result + /// @param p pointer to the first point (calldata) + /// @param q pointer to the second point (calladata) + /// @param mPtr pointer to free memory function point_add_calldata(dst, p, q, mPtr) { let state := mload(0x40) mstore(mPtr, mload(p)) @@ -1129,7 +1237,10 @@ contract PlonkVerifier { } } - // dst <- [s]src + /// @parma dst pointer storing the result + /// @param src pointer to a point on Bn254(𝔽_p) + /// @param s scalar + /// @param mPtr free memory function point_mul(dst,src,s, mPtr) { let state := mload(0x40) mstore(mPtr,mload(src)) @@ -1141,7 +1252,10 @@ contract PlonkVerifier { } } - // dst <- [s]src + /// @parma dst pointer storing the result + /// @param src pointer to a point on Bn254(𝔽_p) on calldata + /// @param s scalar + /// @param mPtr free memory function point_mul_calldata(dst, src, s, mPtr) { let state := mload(0x40) mstore(mPtr, calldataload(src)) @@ -1153,7 +1267,11 @@ contract PlonkVerifier { } } - // dst <- dst + [s]src (Elliptic curve) + /// @notice dst <- dst + [s]src (Elliptic curve) + /// @param dst pointer accumulator point storing the result + /// @param src pointer to the point to multiply and add + /// @param s scalar + /// @param mPtr free memory function point_acc_mul(dst,src,s, mPtr) { let state := mload(0x40) mstore(mPtr,mload(src)) @@ -1168,7 +1286,11 @@ contract PlonkVerifier { } } - // dst <- dst + [s]src (Elliptic curve) + /// @notice dst <- dst + [s]src (Elliptic curve) + /// @param dst pointer accumulator point storing the result + /// @param src pointer to the point to multiply and add (on 
calldata) + /// @param s scalar + /// @mPtr free memory function point_acc_mul_calldata(dst, src, s, mPtr) { let state := mload(0x40) mstore(mPtr, calldataload(src)) @@ -1183,20 +1305,26 @@ contract PlonkVerifier { } } - // dst <- dst + src (Fr) dst,src are addresses, s is a value + /// @notice dst <- dst + src*s (Fr) dst,src are addresses, s is a value + /// @param dst pointer storing the result + /// @param src pointer to the scalar to multiply and add (on calldata) + /// @param s scalar function fr_acc_mul_calldata(dst, src, s) { - let tmp := mulmod(calldataload(src), s, r_mod) - mstore(dst, addmod(mload(dst), tmp, r_mod)) + let tmp := mulmod(calldataload(src), s, R_MOD) + mstore(dst, addmod(mload(dst), tmp, R_MOD)) } - // dst <- x ** e mod r (x, e are values, not pointers) + /// @param x element to exponentiate + /// @param e exponent + /// @param mPtr free memory + /// @return res x ** e mod r function pow(x, e, mPtr)->res { mstore(mPtr, 0x20) mstore(add(mPtr, 0x20), 0x20) mstore(add(mPtr, 0x40), 0x20) mstore(add(mPtr, 0x60), x) mstore(add(mPtr, 0x80), e) - mstore(add(mPtr, 0xa0), r_mod) + mstore(add(mPtr, 0xa0), R_MOD) let check_staticcall := staticcall(gas(),0x05,mPtr,0xc0,mPtr,0x20) if eq(check_staticcall, 0) { error_verify() diff --git a/backend/plonk/bn254/verify.go b/backend/plonk/bn254/verify.go index 4279dc5c48..1e0d7ca4de 100644 --- a/backend/plonk/bn254/verify.go +++ b/backend/plonk/bn254/verify.go @@ -17,25 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" + "text/template" "time" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" - - "fmt" - "github.com/consensys/gnark-crypto/ecc/bn254/fp" - - "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" - "text/template" + "github.com/consensys/gnark-crypto/ecc/bn254/fp" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -43,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bn254").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
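Note (not part of the patch): a minimal caller-side sketch of the new variadic verifier options introduced in the hunk above. The Verify signature and backend.NewVerifierConfig come from the diff; the option constructor backend.WithVerifierChallengeHashFunction and the helper name verifyExample are assumptions for illustration only.

package example

import (
	"crypto/sha256"

	"github.com/consensys/gnark-crypto/ecc/bn254/fr"
	"github.com/consensys/gnark/backend"
	bn254plonk "github.com/consensys/gnark/backend/plonk/bn254"
)

// verifyExample shows the two ways the new signature can be called: with no
// options (backend.NewVerifierConfig falls back to its defaults) and with an
// explicit challenge hash for the Fiat-Shamir transcript.
func verifyExample(proof *bn254plonk.Proof, vk *bn254plonk.VerifyingKey, publicWitness fr.Vector) error {
	// no options: cfg.ChallengeHash takes the default value
	if err := bn254plonk.Verify(proof, vk, publicWitness); err != nil {
		return err
	}
	// assumed option constructor: override the hash used to derive gamma/beta/alpha/zeta
	return bn254plonk.Verify(proof, vk, publicWitness,
		backend.WithVerifierChallengeHashFunction(sha256.New()))
}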
@@ -124,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -141,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -255,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/bw6-633/prove.go b/backend/plonk/bw6-633/prove.go index 4cd1f55aa8..77d1a48644 100644 --- a/backend/plonk/bw6-633/prove.go +++ b/backend/plonk/bw6-633/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + "golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" - curve "github.com/consensys/gnark-crypto/ecc/bw6-633" - "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/iop" - cs "github.com/consensys/gnark/constraint/bw6-633" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bw6-633" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bw6-633/verify.go b/backend/plonk/bw6-633/verify.go index 1f73006faa..71e7c2cc72 100644 --- a/backend/plonk/bw6-633/verify.go +++ b/backend/plonk/bw6-633/verify.go @@ -17,20 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" - "time" - "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "time" - "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -38,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bw6-633").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
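[Editor's note] The fr.Hash call that previously mapped each BSB22 commitment to a field element is replaced, for bn254 above and for every other curve below, by the configurable HashToFieldFn, defaulting to hash_to_field.New([]byte("BSB22-Plonk")). A self-contained sketch of that mapping, including the truncation to nbBuf bytes when the hash output is shorter than a field element (helper name is illustrative):

import (
	"hash"

	curve "github.com/consensys/gnark-crypto/ecc/bn254"
	"github.com/consensys/gnark-crypto/ecc/bn254/fr"
	"github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field"
)

// hashCommitmentToField mirrors the pattern used in the provers and verifiers
// in this patch: marshal the commitment, feed it to the hash-to-field
// function, and read back at most fr.Bytes bytes of the digest.
func hashCommitmentToField(htf hash.Hash, cmt curve.G1Affine) fr.Element {
	if htf == nil {
		htf = hash_to_field.New([]byte("BSB22-Plonk")) // patch default
	}
	htf.Reset()
	htf.Write(cmt.Marshal())
	digest := htf.Sum(nil)

	nbBuf := fr.Bytes
	if htf.Size() < fr.Bytes {
		// short digests (e.g. a 3-byte test stub) are used as-is
		nbBuf = htf.Size()
	}

	var res fr.Element
	res.SetBytes(digest[:nbBuf])
	return res
}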
@@ -119,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -136,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -250,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/bw6-761/prove.go b/backend/plonk/bw6-761/prove.go index 71e2acd8aa..35d89a6e73 100644 --- a/backend/plonk/bw6-761/prove.go +++ b/backend/plonk/bw6-761/prove.go @@ -18,9 +18,8 @@ package plonk import ( "context" - "crypto/sha256" "errors" - "golang.org/x/sync/errgroup" + "fmt" "hash" "math/big" "math/bits" @@ -28,24 +27,25 @@ import ( "sync" "time" - "github.com/consensys/gnark/backend/witness" + "golang.org/x/sync/errgroup" "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" - curve "github.com/consensys/gnark-crypto/ecc/bw6-761" - "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft" - + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/iop" - cs "github.com/consensys/gnark/constraint/bw6-761" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bw6-761" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" @@ -122,14 +122,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -181,8 +184,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -223,8 +227,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -233,8 +239,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -251,7 +258,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -309,6 +316,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -317,10 +326,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -333,10 +338,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -816,8 +825,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err diff --git a/backend/plonk/bw6-761/verify.go b/backend/plonk/bw6-761/verify.go index 15c06e9915..e8b4ecd4c6 100644 --- a/backend/plonk/bw6-761/verify.go +++ b/backend/plonk/bw6-761/verify.go @@ -17,20 +17,24 @@ package plonk import ( - "crypto/sha256" "errors" + "fmt" "io" "math/big" - "time" - "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "time" - "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" ) @@ -38,19 +42,20 @@ var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "bw6-761").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() - // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
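[Editor's note] The KZG batch opening and its verification above now take the folding hash from the prover/verifier config and additionally bind the shifted opening's claimed value into the folding transcript. The hashes must match on both sides, as the tests later in this patch exercise. A hypothetical usage sketch follows; the choice of keccak256 (for example to line up with an EVM verifier) is an assumption, not something this patch prescribes.

import (
	"github.com/consensys/gnark/backend"
	"github.com/consensys/gnark/backend/plonk"
	"github.com/consensys/gnark/backend/witness"
	"github.com/consensys/gnark/constraint"
	"golang.org/x/crypto/sha3"
)

// proveAndVerifyWithKeccakFolding proves and verifies with a matching,
// non-default KZG folding hash. ccs, pk, vk and the witnesses are assumed to
// come from the usual Compile/Setup/NewWitness flow.
func proveAndVerifyWithKeccakFolding(ccs constraint.ConstraintSystem, pk plonk.ProvingKey, vk plonk.VerifyingKey, fullWitness, publicWitness witness.Witness) error {
	proof, err := plonk.Prove(ccs, pk, fullWitness,
		backend.WithProverKZGFoldingHashFunction(sha3.NewLegacyKeccak256()))
	if err != nil {
		return err
	}
	// Verification fails if only one side overrides the folding hash,
	// exactly as exercised by TestCustomKZGFoldingHash below.
	return plonk.Verify(proof, vk, publicWitness,
		backend.WithVerifierKZGFoldingHashFunction(sha3.NewLegacyKeccak256()))
}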
@@ -119,11 +124,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -136,7 +150,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -250,7 +264,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/backend/plonk/plonk.go b/backend/plonk/plonk.go index 23e05cf6ba..2fd11b18aa 100644 --- a/backend/plonk/plonk.go +++ b/backend/plonk/plonk.go @@ -155,7 +155,7 @@ func Prove(ccs constraint.ConstraintSystem, pk ProvingKey, fullWitness witness.W } // Verify verifies a PLONK proof, from the proof, preprocessed public data, and public witness. -func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness) error { +func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness, opts ...backend.VerifierOption) error { switch _proof := proof.(type) { @@ -164,49 +164,49 @@ func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness) error { if !ok { return witness.ErrInvalidWitness } - return plonk_bn254.Verify(_proof, vk.(*plonk_bn254.VerifyingKey), w) + return plonk_bn254.Verify(_proof, vk.(*plonk_bn254.VerifyingKey), w, opts...) case *plonk_bls12381.Proof: w, ok := publicWitness.Vector().(fr_bls12381.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls12381.Verify(_proof, vk.(*plonk_bls12381.VerifyingKey), w) + return plonk_bls12381.Verify(_proof, vk.(*plonk_bls12381.VerifyingKey), w, opts...) case *plonk_bls12377.Proof: w, ok := publicWitness.Vector().(fr_bls12377.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls12377.Verify(_proof, vk.(*plonk_bls12377.VerifyingKey), w) + return plonk_bls12377.Verify(_proof, vk.(*plonk_bls12377.VerifyingKey), w, opts...) case *plonk_bw6761.Proof: w, ok := publicWitness.Vector().(fr_bw6761.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bw6761.Verify(_proof, vk.(*plonk_bw6761.VerifyingKey), w) + return plonk_bw6761.Verify(_proof, vk.(*plonk_bw6761.VerifyingKey), w, opts...) case *plonk_bw6633.Proof: w, ok := publicWitness.Vector().(fr_bw6633.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bw6633.Verify(_proof, vk.(*plonk_bw6633.VerifyingKey), w) + return plonk_bw6633.Verify(_proof, vk.(*plonk_bw6633.VerifyingKey), w, opts...) case *plonk_bls24317.Proof: w, ok := publicWitness.Vector().(fr_bls24317.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls24317.Verify(_proof, vk.(*plonk_bls24317.VerifyingKey), w) + return plonk_bls24317.Verify(_proof, vk.(*plonk_bls24317.VerifyingKey), w, opts...) 
case *plonk_bls24315.Proof: w, ok := publicWitness.Vector().(fr_bls24315.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls24315.Verify(_proof, vk.(*plonk_bls24315.VerifyingKey), w) + return plonk_bls24315.Verify(_proof, vk.(*plonk_bls24315.VerifyingKey), w, opts...) default: panic("unrecognized proof type") diff --git a/backend/plonk/plonk_test.go b/backend/plonk/plonk_test.go index 211ef6a243..3cfe8053b9 100644 --- a/backend/plonk/plonk_test.go +++ b/backend/plonk/plonk_test.go @@ -2,12 +2,14 @@ package plonk_test import ( "bytes" + "fmt" "math/big" "testing" "github.com/consensys/gnark" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/kzg" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/plonk" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/frontend" @@ -58,6 +60,129 @@ func TestProver(t *testing.T) { } } +func TestCustomHashToField(t *testing.T) { + assert := test.NewAssert(t) + assignment := &commitmentCircuit{X: 1} + for _, curve := range getCurves() { + curve := curve + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(curve.ScalarField(), scs.NewBuilder, &commitmentCircuit{}) + assert.NoError(err) + srs, err := test.NewKZGSRS(ccs) + assert.NoError(err) + pk, vk, err := plonk.Setup(ccs, srs) + assert.NoError(err) + witness, err := frontend.NewWitness(assignment, curve.ScalarField()) + assert.NoError(err) + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness, backend.WithProverHashToFieldFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness, backend.WithVerifierHashToFieldFunction(constantHash{})) + assert.NoError(err) + }, "prover_verifier") + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness, backend.WithProverHashToFieldFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness) + assert.Error(err) + }, "prover_only") + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness) + assert.Error(err) + _ = proof + }, "verifier_only") + }, curve.String()) + } +} + +func TestCustomChallengeHash(t *testing.T) { + assert := test.NewAssert(t) + assignment := &smallCircuit{X: 1} + for _, curve := range getCurves() { + curve := curve + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(curve.ScalarField(), scs.NewBuilder, &smallCircuit{}) + assert.NoError(err) + srs, err := test.NewKZGSRS(ccs) + assert.NoError(err) + pk, vk, err := plonk.Setup(ccs, srs) + assert.NoError(err) + witness, err := frontend.NewWitness(assignment, curve.ScalarField()) + assert.NoError(err) + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness, backend.WithProverChallengeHashFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness, backend.WithVerifierChallengeHashFunction(constantHash{})) + assert.NoError(err) + }, "prover_verifier") + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness, backend.WithProverChallengeHashFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness) + assert.Error(err) + }, "prover_only") + assert.Run(func(assert *test.Assert) { + 
proof, err := plonk.Prove(ccs, pk, witness) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness, backend.WithVerifierChallengeHashFunction(constantHash{})) + assert.Error(err) + }, "verifier_only") + }, curve.String()) + } +} + +func TestCustomKZGFoldingHash(t *testing.T) { + assert := test.NewAssert(t) + assignment := &smallCircuit{X: 1} + for _, curve := range getCurves() { + curve := curve + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(curve.ScalarField(), scs.NewBuilder, &smallCircuit{}) + assert.NoError(err) + srs, err := test.NewKZGSRS(ccs) + assert.NoError(err) + pk, vk, err := plonk.Setup(ccs, srs) + assert.NoError(err) + witness, err := frontend.NewWitness(assignment, curve.ScalarField()) + assert.NoError(err) + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness, backend.WithProverKZGFoldingHashFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness, backend.WithVerifierKZGFoldingHashFunction(constantHash{})) + assert.NoError(err) + }, "prover_verifier") + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness, backend.WithProverKZGFoldingHashFunction(constantHash{})) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness) + assert.Error(err) + }, "prover_only") + assert.Run(func(assert *test.Assert) { + proof, err := plonk.Prove(ccs, pk, witness) + assert.NoError(err) + pubWitness, err := witness.Public() + assert.NoError(err) + err = plonk.Verify(proof, vk, pubWitness, backend.WithVerifierKZGFoldingHashFunction(constantHash{})) + assert.Error(err) + }, "verifier_only") + }, curve.String()) + } +} + func BenchmarkSetup(b *testing.B) { for _, curve := range getCurves() { b.Run(curve.String(), func(b *testing.B) { @@ -161,6 +286,37 @@ func referenceCircuit(curve ecc.ID) (constraint.ConstraintSystem, frontend.Circu return ccs, &good, srs } +type commitmentCircuit struct { + X frontend.Variable +} + +func (c *commitmentCircuit) Define(api frontend.API) error { + cmt, err := api.(frontend.Committer).Commit(c.X) + if err != nil { + return fmt.Errorf("commit: %w", err) + } + api.AssertIsEqual(cmt, "0xaabbcc") + return nil +} + +type smallCircuit struct { + X frontend.Variable +} + +func (c *smallCircuit) Define(api frontend.API) error { + res := api.Mul(c.X, c.X) + api.AssertIsEqual(c.X, res) + return nil +} + +type constantHash struct{} + +func (h constantHash) Write(p []byte) (n int, err error) { return len(p), nil } +func (h constantHash) Sum(b []byte) []byte { return []byte{0xaa, 0xbb, 0xcc} } +func (h constantHash) Reset() {} +func (h constantHash) Size() int { return 3 } +func (h constantHash) BlockSize() int { return 32 } + func getCurves() []ecc.ID { if testing.Short() { return []ecc.ID{ecc.BN254} diff --git a/backend/plonkfri/bls12-377/prove.go b/backend/plonkfri/bls12-377/prove.go index 97d6c0301e..c0b800e45f 100644 --- a/backend/plonkfri/bls12-377/prove.go +++ b/backend/plonkfri/bls12-377/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, 
"gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) diff --git a/backend/plonkfri/bls12-377/verify.go b/backend/plonkfri/bls12-377/verify.go index b2d6630b07..f4b2653f2f 100644 --- a/backend/plonkfri/bls12-377/verify.go +++ b/backend/plonkfri/bls12-377/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/bls12-381/prove.go b/backend/plonkfri/bls12-381/prove.go index d121b59a37..aae8121d9f 100644 --- a/backend/plonkfri/bls12-381/prove.go +++ b/backend/plonkfri/bls12-381/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) diff --git a/backend/plonkfri/bls12-381/verify.go b/backend/plonkfri/bls12-381/verify.go index 67c4b4923e..49a1f5bc02 100644 --- a/backend/plonkfri/bls12-381/verify.go +++ b/backend/plonkfri/bls12-381/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) 
+ if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/bls24-315/prove.go b/backend/plonkfri/bls24-315/prove.go index cb1f43fcee..594df2cdc0 100644 --- a/backend/plonkfri/bls24-315/prove.go +++ b/backend/plonkfri/bls24-315/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) diff --git a/backend/plonkfri/bls24-315/verify.go b/backend/plonkfri/bls24-315/verify.go index 81d6c7f36f..ad04736683 100644 --- a/backend/plonkfri/bls24-315/verify.go +++ b/backend/plonkfri/bls24-315/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/bls24-317/prove.go b/backend/plonkfri/bls24-317/prove.go index 5fc6cbf713..7653db67ae 100644 --- a/backend/plonkfri/bls24-317/prove.go +++ b/backend/plonkfri/bls24-317/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) 
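[Editor's note] A note on the constantHash helper used by the plonk tests earlier in this patch: its Sum always returns the three bytes {0xaa, 0xbb, 0xcc} and its Size is 3, so the hash-to-field code takes the Size()-byte truncation branch and the committed wire becomes the field element 0xaabbcc, which is exactly what commitmentCircuit asserts. A small, illustrative check of that arithmetic:

import (
	"fmt"

	"github.com/consensys/gnark-crypto/ecc/bn254/fr"
)

func main() {
	// constantHash.Sum returns {0xaa, 0xbb, 0xcc} and Size() == 3, so
	// SetBytes is called on those three bytes only.
	var e fr.Element
	e.SetBytes([]byte{0xaa, 0xbb, 0xcc})
	fmt.Println(e.String()) // 11189196, i.e. 0xaabbcc, matching the circuit's assertion
}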
diff --git a/backend/plonkfri/bls24-317/verify.go b/backend/plonkfri/bls24-317/verify.go index 206b5ae3a1..996302faf5 100644 --- a/backend/plonkfri/bls24-317/verify.go +++ b/backend/plonkfri/bls24-317/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/bn254/prove.go b/backend/plonkfri/bn254/prove.go index 161ad667f4..4a79841f26 100644 --- a/backend/plonkfri/bn254/prove.go +++ b/backend/plonkfri/bn254/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) diff --git a/backend/plonkfri/bn254/verify.go b/backend/plonkfri/bn254/verify.go index 028c117d69..d28b58ce02 100644 --- a/backend/plonkfri/bn254/verify.go +++ b/backend/plonkfri/bn254/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) 
+ if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/bw6-633/prove.go b/backend/plonkfri/bw6-633/prove.go index e71df6e7aa..1202013681 100644 --- a/backend/plonkfri/bw6-633/prove.go +++ b/backend/plonkfri/bw6-633/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) diff --git a/backend/plonkfri/bw6-633/verify.go b/backend/plonkfri/bw6-633/verify.go index 491b1ee78a..9b8cd07344 100644 --- a/backend/plonkfri/bw6-633/verify.go +++ b/backend/plonkfri/bw6-633/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/bw6-761/prove.go b/backend/plonkfri/bw6-761/prove.go index 9092580485..74ccbbc4c5 100644 --- a/backend/plonkfri/bw6-761/prove.go +++ b/backend/plonkfri/bw6-761/prove.go @@ -17,7 +17,6 @@ package plonkfri import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -76,11 +75,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) 
diff --git a/backend/plonkfri/bw6-761/verify.go b/backend/plonkfri/bw6-761/verify.go index 3e05c16966..458c30b0d3 100644 --- a/backend/plonkfri/bw6-761/verify.go +++ b/backend/plonkfri/bw6-761/verify.go @@ -17,22 +17,25 @@ package plonkfri import ( - "crypto/sha256" "errors" + "fmt" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fri" - "math/big" - fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "math/big" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/backend/plonkfri/plonkfri.go b/backend/plonkfri/plonkfri.go index 5fcb3760eb..6d8df14ecc 100644 --- a/backend/plonkfri/plonkfri.go +++ b/backend/plonkfri/plonkfri.go @@ -133,7 +133,7 @@ func Prove(ccs constraint.ConstraintSystem, pk ProvingKey, fullWitness witness.W } // Verify verifies a PLONK proof, from the proof, preprocessed public data, and public witness. -func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness) error { +func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness, opts ...backend.VerifierOption) error { switch _proof := proof.(type) { @@ -142,48 +142,48 @@ func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness) error { if !ok { return witness.ErrInvalidWitness } - return plonk_bn254.Verify(_proof, vk.(*plonk_bn254.VerifyingKey), w) + return plonk_bn254.Verify(_proof, vk.(*plonk_bn254.VerifyingKey), w, opts...) case *plonk_bls12381.Proof: w, ok := publicWitness.Vector().(fr_bls12381.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls12381.Verify(_proof, vk.(*plonk_bls12381.VerifyingKey), w) + return plonk_bls12381.Verify(_proof, vk.(*plonk_bls12381.VerifyingKey), w, opts...) case *plonk_bls12377.Proof: w, ok := publicWitness.Vector().(fr_bls12377.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls12377.Verify(_proof, vk.(*plonk_bls12377.VerifyingKey), w) + return plonk_bls12377.Verify(_proof, vk.(*plonk_bls12377.VerifyingKey), w, opts...) case *plonk_bw6761.Proof: w, ok := publicWitness.Vector().(fr_bw6761.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bw6761.Verify(_proof, vk.(*plonk_bw6761.VerifyingKey), w) + return plonk_bw6761.Verify(_proof, vk.(*plonk_bw6761.VerifyingKey), w, opts...) case *plonk_bw6633.Proof: w, ok := publicWitness.Vector().(fr_bw6633.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bw6633.Verify(_proof, vk.(*plonk_bw6633.VerifyingKey), w) + return plonk_bw6633.Verify(_proof, vk.(*plonk_bw6633.VerifyingKey), w, opts...) 
case *plonk_bls24315.Proof: w, ok := publicWitness.Vector().(fr_bls24315.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls24315.Verify(_proof, vk.(*plonk_bls24315.VerifyingKey), w) + return plonk_bls24315.Verify(_proof, vk.(*plonk_bls24315.VerifyingKey), w, opts...) case *plonk_bls24317.Proof: w, ok := publicWitness.Vector().(fr_bls24317.Vector) if !ok { return witness.ErrInvalidWitness } - return plonk_bls24317.Verify(_proof, vk.(*plonk_bls24317.VerifyingKey), w) + return plonk_bls24317.Verify(_proof, vk.(*plonk_bls24317.VerifyingKey), w, opts...) default: panic("unrecognized proof type") diff --git a/go.mod b/go.mod index 627710bf3f..be87da5f88 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/bits-and-blooms/bitset v1.8.0 github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.13 - github.com/consensys/gnark-crypto v0.12.2-0.20231012161402-206544105834 + github.com/consensys/gnark-crypto v0.12.2-0.20231023220848-538dff926c15 github.com/fxamacker/cbor/v2 v2.5.0 github.com/google/go-cmp v0.5.9 github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b diff --git a/go.sum b/go.sum index 855bcf5c92..9b26c86b07 100644 --- a/go.sum +++ b/go.sum @@ -4,8 +4,8 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= -github.com/consensys/gnark-crypto v0.12.2-0.20231012161402-206544105834 h1:o+Q1/PSZfNkoUAl/Gf5N/u+H6LkOzR3gF0mS8nRSWYM= -github.com/consensys/gnark-crypto v0.12.2-0.20231012161402-206544105834/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= +github.com/consensys/gnark-crypto v0.12.2-0.20231023220848-538dff926c15 h1:fu5ienFKWWqrfMPbWnhw4zfIFZW3pzVIbv3KtASymbU= +github.com/consensys/gnark-crypto v0.12.2-0.20231023220848-538dff926c15/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go index 60ff988fba..ba32fe9f95 100644 --- a/internal/generator/backend/main.go +++ b/internal/generator/backend/main.go @@ -167,7 +167,6 @@ func main() { {File: filepath.Join(groth16Dir, "verify.go"), Templates: []string{"groth16/groth16.verify.go.tmpl", importCurve}}, {File: filepath.Join(groth16Dir, "prove.go"), Templates: []string{"groth16/groth16.prove.go.tmpl", importCurve}, BuildTag: buildTag}, {File: filepath.Join(groth16Dir, "setup.go"), Templates: []string{"groth16/groth16.setup.go.tmpl", importCurve}, BuildTag: buildTag}, - {File: filepath.Join(groth16Dir, "commitment.go"), Templates: []string{"groth16/groth16.commitment.go.tmpl", importCurve}}, {File: filepath.Join(groth16Dir, "marshal.go"), Templates: []string{"groth16/groth16.marshal.go.tmpl", importCurve}, BuildTag: buildTag}, {File: filepath.Join(groth16Dir, "marshal_test.go"), Templates: []string{"groth16/tests/groth16.marshal.go.tmpl", importCurve}}, } diff --git a/internal/generator/backend/template/imports.go.tmpl b/internal/generator/backend/template/imports.go.tmpl index 2c640e8b34..5ed4441632 100644 --- 
a/internal/generator/backend/template/imports.go.tmpl +++ b/internal/generator/backend/template/imports.go.tmpl @@ -60,4 +60,8 @@ {{- define "import_gkr"}} "github.com/consensys/gnark-crypto/ecc/{{ toLower .Curve }}/fr/gkr" +{{- end}} + +{{- define "import_hash_to_field" }} + "github.com/consensys/gnark-crypto/ecc/{{ toLower .Curve}}/fr/hash_to_field" {{- end}} \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.commitment.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.commitment.go.tmpl deleted file mode 100644 index aec745a7d6..0000000000 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.commitment.go.tmpl +++ /dev/null @@ -1,11 +0,0 @@ -import ( - {{- template "import_fr" . }} - {{- template "import_curve" . }} - "github.com/consensys/gnark/constraint" - "math/big" -) - -func solveCommitmentWire(commitment *curve.G1Affine, publicCommitted []*big.Int) (fr.Element, error) { - res, err := fr.Hash(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1), []byte(constraint.CommitmentDst), 1) - return res[0], err -} \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl index 0b7693bfa6..fe4b141afd 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl @@ -1,12 +1,15 @@ import ( + "fmt" + "runtime" + "math/big" + "time" + {{- template "import_fr" . }} {{- template "import_curve" . }} {{- template "import_backend_cs" . }} {{- template "import_fft" . }} + {{- template "import_hash_to_field" . }} {{- template "import_pedersen" .}} - "runtime" - "math/big" - "time" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark/internal/utils" @@ -42,7 +45,10 @@ func (proof *Proof) CurveID() ecc.ID { func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() @@ -69,8 +75,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl index 04408d09f3..80891eb4a3 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl @@ -1,17 +1,19 @@ import ( - "github.com/consensys/gnark-crypto/ecc" - {{- template "import_curve" . }} - {{- template "import_fr" . }} - "fmt" "errors" - "time" + "fmt" "io" - {{- if eq .Curve "BN254"}} "text/template" {{- end}} + "time" + + "github.com/consensys/gnark-crypto/ecc" + {{- template "import_curve" . }} + {{- template "import_fr" . }} {{- template "import_pedersen" .}} + {{- template "import_hash_to_field" . }} "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/logger" ) @@ -22,7 +24,14 @@ var ( ) // Verify verifies a proof with given VerifyingKey and publicWitness -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + opt, err := backend.NewVerifierConfig(opts...) 
+ if err != nil { + return fmt.Errorf("new verifier config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } nbPublicVars := len(vk.G1.K) - len(vk.PublicAndCommitmentCommitted) @@ -61,12 +70,17 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { copy(commitmentPrehashSerialized[offset:], publicWitness[vk.PublicAndCommitmentCommitted[i][j]-1].Marshal()) offset += fr.Bytes } - if res, err := fr.Hash(commitmentPrehashSerialized[:offset], []byte(constraint.CommitmentDst), 1); err != nil { - return err - } else { - publicWitness = append(publicWitness, res[0]) - copy(commitmentsSerialized[i*fr.Bytes:], res[0].Marshal()) + opt.HashToFieldFn.Write(commitmentPrehashSerialized[:offset]) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() } + var res fr.Element + res.SetBytes(hashBts[:nbBuf]) + publicWitness = append(publicWitness, res) + copy(commitmentsSerialized[i*fr.Bytes:], res.Marshal()) } if folded, err := pedersen.FoldCommitments(proof.Commitments, commitmentsSerialized); err != nil { diff --git a/internal/generator/backend/template/zkpschemes/plonk/plonk.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/plonk/plonk.prove.go.tmpl index 03fddde7a9..7ac814c65d 100644 --- a/internal/generator/backend/template/zkpschemes/plonk/plonk.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/plonk/plonk.prove.go.tmpl @@ -1,31 +1,31 @@ import ( - "crypto/sha256" + "context" + "errors" + "fmt" + "hash" "math/big" "math/bits" "runtime" - "time" - "errors" "sync" - "hash" - "context" + "time" + "golang.org/x/sync/errgroup" - "github.com/consensys/gnark/backend/witness" - "github.com/consensys/gnark-crypto/ecc" - {{ template "import_fr" . }} {{ template "import_curve" . }} - {{ template "import_kzg" . }} + {{ template "import_fr" . }} {{ template "import_fft" . }} - {{ template "import_backend_cs" . }} + {{- template "import_hash_to_field" . }} "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/fr/iop" - + {{ template "import_kzg" . }} + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + {{ template "import_backend_cs" . }} + "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" - "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" - "github.com/consensys/gnark-crypto/fiat-shamir" - "github.com/consensys/gnark/constraint" ) // TODO in gnark-crypto: @@ -99,14 +99,17 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts // parse the options opt, err := backend.NewProverConfig(opts...) 
if err != nil { - return nil, err + return nil, fmt.Errorf("get prover options: %w", err) } start := time.Now() // init instance g, ctx := errgroup.WithContext(context.Background()) - instance := newInstance(ctx, spr, pk, fullWitness, &opt) + instance, err := newInstance(ctx, spr, pk, fullWitness, &opt) + if err != nil { + return nil, fmt.Errorf("new instance: %w", err) + } // solve constraints g.Go(instance.solveConstraints) @@ -158,8 +161,9 @@ type instance struct { spr *cs.SparseR1CS opt *backend.ProverConfig - fs fiatshamir.Transcript - hFunc hash.Hash + fs fiatshamir.Transcript + kzgFoldingHash hash.Hash // for KZG folding + htfFunc hash.Hash // hash to field function // polynomials x []*iop.Polynomial // x stores tracks the polynomial we need @@ -200,8 +204,10 @@ type instance struct { chGammaBeta chan struct{} } -func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) instance { - hFunc := sha256.New() +func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts *backend.ProverConfig) (*instance, error) { + if opts.HashToFieldFn == nil { + opts.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } s := instance{ ctx: ctx, pk: pk, @@ -210,8 +216,9 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi opt: opts, fullWitness: fullWitness, bp: make([]*iop.Polynomial, nb_blinding_polynomials), - fs: fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta"), - hFunc: hFunc, + fs: fiatshamir.NewTranscript(opts.ChallengeHash, "gamma", "beta", "alpha", "zeta"), + kzgFoldingHash: opts.KZGFoldingHash, + htfFunc: opts.HashToFieldFn, chLRO: make(chan struct{}, 1), chQk: make(chan struct{}, 1), chbp: make(chan struct{}, 1), @@ -228,7 +235,7 @@ func newInstance(ctx context.Context, spr *cs.SparseR1CS, pk *ProvingKey, fullWi s.setupGKRHints() s.x = make([]*iop.Polynomial, id_Qci+2*len(s.commitmentInfo)) - return s + return &s, nil } func (s *instance) initComputeNumerator() error { @@ -287,6 +294,8 @@ func (s *instance) initBSB22Commitments() { // Computing and verifying Bsb22 multi-commits explained in https://hackmd.io/x8KsadW3RRyX7YTCFJIkHg func (s *instance) bsb22Hint(commDepth int) solver.Hint { return func(_ *big.Int, ins, outs []*big.Int) error { + var err error + res := &s.commitmentVal[commDepth] commitmentInfo := s.spr.CommitmentInfo.(constraint.PlonkCommitments)[commDepth] @@ -295,10 +304,6 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { for i := range ins { committedValues[offset+commitmentInfo.Committed[i]].SetBigInt(ins[i]) } - var ( - err error - hashRes []fr.Element - ) if _, err = committedValues[offset+commitmentInfo.CommitmentIndex].SetRandom(); err != nil { // Commitment injection constraint has qcp = 0. Safe to use for blinding. 
return err } @@ -311,10 +316,14 @@ func (s *instance) bsb22Hint(commDepth int) solver.Hint { } s.cCommitments[commDepth].ToCanonical(&s.pk.Domain[0]).ToRegular() - if hashRes, err = fr.Hash(s.proof.Bsb22Commitments[commDepth].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err + s.htfFunc.Write(s.proof.Bsb22Commitments[commDepth].Marshal()) + hashBts := s.htfFunc.Sum(nil) + s.htfFunc.Reset() + nbBuf := fr.Bytes + if s.htfFunc.Size() < fr.Bytes { + nbBuf = s.htfFunc.Size() } - res.Set(&hashRes[0]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses + res.SetBytes(hashBts[:nbBuf]) // TODO @Tabaie use CommitmentIndex for this; create a new variable CommitmentConstraintIndex for other uses res.BigInt(outs[0]) return nil @@ -794,8 +803,9 @@ func (s *instance) batchOpening() error { polysToOpen, digestsToOpen, s.zeta, - s.hFunc, + s.kzgFoldingHash, s.pk.Kzg, + s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) return err @@ -1350,4 +1360,4 @@ func computeLinearizedPolynomial(lZeta, rZeta, oZeta, alpha, beta, gamma, zeta, return blindedZCanonical } -var errContextDone = errors.New("context done") +var errContextDone = errors.New("context done") \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/plonk/plonk.verify.go.tmpl b/internal/generator/backend/template/zkpschemes/plonk/plonk.verify.go.tmpl index 0eb70f5506..04fa61b042 100644 --- a/internal/generator/backend/template/zkpschemes/plonk/plonk.verify.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/plonk/plonk.verify.go.tmpl @@ -1,41 +1,45 @@ import ( - "crypto/sha256" "errors" + "fmt" + "io" "math/big" + {{ if eq .Curve "BN254" -}} + "text/template" + {{- end }} "time" - "io" + + "github.com/consensys/gnark-crypto/ecc" + {{ template "import_curve" . }} {{ template "import_fr" . }} - {{if eq .Curve "BN254"}} + {{ if eq .Curve "BN254" -}} "github.com/consensys/gnark-crypto/ecc/bn254/fp" - "fmt" - {{end}} + {{- end }} + {{- template "import_hash_to_field" . }} {{ template "import_kzg" . }} - {{ template "import_curve" . }} - {{if eq .Curve "BN254"}} - "text/template" - {{end}} + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" "github.com/consensys/gnark/logger" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/fiat-shamir" ) var ( errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") ) -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { log := logger.Logger().With().Str("curve", "{{ toLower .Curve }}").Str("backend", "plonk").Logger() start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } if len(proof.Bsb22Commitments) != len(vk.Qcp) { return errors.New("BSB22 Commitment number mismatch") } - // pick a hash function to derive the challenge (the same as in the prover) - hFunc := sha256.New() // transcript to derive the challenge - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") // The first challenge is derived using the public data: the commitments to the permutation, // the coefficients of the circuit, and the public inputs. 
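[Editor's note] Returning to the groth16 templates earlier in this patch: the deleted solveCommitmentWire helper (fr.Hash over the serialized commitment) is replaced by the same configurable hash-to-field flow, with constraint.CommitmentDst as the default domain-separation tag. A self-contained sketch of the commitment-wire computation as the generated prover and verifier now perform it (the helper name is illustrative):

import (
	"hash"
	"math/big"

	curve "github.com/consensys/gnark-crypto/ecc/bn254"
	"github.com/consensys/gnark-crypto/ecc/bn254/fr"
	"github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field"
	"github.com/consensys/gnark/constraint"
)

// commitmentWire reproduces the replacement for solveCommitmentWire: serialize
// the commitment together with the committed public values, hash to the
// field, and truncate the digest to at most fr.Bytes bytes.
func commitmentWire(htf hash.Hash, commitment curve.G1Affine, publicCommitted []*big.Int) fr.Element {
	if htf == nil {
		htf = hash_to_field.New([]byte(constraint.CommitmentDst)) // patch default
	}
	htf.Reset()
	htf.Write(constraint.SerializeCommitment(commitment.Marshal(), publicCommitted, (fr.Bits-1)/8+1))
	digest := htf.Sum(nil)

	nbBuf := fr.Bytes
	if htf.Size() < fr.Bytes {
		nbBuf = htf.Size()
	}
	var res fr.Element
	res.SetBytes(digest[:nbBuf])
	return res
}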
@@ -104,11 +108,20 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { } } + if cfg.HashToFieldFn == nil { + cfg.HashToFieldFn = hash_to_field.New([]byte("BSB22-Plonk")) + } + var hashBts []byte + var hashedCmt fr.Element + nbBuf := fr.Bytes + if cfg.HashToFieldFn.Size() < fr.Bytes { + nbBuf = cfg.HashToFieldFn.Size() + } for i := range vk.CommitmentConstraintIndexes { - var hashRes []fr.Element - if hashRes, err = fr.Hash(proof.Bsb22Commitments[i].Marshal(), []byte("BSB22-Plonk"), 1); err != nil { - return err - } + cfg.HashToFieldFn.Write(proof.Bsb22Commitments[i].Marshal()) + hashBts = cfg.HashToFieldFn.Sum(hashBts[0:]) + cfg.HashToFieldFn.Reset() + hashedCmt.SetBytes(hashBts[:nbBuf]) // Computing L_{CommitmentIndex} @@ -121,7 +134,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { Div(&lagrange, &den). // wⁱ(ζ-1)/(ζ-wⁱ) Mul(&lagrange, &lagrangeOne) // wⁱ/n (ζⁿ-1)/(ζ-wⁱ) - xiLi.Mul(&lagrange, &hashRes[0]) + xiLi.Mul(&lagrange, &hashedCmt) pi.Add(&pi, &xiLi) } } @@ -235,7 +248,8 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { digestsToFold, &proof.BatchedProof, zeta, - hFunc, + cfg.KZGFoldingHash, + zu.Marshal(), ) if err != nil { return err diff --git a/internal/generator/backend/template/zkpschemes/plonkfri/plonk.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/plonkfri/plonk.prove.go.tmpl index 5f3ee2ccf2..d8f50bee1d 100644 --- a/internal/generator/backend/template/zkpschemes/plonkfri/plonk.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/plonkfri/plonk.prove.go.tmpl @@ -1,5 +1,4 @@ import ( - "crypto/sha256" "math/big" "math/bits" "runtime" @@ -55,11 +54,8 @@ func Prove(spr *cs.SparseR1CS, pk *ProvingKey, fullWitness witness.Witness, opts var proof Proof - // pick a hash function that will be used to derive the challenges - hFunc := sha256.New() - // 0 - Fiat Shamir - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(opt.ChallengeHash, "gamma", "beta", "alpha", "zeta") // 1 - solve the system _solution, err := spr.Solve(fullWitness, opt.SolverOpts...) diff --git a/internal/generator/backend/template/zkpschemes/plonkfri/plonk.verify.go.tmpl b/internal/generator/backend/template/zkpschemes/plonkfri/plonk.verify.go.tmpl index 26cb1eff25..e033680eac 100644 --- a/internal/generator/backend/template/zkpschemes/plonkfri/plonk.verify.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/plonkfri/plonk.verify.go.tmpl @@ -1,21 +1,24 @@ import ( - "crypto/sha256" + "fmt" "errors" "math/big" {{- template "import_fri" . }} {{- template "import_fr" . }} - + "github.com/consensys/gnark/backend" fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" ) var ErrInvalidAlgebraicRelation = errors.New("algebraic relation does not hold") -func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector) error { +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + cfg, err := backend.NewVerifierConfig(opts...) 
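+	// NewVerifierConfig builds the verifier configuration from the functional options
+	// (e.g. a custom challenge hash); when no options are passed, the defaults are used.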
+ if err != nil { + return fmt.Errorf("create backend config: %w", err) + } // 0 - derive the challenges with Fiat Shamir - hFunc := sha256.New() - fs := fiatshamir.NewTranscript(hFunc, "gamma", "beta", "alpha", "zeta") + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") dataFiatShamir := make([][fr.Bytes]byte, len(publicWitness)+3) for i := 0; i < len(publicWitness); i++ { diff --git a/internal/stats/latest.stats b/internal/stats/latest.stats index 3c6bc5114b72dd0453933c767587f7d5428cf898..e722b93cbee8debf3e3c1fe6122b196a8a4e9f64 100644 GIT binary patch delta 157 zcmZn^YZRN1#i%#=ETj131;n;bScN99 zXSz9w9 LJO{hb#H>~TPsA>v delta 147 zcmZn^YZRN1HMxeXb#ofyA;!s7%-1Jo{n^~Yew1;t7MHT_y&A@U_i7jz7&w5Kfr0Ta zi?0Wmz$P@=o%8WzBi5G5fh-RP$sj6)@n2L5NH^FF#=nd+)xZP}ApkN4 BC}{uy diff --git a/std/algebra/defaults.go b/std/algebra/defaults.go new file mode 100644 index 0000000000..3c78897959 --- /dev/null +++ b/std/algebra/defaults.go @@ -0,0 +1,87 @@ +package algebra + +import ( + "fmt" + + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/algebra/emulated/sw_bls12381" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/algebra/emulated/sw_bw6761" + "github.com/consensys/gnark/std/algebra/emulated/sw_emulated" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/algebra/native/sw_bls24315" + "github.com/consensys/gnark/std/math/emulated/emparams" +) + +// GetCurve returns the [Curve] implementation corresponding to the scalar and +// G1 type parameters. The method allows to have a fully generic implementation +// without taking into consideration the initialization differences of different +// curves. +func GetCurve[S ScalarT, G1El G1ElementT](api frontend.API) (Curve[S, G1El], error) { + var ret Curve[S, G1El] + switch s := any(&ret).(type) { + case *Curve[sw_bn254.Scalar, sw_bn254.G1Affine]: + c, err := sw_emulated.New[emparams.BN254Fp, emparams.BN254Fr](api, sw_emulated.GetBN254Params()) + if err != nil { + return ret, fmt.Errorf("new curve: %w", err) + } + *s = c + case *Curve[sw_bw6761.Scalar, sw_bw6761.G1Affine]: + c, err := sw_emulated.New[emparams.BW6761Fp, emparams.BW6761Fr](api, sw_emulated.GetBW6761Params()) + if err != nil { + return ret, fmt.Errorf("new curve: %w", err) + } + *s = c + case *Curve[sw_bls12381.Scalar, sw_bls12381.G1Affine]: + c, err := sw_emulated.New[emparams.BLS12381Fp, emparams.BLS12381Fr](api, sw_emulated.GetBLS12381Params()) + if err != nil { + return ret, fmt.Errorf("new curve: %w", err) + } + *s = c + case *Curve[sw_bls12377.Scalar, sw_bls12377.G1Affine]: + c := sw_bls12377.NewCurve(api) + *s = c + case *Curve[sw_bls24315.Scalar, sw_bls24315.G1Affine]: + c := sw_bls24315.NewCurve(api) + *s = c + default: + return ret, fmt.Errorf("unknown type parametrisation") + } + return ret, nil +} + +// GetPairing returns the [Pairing] implementation corresponding to the groups +// type parameters. The method allows to have a fully generic implementation +// without taking into consideration the initialization differences. 
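//
// An illustrative usage sketch (not part of this diff; the circuit type is an example
// name, and it assumes the [Pairing] interface exposes Pair with the same signature as
// the concrete backends):
//
//	type PairCircuit[G1El G1ElementT, G2El G2ElementT, GtEl GtElementT] struct {
//		P G1El
//		Q G2El
//	}
//
//	func (c *PairCircuit[G1El, G2El, GtEl]) Define(api frontend.API) error {
//		pairing, err := GetPairing[G1El, G2El, GtEl](api)
//		if err != nil {
//			return fmt.Errorf("get pairing: %w", err)
//		}
//		// e.g. instantiated as PairCircuit[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]
//		_, err = pairing.Pair([]*G1El{&c.P}, []*G2El{&c.Q})
//		return err
//	}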
+func GetPairing[G1El G1ElementT, G2El G2ElementT, GtEl GtElementT](api frontend.API) (Pairing[G1El, G2El, GtEl], error) { + var ret Pairing[G1El, G2El, GtEl] + switch s := any(&ret).(type) { + case *Pairing[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]: + p, err := sw_bn254.NewPairing(api) + if err != nil { + return ret, fmt.Errorf("new pairing: %w", err) + } + *s = p + case *Pairing[sw_bw6761.G1Affine, sw_bw6761.G2Affine, sw_bw6761.GTEl]: + p, err := sw_bw6761.NewPairing(api) + if err != nil { + return ret, fmt.Errorf("new pairing: %w", err) + } + *s = p + case *Pairing[sw_bls12381.G1Affine, sw_bls12381.G2Affine, sw_bls12381.GTEl]: + p, err := sw_bls12381.NewPairing(api) + if err != nil { + return ret, fmt.Errorf("new pairing: %w", err) + } + *s = p + case *Pairing[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]: + p := sw_bls12377.NewPairing(api) + *s = p + case *Pairing[sw_bls24315.G1Affine, sw_bls24315.G2Affine, sw_bls24315.GT]: + p := sw_bls24315.NewPairing(api) + *s = p + default: + return ret, fmt.Errorf("unknown type parametrisation") + } + return ret, nil +} diff --git a/std/algebra/doc.go b/std/algebra/doc.go index 18e70aeac5..7920ca408d 100644 --- a/std/algebra/doc.go +++ b/std/algebra/doc.go @@ -10,5 +10,8 @@ // - using nonnative field via field emulation (`emulated/`). This allows to // use any curve over any (SNARK) field (e.g. secp256k1 curve arithmetic over // BN254 SNARK field or BN254 pairing over BN254 SNARK field). The drawback -// of this approach is the extreme cost of the operations. +// of this approach is the additional cost (~15x) of the operations. +// +// This package also defines the generic interfaces [Curve] and [Pairing] for +// downstream curve-agnostic usage. package algebra diff --git a/std/algebra/emulated/fields_bls12381/e12_pairing.go b/std/algebra/emulated/fields_bls12381/e12_pairing.go index c096b0d2fd..1810557e14 100644 --- a/std/algebra/emulated/fields_bls12381/e12_pairing.go +++ b/std/algebra/emulated/fields_bls12381/e12_pairing.go @@ -80,8 +80,7 @@ func (e Ext12) ExptTorus(x *E6) *E6 { // } func (e *Ext12) MulBy014(z *E12, c0, c1 *E2) *E12 { - a := z.C0 - a = *e.MulBy01(&a, c0, c1) + a := e.MulBy01(&z.C0, c0, c1) var b E6 // Mul by E6{0, 1, 0} @@ -94,10 +93,10 @@ func (e *Ext12) MulBy014(z *E12, c0, c1 *E2) *E12 { zC1 := e.Ext6.Add(&z.C1, &z.C0) zC1 = e.Ext6.MulBy01(zC1, c0, d) - zC1 = e.Ext6.Sub(zC1, &a) + zC1 = e.Ext6.Sub(zC1, a) zC1 = e.Ext6.Sub(zC1, &b) zC0 := e.Ext6.MulByNonResidue(&b) - zC0 = e.Ext6.Add(zC0, &a) + zC0 = e.Ext6.Add(zC0, a) return &E12{ C0: *zC0, @@ -118,7 +117,7 @@ func (e *Ext12) MulBy014(z *E12, c0, c1 *E2) *E12 { // C0: E6{B0: d0, B1: d1, B2: 0}, // C1: E6{B0: 0, B1: 1, B2: 0}, // } -func (e Ext12) Mul014By014(d0, d1, c0, c1 *E2) *[5]E2 { +func (e Ext12) Mul014By014(d0, d1, c0, c1 *E2) [5]*E2 { one := e.Ext2.One() x0 := e.Ext2.Mul(c0, d0) x1 := e.Ext2.Mul(c1, d1) @@ -141,7 +140,7 @@ func (e Ext12) Mul014By014(d0, d1, c0, c1 *E2) *[5]E2 { zC0B0 := e.Ext2.NonResidue() zC0B0 = e.Ext2.Add(zC0B0, x0) - return &[5]E2{*zC0B0, *x01, *x1, *x04, *x14} + return [5]*E2{zC0B0, x01, x1, x04, x14} } // MulBy01245 multiplies z by an E12 sparse element of the form @@ -150,14 +149,14 @@ func (e Ext12) Mul014By014(d0, d1, c0, c1 *E2) *[5]E2 { // C0: E6{B0: c0, B1: c1, B2: c2}, // C1: E6{B0: 0, B1: c4, B2: c5}, // } -func (e *Ext12) MulBy01245(z *E12, x *[5]E2) *E12 { - c0 := &E6{B0: x[0], B1: x[1], B2: x[2]} - c1 := &E6{B0: *e.Ext2.Zero(), B1: x[3], B2: x[4]} +func (e *Ext12) MulBy01245(z *E12, x [5]*E2) *E12 { + c0 := 
&E6{B0: *x[0], B1: *x[1], B2: *x[2]} + c1 := &E6{B0: *e.Ext2.Zero(), B1: *x[3], B2: *x[4]} a := e.Ext6.Add(&z.C0, &z.C1) b := e.Ext6.Add(c0, c1) a = e.Ext6.Mul(a, b) b = e.Ext6.Mul(&z.C0, c0) - c := e.Ext6.MulBy12(&z.C1, &x[3], &x[4]) + c := e.Ext6.MulBy12(&z.C1, x[3], x[4]) z1 := e.Ext6.Sub(a, b) z1 = e.Ext6.Sub(z1, c) z0 := e.Ext6.MulByNonResidue(c) diff --git a/std/algebra/emulated/fields_bn254/e12_pairing.go b/std/algebra/emulated/fields_bn254/e12_pairing.go index 678b438eb8..257f7d640f 100644 --- a/std/algebra/emulated/fields_bn254/e12_pairing.go +++ b/std/algebra/emulated/fields_bn254/e12_pairing.go @@ -82,7 +82,7 @@ func (e *Ext12) Square034(x *E12) *E12 { B2: *e.Ext2.Zero(), } - c3 := E6{ + c3 := &E6{ B0: x.C0.B0, B1: *e.Ext2.Neg(&x.C1.B0), B2: *e.Ext2.Neg(&x.C1.B1), @@ -93,8 +93,8 @@ func (e *Ext12) Square034(x *E12) *E12 { B1: x.C1.B1, B2: *e.Ext2.Zero(), } - c3 = *e.MulBy01(&c3, &c0.B0, &c0.B1) - c3 = *e.Ext6.Add(&c3, &c2) + c3 = e.MulBy01(c3, &c0.B0, &c0.B1) + c3 = e.Ext6.Add(c3, &c2) var z E12 z.C1.B0 = *e.Ext2.Add(&c2.B0, &c2.B0) @@ -116,16 +116,15 @@ func (e *Ext12) Square034(x *E12) *E12 { func (e *Ext12) MulBy034(z *E12, c3, c4 *E2) *E12 { a := z.C0 - b := z.C1 - b = *e.MulBy01(&b, c3, c4) + b := e.MulBy01(&z.C1, c3, c4) c3 = e.Ext2.Add(e.Ext2.One(), c3) d := e.Ext6.Add(&z.C0, &z.C1) d = e.MulBy01(d, c3, c4) - zC1 := e.Ext6.Add(&a, &b) + zC1 := e.Ext6.Add(&a, b) zC1 = e.Ext6.Neg(zC1) zC1 = e.Ext6.Add(zC1, d) - zC0 := e.Ext6.MulByNonResidue(&b) + zC0 := e.Ext6.MulByNonResidue(b) zC0 = e.Ext6.Add(zC0, &a) return &E12{ @@ -147,7 +146,7 @@ func (e *Ext12) MulBy034(z *E12, c3, c4 *E2) *E12 { // C0: E6{B0: 1, B1: 0, B2: 0}, // C1: E6{B0: d3, B1: d4, B2: 0}, // } -func (e *Ext12) Mul034By034(d3, d4, c3, c4 *E2) *[5]E2 { +func (e *Ext12) Mul034By034(d3, d4, c3, c4 *E2) [5]*E2 { x3 := e.Ext2.Mul(c3, d3) x4 := e.Ext2.Mul(c4, d4) x04 := e.Ext2.Add(c4, d4) @@ -165,7 +164,7 @@ func (e *Ext12) Mul034By034(d3, d4, c3, c4 *E2) *[5]E2 { zC1B0 := x03 zC1B1 := x04 - return &[5]E2{*zC0B0, *zC0B1, *zC0B2, *zC1B0, *zC1B1} + return [5]*E2{zC0B0, zC0B1, zC0B2, zC1B0, zC1B1} } // MulBy01234 multiplies z by an E12 sparse element of the form @@ -174,14 +173,14 @@ func (e *Ext12) Mul034By034(d3, d4, c3, c4 *E2) *[5]E2 { // C0: E6{B0: c0, B1: c1, B2: c2}, // C1: E6{B0: c3, B1: c4, B2: 0}, // } -func (e *Ext12) MulBy01234(z *E12, x *[5]E2) *E12 { - c0 := &E6{B0: x[0], B1: x[1], B2: x[2]} - c1 := &E6{B0: x[3], B1: x[4], B2: *e.Ext2.Zero()} +func (e *Ext12) MulBy01234(z *E12, x [5]*E2) *E12 { + c0 := &E6{B0: *x[0], B1: *x[1], B2: *x[2]} + c1 := &E6{B0: *x[3], B1: *x[4], B2: *e.Ext2.Zero()} a := e.Ext6.Add(&z.C0, &z.C1) b := e.Ext6.Add(c0, c1) a = e.Ext6.Mul(a, b) b = e.Ext6.Mul(&z.C0, c0) - c := e.Ext6.MulBy01(&z.C1, &x[3], &x[4]) + c := e.Ext6.MulBy01(&z.C1, x[3], x[4]) z1 := e.Ext6.Sub(a, b) z1 = e.Ext6.Sub(z1, c) z0 := e.Ext6.MulByNonResidue(c) @@ -205,13 +204,13 @@ func (e *Ext12) MulBy01234(z *E12, x *[5]E2) *E12 { // C0: E6{B0: 1, B1: 0, B2: 0}, // C1: E6{B0: z3, B1: z4, B2: 0}, // } -func (e *Ext12) Mul01234By034(x *[5]E2, z3, z4 *E2) *E12 { - c0 := &E6{B0: x[0], B1: x[1], B2: x[2]} - c1 := &E6{B0: x[3], B1: x[4], B2: *e.Ext2.Zero()} +func (e *Ext12) Mul01234By034(x [5]*E2, z3, z4 *E2) *E12 { + c0 := &E6{B0: *x[0], B1: *x[1], B2: *x[2]} + c1 := &E6{B0: *x[3], B1: *x[4], B2: *e.Ext2.Zero()} a := e.Ext6.Add(e.Ext6.One(), &E6{B0: *z3, B1: *z4, B2: *e.Ext2.Zero()}) b := e.Ext6.Add(c0, c1) a = e.Ext6.Mul(a, b) - c := e.Ext6.Mul01By01(z3, z4, &x[3], &x[4]) + c := e.Ext6.Mul01By01(z3, z4, x[3], x[4]) z1 
:= e.Ext6.Sub(a, c0) z1 = e.Ext6.Sub(z1, c) z0 := e.Ext6.MulByNonResidue(c) @@ -263,7 +262,11 @@ func (e Ext12) DecompressTorus(y *E6) *E12 { // N.B.: we use MulTorus in the final exponentiation throughout y1 ≠ -y2 always. func (e Ext12) MulTorus(y1, y2 *E6) *E6 { n := e.Ext6.Mul(y1, y2) - n.B1 = *e.Ext2.Add(&n.B1, e.Ext2.One()) + n = &E6{ + B0: n.B0, + B1: *e.Ext2.Add(&n.B1, e.Ext2.One()), + B2: n.B2, + } d := e.Ext6.Add(y1, y2) y3 := e.Ext6.DivUnchecked(n, d) return y3 diff --git a/std/algebra/emulated/fields_bw6761/doc.go b/std/algebra/emulated/fields_bw6761/doc.go new file mode 100644 index 0000000000..29858f7424 --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/doc.go @@ -0,0 +1,6 @@ +// Package fields_bw6761 implements the fields arithmetic of the Fp6 tower +// used to compute the pairing over the BW6-761 curve. +// +// 𝔽p³[u] = 𝔽p/u³+4 +// 𝔽p⁶[v] = 𝔽p²/v²-u +package fields_bw6761 diff --git a/std/algebra/emulated/fields_bw6761/e3.go b/std/algebra/emulated/fields_bw6761/e3.go new file mode 100644 index 0000000000..1abe35dbce --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/e3.go @@ -0,0 +1,408 @@ +package fields_bw6761 + +import ( + "math/big" + + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/math/emulated" +) + +type curveF = emulated.Field[emulated.BW6761Fp] +type baseEl = emulated.Element[emulated.BW6761Fp] + +type E3 struct { + A0, A1, A2 baseEl +} + +type Ext3 struct { + api frontend.API + fp *curveF +} + +func NewExt3(api frontend.API) *Ext3 { + fp, err := emulated.NewField[emulated.BW6761Fp](api) + if err != nil { + panic(err) + } + return &Ext3{ + api: api, + fp: fp, + } +} + +func (e Ext3) Reduce(x *E3) *E3 { + var z E3 + z.A0 = *e.fp.Reduce(&x.A0) + z.A1 = *e.fp.Reduce(&x.A1) + z.A2 = *e.fp.Reduce(&x.A2) + return &z +} + +func (e Ext3) Zero() *E3 { + zero := e.fp.Zero() + return &E3{ + A0: *zero, + A1: *zero, + A2: *zero, + } +} + +func (e Ext3) One() *E3 { + one := e.fp.One() + zero := e.fp.Zero() + return &E3{ + A0: *one, + A1: *zero, + A2: *zero, + } +} + +func (e Ext3) Neg(x *E3) *E3 { + a0 := e.fp.Neg(&x.A0) + a1 := e.fp.Neg(&x.A1) + a2 := e.fp.Neg(&x.A2) + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +func (e Ext3) Add(x, y *E3) *E3 { + a0 := e.fp.Add(&x.A0, &y.A0) + a1 := e.fp.Add(&x.A1, &y.A1) + a2 := e.fp.Add(&x.A2, &y.A2) + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +func (e Ext3) Sub(x, y *E3) *E3 { + a0 := e.fp.Sub(&x.A0, &y.A0) + a1 := e.fp.Sub(&x.A1, &y.A1) + a2 := e.fp.Sub(&x.A2, &y.A2) + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +func (e Ext3) Double(x *E3) *E3 { + two := big.NewInt(2) + a0 := e.fp.MulConst(&x.A0, two) + a1 := e.fp.MulConst(&x.A1, two) + a2 := e.fp.MulConst(&x.A2, two) + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +func mulFpByNonResidue(fp *curveF, x *baseEl) *baseEl { + + z := fp.Neg(x) + z = fp.MulConst(z, big.NewInt(4)) + return z +} + +func (e Ext3) Conjugate(x *E3) *E3 { + a1 := e.fp.Neg(&x.A1) + return &E3{ + A0: x.A0, + A1: *a1, + A2: x.A2, + } +} + +func (e Ext3) MulByElement(x *E3, y *baseEl) *E3 { + a0 := e.fp.Mul(&x.A0, y) + a1 := e.fp.Mul(&x.A1, y) + a2 := e.fp.Mul(&x.A2, y) + z := &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } + return z +} + +func (e Ext3) MulByConstElement(x *E3, y *big.Int) *E3 { + a0 := e.fp.MulConst(&x.A0, y) + a1 := e.fp.MulConst(&x.A1, y) + a2 := e.fp.MulConst(&x.A2, y) + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +// MulBy01 multiplication by sparse element 
(c0,c1,0) +func (e Ext3) MulBy01(z *E3, c0, c1 *baseEl) *E3 { + + a := e.fp.Mul(&z.A0, c0) + b := e.fp.Mul(&z.A1, c1) + + tmp := e.fp.Add(&z.A1, &z.A2) + t0 := e.fp.Mul(c1, tmp) + t0 = e.fp.Sub(t0, b) + t0 = mulFpByNonResidue(e.fp, t0) + t0 = e.fp.Add(t0, a) + + tmp = e.fp.Add(&z.A0, &z.A2) + t2 := e.fp.Mul(c0, tmp) + t2 = e.fp.Sub(t2, a) + t2 = e.fp.Add(t2, b) + + t1 := e.fp.Add(c0, c1) + tmp = e.fp.Add(&z.A0, &z.A1) + t1 = e.fp.Mul(t1, tmp) + t1 = e.fp.Sub(t1, a) + t1 = e.fp.Sub(t1, b) + + return &E3{ + A0: *t0, + A1: *t1, + A2: *t2, + } +} + +// MulBy1 multiplication of E6 by sparse element (0, c1, 0) +func (e Ext3) MulBy1(z *E3, c1 *baseEl) *E3 { + + b := e.fp.Mul(&z.A1, c1) + + tmp := e.fp.Add(&z.A1, &z.A2) + t0 := e.fp.Mul(c1, tmp) + t0 = e.fp.Sub(t0, b) + t0 = mulFpByNonResidue(e.fp, t0) + + tmp = e.fp.Add(&z.A0, &z.A1) + t1 := e.fp.Mul(c1, tmp) + t1 = e.fp.Sub(t1, b) + + return &E3{ + A0: *t0, + A1: *t1, + A2: *b, + } +} + +// MulBy12 multiplication by sparse element (0,b1,b2) +func (e Ext3) MulBy12(x *E3, b1, b2 *baseEl) *E3 { + t1 := e.fp.Mul(&x.A1, b1) + t2 := e.fp.Mul(&x.A2, b2) + c0 := e.fp.Add(&x.A1, &x.A2) + tmp := e.fp.Add(b1, b2) + c0 = e.fp.Mul(c0, tmp) + c0 = e.fp.Sub(c0, t1) + c0 = e.fp.Sub(c0, t2) + c0 = mulFpByNonResidue(e.fp, c0) + c1 := e.fp.Add(&x.A0, &x.A1) + c1 = e.fp.Mul(c1, b1) + c1 = e.fp.Sub(c1, t1) + tmp = mulFpByNonResidue(e.fp, t2) + c1 = e.fp.Add(c1, tmp) + tmp = e.fp.Add(&x.A0, &x.A2) + c2 := e.fp.Mul(b2, tmp) + c2 = e.fp.Sub(c2, t2) + c2 = e.fp.Add(c2, t1) + return &E3{ + A0: *c0, + A1: *c1, + A2: *c2, + } +} + +// Mul01By01 multiplies two E3 sparse element of the form: +// +// E3{ +// A0: c0, +// A1: c1, +// A2: 0, +// } +// +// and +// +// E3{ +// A0: d0, +// A1: d1, +// A2: 0, +// } +func (e Ext3) Mul01By01(c0, c1, d0, d1 *baseEl) *E3 { + a := e.fp.Mul(d0, c0) + b := e.fp.Mul(d1, c1) + t0 := e.fp.Mul(c1, d1) + t0 = e.fp.Sub(t0, b) + t0 = mulFpByNonResidue(e.fp, t0) + t0 = e.fp.Add(t0, a) + t2 := e.fp.Mul(c0, d0) + t2 = e.fp.Sub(t2, a) + t2 = e.fp.Add(t2, b) + t1 := e.fp.Add(c0, c1) + tmp := e.fp.Add(d0, d1) + t1 = e.fp.Mul(t1, tmp) + t1 = e.fp.Sub(t1, a) + t1 = e.fp.Sub(t1, b) + return &E3{ + A0: *t0, + A1: *t1, + A2: *t2, + } +} + +func (e Ext3) Mul(x, y *E3) *E3 { + // Algorithm 13 from https://eprint.iacr.org/2010/354.pdf + t0 := e.fp.Mul(&x.A0, &y.A0) + t1 := e.fp.Mul(&x.A1, &y.A1) + t2 := e.fp.Mul(&x.A2, &y.A2) + + c0 := e.fp.Add(&x.A1, &x.A2) + tmp := e.fp.Add(&y.A1, &y.A2) + c0 = e.fp.Mul(c0, tmp) + c0 = e.fp.Sub(c0, t1) + c0 = e.fp.Sub(c0, t2) + c0 = mulFpByNonResidue(e.fp, c0) + + tmp = e.fp.Add(&x.A0, &x.A2) + c2 := e.fp.Add(&y.A0, &y.A2) + c2 = e.fp.Mul(c2, tmp) + c2 = e.fp.Sub(c2, t0) + c2 = e.fp.Sub(c2, t2) + + c1 := e.fp.Add(&x.A0, &x.A1) + tmp = e.fp.Add(&y.A0, &y.A1) + c1 = e.fp.Mul(c1, tmp) + c1 = e.fp.Sub(c1, t0) + c1 = e.fp.Sub(c1, t1) + t2 = mulFpByNonResidue(e.fp, t2) + + a0 := e.fp.Add(c0, t0) + a1 := e.fp.Add(c1, t2) + a2 := e.fp.Add(c2, t1) + + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +func (e Ext3) Square(x *E3) *E3 { + + // Algorithm 16 from https://eprint.iacr.org/2010/354.pdf + + c6 := e.fp.MulConst(&x.A1, big.NewInt(2)) + c4 := e.fp.Mul(&x.A0, c6) // x.A0 * xA1 * 2 + c5 := e.fp.Mul(&x.A2, &x.A2) + c1 := mulFpByNonResidue(e.fp, c5) + c1 = e.fp.Add(c1, c4) + c2 := e.fp.Sub(c4, c5) + + c3 := e.fp.Mul(&x.A0, &x.A0) + c4 = e.fp.Sub(&x.A0, &x.A1) + c4 = e.fp.Add(c4, &x.A2) + c5 = e.fp.Mul(c6, &x.A2) // x.A1 * xA2 * 2 + c4 = e.fp.Mul(c4, c4) + c0 := mulFpByNonResidue(e.fp, c5) + c4 = e.fp.Add(c4, c5) + c4 = 
e.fp.Sub(c4, c3) + + a0 := e.fp.Add(c0, c3) + a1 := c1 + a2 := e.fp.Add(c2, c4) + + return &E3{ + A0: *a0, + A1: *a1, + A2: *a2, + } +} + +func (e Ext3) Inverse(x *E3) *E3 { + res, err := e.fp.NewHint(inverseE3Hint, 3, &x.A0, &x.A1, &x.A2) + if err != nil { + // err is non-nil only for invalid number of inputs + panic(err) + } + + inv := E3{ + A0: *res[0], + A1: *res[1], + A2: *res[2], + } + one := e.One() + + // 1 == inv * x + _one := e.Mul(&inv, x) + e.AssertIsEqual(one, _one) + + return &inv + +} + +func (e Ext3) DivUnchecked(x, y *E3) *E3 { + res, err := e.fp.NewHint(divE3Hint, 6, &x.A0, &x.A1, &x.A2, &y.A0, &y.A1, &y.A2) + if err != nil { + // err is non-nil only for invalid number of inputs + panic(err) + } + + div := E3{ + A0: *res[0], + A1: *res[1], + A2: *res[2], + } + + // x = div * y + _x := e.Mul(&div, y) + e.AssertIsEqual(x, _x) + + return &div + +} + +// MulByNonResidue mul x by (0,1,0) +func (e Ext3) MulByNonResidue(x *E3) *E3 { + z := &E3{ + A0: x.A2, + A1: x.A0, + A2: x.A1, + } + z.A0 = *mulFpByNonResidue(e.fp, &z.A0) + return z +} + +func (e Ext3) AssertIsEqual(a, b *E3) { + e.fp.AssertIsEqual(&a.A0, &b.A0) + e.fp.AssertIsEqual(&a.A1, &b.A1) + e.fp.AssertIsEqual(&a.A2, &b.A2) +} + +func (e Ext3) Copy(x *E3) *E3 { + return &E3{ + A0: x.A0, + A1: x.A1, + A2: x.A2, + } +} + +func FromE3(a *bw6761.E3) E3 { + return E3{ + A0: emulated.ValueOf[emulated.BW6761Fp](a.A0), + A1: emulated.ValueOf[emulated.BW6761Fp](a.A1), + A2: emulated.ValueOf[emulated.BW6761Fp](a.A2), + } +} + +func (e Ext3) Select(selector frontend.Variable, z1, z0 *E3) *E3 { + a0 := e.fp.Select(selector, &z1.A0, &z0.A0) + a1 := e.fp.Select(selector, &z1.A1, &z0.A1) + a2 := e.fp.Select(selector, &z1.A2, &z0.A2) + return &E3{A0: *a0, A1: *a1, A2: *a2} +} diff --git a/std/algebra/emulated/fields_bw6761/e3_test.go b/std/algebra/emulated/fields_bw6761/e3_test.go new file mode 100644 index 0000000000..707be87b67 --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/e3_test.go @@ -0,0 +1,363 @@ +package fields_bw6761 + +import ( + "testing" + + "github.com/consensys/gnark-crypto/ecc" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fp" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/math/emulated" + "github.com/consensys/gnark/test" +) + +type e3Add struct { + A, B, C E3 +} + +func (circuit *e3Add) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Add(&circuit.A, &circuit.B) + e.AssertIsEqual(expected, &circuit.C) + return nil +} + +func TestAddFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E3 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Add(&a, &b) + + witness := e3Add{ + A: FromE3(&a), + B: FromE3(&b), + C: FromE3(&c), + } + + err := test.IsSolved(&e3Add{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Sub struct { + A, B, C E3 +} + +func (circuit *e3Sub) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Sub(&circuit.A, &circuit.B) + e.AssertIsEqual(expected, &circuit.C) + return nil +} + +func TestSubFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E3 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Sub(&a, &b) + + witness := e3Sub{ + A: FromE3(&a), + B: FromE3(&b), + C: FromE3(&c), + } + + err := test.IsSolved(&e3Sub{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Neg struct { + A, B E3 +} + +func (circuit *e3Neg) Define(api frontend.API) error { + e := 
NewExt3(api) + expected := e.Neg(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestNegFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + b.Neg(&a) + + witness := e3Neg{ + A: FromE3(&a), + B: FromE3(&b), + } + + err := test.IsSolved(&e3Neg{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Double struct { + A, B E3 +} + +func (circuit *e3Double) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Double(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestDoubleFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + b.Double(&a) + + witness := e3Double{ + A: FromE3(&a), + B: FromE3(&b), + } + + err := test.IsSolved(&e3Double{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Mul struct { + A, B, C E3 +} + +func (circuit *e3Mul) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Mul(&circuit.A, &circuit.B) + e.AssertIsEqual(expected, &circuit.C) + return nil +} + +func TestMulFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E3 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Mul(&a, &b) + + witness := e3Mul{ + A: FromE3(&a), + B: FromE3(&b), + C: FromE3(&c), + } + + err := test.IsSolved(&e3Mul{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3MulByNonResidue struct { + A, B E3 +} + +func (circuit *e3MulByNonResidue) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.MulByNonResidue(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestMulByNonResidueFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + b.Set(&a) + b.MulByNonResidue(&a) + + witness := e3MulByNonResidue{ + A: FromE3(&a), + B: FromE3(&b), + } + + err := test.IsSolved(&e3MulByNonResidue{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3MulByElement struct { + A E3 + Y baseEl + B E3 +} + +func (circuit *e3MulByElement) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.MulByElement(&circuit.A, &circuit.Y) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestMulByElementFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + var y fp.Element + y.SetRandom() + b.Set(&a) + b.MulByElement(&a, &y) + + witness := e3MulByElement{ + A: FromE3(&a), + Y: emulated.ValueOf[emulated.BW6761Fp](y), + B: FromE3(&b), + } + + err := test.IsSolved(&e3MulByElement{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3MulBy01 struct { + A E3 + C0, C1 baseEl + B E3 +} + +func (circuit *e3MulBy01) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.MulBy01(&circuit.A, &circuit.C0, &circuit.C1) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestMulBy01Fp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + var c0, c1 fp.Element + c0.SetRandom() + c1.SetRandom() + b.Set(&a) + b.MulBy01(&c0, &c1) + + witness := e3MulBy01{ + A: FromE3(&a), + C0: emulated.ValueOf[emulated.BW6761Fp](c0), + C1: emulated.ValueOf[emulated.BW6761Fp](c1), + B: FromE3(&b), + } + + err := test.IsSolved(&e3MulBy01{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Square struct { + A, B E3 +} + +func (circuit 
*e3Square) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Square(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestSquareFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + b.Square(&a) + + witness := e3Square{ + A: FromE3(&a), + B: FromE3(&b), + } + + err := test.IsSolved(&e3Square{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Inverse struct { + A, B E3 +} + +func (circuit *e3Inverse) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Inverse(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestInverseFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + b.Inverse(&a) + + witness := e3Inverse{ + A: FromE3(&a), + B: FromE3(&b), + } + + // add=50605 equals=769 fromBinary=0 mul=50315 sub=558 toBinary=0 + err := test.IsSolved(&e3Inverse{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e3Div struct { + A, B, C E3 +} + +func (circuit *e3Div) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.DivUnchecked(&circuit.A, &circuit.B) + e.AssertIsEqual(expected, &circuit.C) + return nil +} + +func TestDivFp3(t *testing.T) { + + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E3 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Inverse(&b) + c.Mul(&a, &c) + + witness := e3Div{ + A: FromE3(&a), + B: FromE3(&b), + C: FromE3(&c), + } + + err := test.IsSolved(&e3Div{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) + +} + +type e3Conjugate struct { + A, B E3 +} + +func (circuit *e3Conjugate) Define(api frontend.API) error { + e := NewExt3(api) + expected := e.Conjugate(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestConjugateFp3(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E3 + _, _ = a.SetRandom() + b.Conjugate(&a) + + witness := e3Conjugate{ + A: FromE3(&a), + B: FromE3(&b), + } + + err := test.IsSolved(&e3Conjugate{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} diff --git a/std/algebra/emulated/fields_bw6761/e6.go b/std/algebra/emulated/fields_bw6761/e6.go new file mode 100644 index 0000000000..0bf6e38df4 --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/e6.go @@ -0,0 +1,419 @@ +package fields_bw6761 + +import ( + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/math/emulated" +) + +type E6 struct { + B0, B1 E3 +} + +type Ext6 struct { + *Ext3 +} + +func (e Ext6) Reduce(x *E6) *E6 { + var z E6 + z.B0 = *e.Ext3.Reduce(&x.B0) + z.B1 = *e.Ext3.Reduce(&x.B1) + return &z +} + +func NewExt6(api frontend.API) *Ext6 { + return &Ext6{Ext3: NewExt3(api)} +} + +func (e Ext6) Zero() *E6 { + b0 := e.Ext3.Zero() + b1 := e.Ext3.Zero() + return &E6{ + B0: *b0, + B1: *b1, + } +} + +func (e Ext6) One() *E6 { + return &E6{ + B0: *e.Ext3.One(), + B1: *e.Ext3.Zero(), + } +} + +func (e Ext6) Add(x, y *E6) *E6 { + return &E6{ + B0: *e.Ext3.Add(&x.B0, &y.B0), + B1: *e.Ext3.Add(&x.B1, &y.B1), + } +} + +func (e Ext6) Sub(x, y *E6) *E6 { + return &E6{ + B0: *e.Ext3.Sub(&x.B0, &y.B0), + B1: *e.Ext3.Sub(&x.B1, &y.B1), + } +} + +func (e Ext6) Double(x *E6) *E6 { + return &E6{ + B0: *e.Ext3.Double(&x.B0), + B1: *e.Ext3.Double(&x.B1), + } +} + +func (e Ext6) Mul(x, y *E6) *E6 { + x = e.Reduce(x) + y = e.Reduce(y) + + a := e.Ext3.Add(&x.B0, &x.B1) + b 
:= e.Ext3.Add(&y.B0, &y.B1) + a = e.Ext3.Mul(a, b) + b = e.Ext3.Mul(&x.B0, &y.B0) + c := e.Ext3.Mul(&x.B1, &y.B1) + b1 := e.Ext3.Sub(a, b) + b1 = e.Ext3.Sub(b1, c) + b0 := e.Ext3.MulByNonResidue(c) + b0 = e.Ext3.Add(b0, b) + + return &E6{ + B0: *b0, + B1: *b1, + } +} + +func (e Ext6) Square(x *E6) *E6 { + + x = e.Reduce(x) + //Algorithm 22 from https://eprint.iacr.org/2010/354.pdf + c0 := e.Ext3.Sub(&x.B0, &x.B1) + c3 := e.Ext3.MulByNonResidue(&x.B1) + c3 = e.Ext3.Neg(c3) + c3 = e.Ext3.Add(&x.B0, c3) + c2 := e.Ext3.Mul(&x.B0, &x.B1) + c0 = e.Ext3.Mul(c0, c3) + c0 = e.Ext3.Add(c0, c2) + b1 := e.Ext3.Double(c2) + c2 = e.Ext3.MulByNonResidue(c2) + b0 := e.Ext3.Add(c0, c2) + + return &E6{ + B0: *b0, + B1: *b1, + } +} + +// Karabina's compressed cyclotomic square +// https://eprint.iacr.org/2010/542.pdf +// Th. 3.2 with minor modifications to fit our tower +func (e Ext6) CyclotomicSquareCompressed(x *E6) *E6 { + x = e.Reduce(x) + z := e.Copy(x) + + var t [7]*baseEl + + // t0 = g1² + t[0] = e.fp.Mul(&x.B0.A1, &x.B0.A1) + // t1 = g5² + t[1] = e.fp.Mul(&x.B1.A2, &x.B1.A2) + // t5 = g1 + g5 + t[5] = e.fp.Add(&x.B0.A1, &x.B1.A2) + // t2 = (g1 + g5)² + t[2] = e.fp.Mul(t[5], t[5]) + + // t3 = g1² + g5² + t[3] = e.fp.Add(t[0], t[1]) + // t5 = 2 * g1 * g5 + t[5] = e.fp.Sub(t[2], t[3]) + + // t6 = g3 + g2 + t[6] = e.fp.Add(&x.B1.A0, &x.B0.A2) + // t3 = (g3 + g2)² + t[3] = e.fp.Mul(t[6], t[6]) + // t2 = g3² + t[2] = e.fp.Mul(&x.B1.A0, &x.B1.A0) + + // t6 = 2 * nr * g1 * g5 + t[6] = mulFpByNonResidue(e.fp, t[5]) + // t5 = 4 * nr * g1 * g5 + 2 * g3 + t[5] = e.fp.Add(t[6], &x.B1.A0) + t[5] = e.fp.Add(t[5], t[5]) + // z3 = 6 * nr * g1 * g5 + 2 * g3 + z.B1.A0 = *e.fp.Add(t[5], t[6]) + + // t4 = nr * g5² + t[4] = mulFpByNonResidue(e.fp, t[1]) + // t5 = nr * g5² + g1² + t[5] = e.fp.Add(t[0], t[4]) + // t6 = nr * g5² + g1² - g2 + t[6] = e.fp.Sub(t[5], &x.B0.A2) + + // t1 = g2² + t[1] = e.fp.Mul(&x.B0.A2, &x.B0.A2) + + // t6 = 2 * nr * g5² + 2 * g1² - 2*g2 + t[6] = e.fp.Add(t[6], t[6]) + // z2 = 3 * nr * g5² + 3 * g1² - 2*g2 + z.B0.A2 = *e.fp.Add(t[6], t[5]) + + // t4 = nr * g2² + t[4] = mulFpByNonResidue(e.fp, t[1]) + // t5 = g3² + nr * g2² + t[5] = e.fp.Add(t[2], t[4]) + // t6 = g3² + nr * g2² - g1 + t[6] = e.fp.Sub(t[5], &x.B0.A1) + // t6 = 2 * g3² + 2 * nr * g2² - 2 * g1 + t[6] = e.fp.Add(t[6], t[6]) + // z1 = 3 * g3² + 3 * nr * g2² - 2 * g1 + z.B0.A1 = *e.fp.Add(t[6], t[5]) + + // t0 = g2² + g3² + t[0] = e.fp.Add(t[2], t[1]) + // t5 = 2 * g3 * g2 + t[5] = e.fp.Sub(t[3], t[0]) + // t6 = 2 * g3 * g2 + g5 + t[6] = e.fp.Add(t[5], &x.B1.A2) + // t6 = 4 * g3 * g2 + 2 * g5 + t[6] = e.fp.Add(t[6], t[6]) + // z5 = 6 * g3 * g2 + 2 * g5 + z.B1.A2 = *e.fp.Add(t[5], t[6]) + + return z +} + +// DecompressKarabina Karabina's cyclotomic square result +// if g3 != 0 +// +// g4 = (E * g5^2 + 3 * g1^2 - 2 * g2)/4g3 +// +// if g3 == 0 +// +// g4 = 2g1g5/g2 +// +// if g3=g2=0 then g4=g5=g1=0 and g0=1 (x=1) +// Theorem 3.1 is well-defined for all x in Gϕₙ\{1} +func (e Ext6) DecompressKarabina(x *E6) *E6 { + + x = e.Reduce(x) + + var z E6 + + var t [3]*baseEl + var _t [2]*baseEl + one := e.fp.One() + + // if g3 == 0 + // t0 = 2 * g1 * g5 + // t1 = g2 + selector1 := e.fp.IsZero(&x.B1.A0) + _t[0] = e.fp.Mul(&x.B0.A1, &x.B0.A1) + _t[0] = e.fp.Add(_t[0], _t[0]) + _t[1] = &x.B0.A2 + + // if g2 == g3 == 0 + selector2 := e.fp.IsZero(_t[1]) + + // if g3 != 0 + // t0 = E * g5^2 + 3 * g1^2 - 2 * g2 + // t1 = 4 * g3 + t[0] = e.fp.Mul(&x.B0.A1, &x.B0.A1) + t[1] = e.fp.Sub(t[0], &x.B0.A2) + t[1] = e.fp.Add(t[1], t[1]) + t[1] = e.fp.Add(t[1], 
t[0]) + t[2] = e.fp.Mul(&x.B1.A2, &x.B1.A2) + t[0] = mulFpByNonResidue(e.fp, t[2]) + t[0] = e.fp.Add(t[0], t[1]) + t[1] = e.fp.Add(&x.B1.A0, &x.B1.A0) + t[1] = e.fp.Add(t[1], t[1]) + + // g4 = (E * g5^2 + 3 * g1^2 - 2 * g2)/4g3 or (2 * g1 * g5)/g2 + t[0] = e.fp.Select(selector1, _t[0], t[0]) + t[1] = e.fp.Select(selector1, _t[1], t[1]) + // g4 = dummy value, continue + t[1] = e.fp.Select(selector2, one, t[1]) + + z.B1.A1 = *e.fp.Div(t[0], t[1]) + + // Rest of the computation for all cases + // t1 = g2 * g1 + t[1] = e.fp.Mul(&x.B0.A2, &x.B0.A1) + // t2 = 2 * g4² - 3 * g2 * g1 + t[2] = e.fp.Mul(&z.B1.A1, &z.B1.A1) + t[2] = e.fp.Sub(t[2], t[1]) + t[2] = e.fp.Add(t[2], t[2]) + t[2] = e.fp.Sub(t[2], t[1]) + // t1 = g3 * g5 (g3 can be 0) + t[1] = e.fp.Mul(&x.B1.A0, &x.B1.A2) + // c₀ = E * (2 * g4² + g3 * g5 - 3 * g2 * g1) + 1 + t[2] = e.fp.Add(t[2], t[1]) + + z.B0.A0 = *mulFpByNonResidue(e.fp, t[2]) + z.B0.A0 = *e.fp.Add(&z.B0.A0, one) + + z.B0.A1 = x.B0.A1 + z.B0.A2 = x.B0.A2 + z.B1.A0 = x.B1.A0 + z.B1.A2 = x.B1.A2 + + return e.Select(e.api.And(selector1, selector2), e.One(), &z) +} + +// Granger-Scott's cyclotomic square +// https://eprint.iacr.org/2009/565.pdf, 3.2 +func (e Ext6) CyclotomicSquare(x *E6) *E6 { + // x=(x0,x1,x2,x3,x4,x5,x6,x7) in E3⁶ + // cyclosquare(x)=(3*x4²*u + 3*x0² - 2*x0, + // 3*x2²*u + 3*x3² - 2*x1, + // 3*x5²*u + 3*x1² - 2*x2, + // 6*x1*x5*u + 2*x3, + // 6*x0*x4 + 2*x4, + // 6*x2*x3 + 2*x5) + + x = e.Reduce(x) + + var t [9]*baseEl + + t[0] = e.fp.Mul(&x.B1.A1, &x.B1.A1) + t[1] = e.fp.Mul(&x.B0.A0, &x.B0.A0) + t[6] = e.fp.Add(&x.B1.A1, &x.B0.A0) + t[6] = e.fp.Mul(t[6], t[6]) + t[6] = e.fp.Sub(t[6], t[0]) + t[6] = e.fp.Sub(t[6], t[1]) // 2*x4*x0 + t[2] = e.fp.Mul(&x.B0.A2, &x.B0.A2) + t[3] = e.fp.Mul(&x.B1.A0, &x.B1.A0) + t[7] = e.fp.Add(&x.B0.A2, &x.B1.A0) + t[7] = e.fp.Mul(t[7], t[7]) + t[7] = e.fp.Sub(t[7], t[2]) + t[7] = e.fp.Sub(t[7], t[3]) // 2*x2*x3 + t[4] = e.fp.Mul(&x.B1.A2, &x.B1.A2) + t[5] = e.fp.Mul(&x.B0.A1, &x.B0.A1) + t[8] = e.fp.Add(&x.B1.A2, &x.B0.A1) + t[8] = e.fp.Mul(t[8], t[8]) + t[8] = e.fp.Sub(t[8], t[4]) + t[8] = e.fp.Sub(t[8], t[5]) + t[8] = mulFpByNonResidue(e.fp, t[8]) // 2*x5*x1*u + + t[0] = mulFpByNonResidue(e.fp, t[0]) + t[0] = e.fp.Add(t[0], t[1]) // x4²*u + x0² + t[2] = mulFpByNonResidue(e.fp, t[2]) + t[2] = e.fp.Add(t[2], t[3]) // x2²*u + x3² + t[4] = mulFpByNonResidue(e.fp, t[4]) + t[4] = e.fp.Add(t[4], t[5]) // x5²*u + x1² + + var z E6 + z.B0.A0 = *e.fp.Sub(t[0], &x.B0.A0) + z.B0.A0 = *e.fp.Add(&z.B0.A0, &z.B0.A0) + z.B0.A0 = *e.fp.Add(&z.B0.A0, t[0]) + z.B0.A1 = *e.fp.Sub(t[2], &x.B0.A1) + z.B0.A1 = *e.fp.Add(&z.B0.A1, &z.B0.A1) + z.B0.A1 = *e.fp.Add(&z.B0.A1, t[2]) + z.B0.A2 = *e.fp.Sub(t[4], &x.B0.A2) + z.B0.A2 = *e.fp.Add(&z.B0.A2, &z.B0.A2) + z.B0.A2 = *e.fp.Add(&z.B0.A2, t[4]) + + z.B1.A0 = *e.fp.Add(t[8], &x.B1.A0) + z.B1.A0 = *e.fp.Add(&z.B1.A0, &z.B1.A0) + z.B1.A0 = *e.fp.Add(&z.B1.A0, t[8]) + z.B1.A1 = *e.fp.Add(t[6], &x.B1.A1) + z.B1.A1 = *e.fp.Add(&z.B1.A1, &z.B1.A1) + z.B1.A1 = *e.fp.Add(&z.B1.A1, t[6]) + z.B1.A2 = *e.fp.Add(t[7], &x.B1.A2) + z.B1.A2 = *e.fp.Add(&z.B1.A2, &z.B1.A2) + z.B1.A2 = *e.fp.Add(&z.B1.A2, t[7]) + + return &z +} + +func (e Ext6) Inverse(x *E6) *E6 { + res, err := e.fp.NewHint(inverseE6Hint, 6, &x.B0.A0, &x.B0.A1, &x.B0.A2, &x.B1.A0, &x.B1.A1, &x.B1.A2) + if err != nil { + // err is non-nil only for invalid number of inputs + panic(err) + } + + inv := E6{ + B0: E3{A0: *res[0], A1: *res[1], A2: *res[2]}, + B1: E3{A0: *res[3], A1: *res[4], A2: *res[5]}, + } + one := e.One() + + // 1 == inv * x + _one := 
e.Mul(&inv, x) + e.AssertIsEqual(one, _one) + + return &inv + +} + +func (e Ext6) DivUnchecked(x, y *E6) *E6 { + res, err := e.fp.NewHint(divE6Hint, 12, &x.B0.A0, &x.B0.A1, &x.B0.A2, &x.B1.A0, &x.B1.A1, &x.B1.A2, &y.B0.A0, &y.B0.A1, &y.B0.A2, &y.B1.A0, &y.B1.A1, &y.B1.A2) + if err != nil { + // err is non-nil only for invalid number of inputs + panic(err) + } + + div := E6{ + B0: E3{A0: *res[0], A1: *res[1], A2: *res[2]}, + B1: E3{A0: *res[3], A1: *res[4], A2: *res[5]}, + } + + // x = div * y + _x := e.Mul(&div, y) + e.AssertIsEqual(x, _x) + + return &div + +} + +func (e Ext6) Conjugate(x *E6) *E6 { + return &E6{ + B0: x.B0, + B1: *e.Ext3.Neg(&x.B1), + } +} + +func (e Ext6) AssertIsEqual(a, b *E6) { + e.Ext3.AssertIsEqual(&a.B0, &b.B0) + e.Ext3.AssertIsEqual(&a.B1, &b.B1) +} + +func (e Ext6) Copy(x *E6) *E6 { + b0 := e.Ext3.Copy(&x.B0) + b1 := e.Ext3.Copy(&x.B1) + return &E6{ + B0: *b0, + B1: *b1, + } +} + +func FromE6(a *bw6761.E6) E6 { + return E6{ + B0: FromE3(&a.B0), + B1: FromE3(&a.B1), + } +} + +// Frobenius set z in E6 to Frobenius(x), return z +func (e Ext6) Frobenius(x *E6) *E6 { + _frobA := emulated.ValueOf[emulated.BW6761Fp]("4922464560225523242118178942575080391082002530232324381063048548642823052024664478336818169867474395270858391911405337707247735739826664939444490469542109391530482826728203582549674992333383150446779312029624171857054392282775648") + _frobB := emulated.ValueOf[emulated.BW6761Fp]("1968985824090209297278610739700577151397666382303825728450741611566800370218827257750865013421937292370006175842381275743914023380727582819905021229583192207421122272650305267822868639090213645505120388400344940985710520836292650") + _frobC := emulated.ValueOf[emulated.BW6761Fp]("4922464560225523242118178942575080391082002530232324381063048548642823052024664478336818169867474395270858391911405337707247735739826664939444490469542109391530482826728203582549674992333383150446779312029624171857054392282775649") + _frobAC := emulated.ValueOf[emulated.BW6761Fp]("-1") + _frobBC := emulated.ValueOf[emulated.BW6761Fp]("1968985824090209297278610739700577151397666382303825728450741611566800370218827257750865013421937292370006175842381275743914023380727582819905021229583192207421122272650305267822868639090213645505120388400344940985710520836292651") + var z E6 + z.B0.A0 = x.B0.A0 + z.B0.A1 = *e.fp.Mul(&x.B0.A1, &_frobA) + z.B0.A2 = *e.fp.Mul(&x.B0.A2, &_frobB) + + z.B1.A0 = *e.fp.Mul(&x.B1.A0, &_frobC) + z.B1.A1 = *e.fp.Mul(&x.B1.A1, &_frobAC) + z.B1.A2 = *e.fp.Mul(&x.B1.A2, &_frobBC) + + return &z +} + +func (e Ext6) Select(selector frontend.Variable, z1, z0 *E6) *E6 { + b0 := e.Ext3.Select(selector, &z1.B0, &z0.B0) + b1 := e.Ext3.Select(selector, &z1.B1, &z0.B1) + return &E6{B0: *b0, B1: *b1} +} diff --git a/std/algebra/emulated/fields_bw6761/e6_pairing.go b/std/algebra/emulated/fields_bw6761/e6_pairing.go new file mode 100644 index 0000000000..434483d1bd --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/e6_pairing.go @@ -0,0 +1,241 @@ +package fields_bw6761 + +func (e Ext6) nSquareCompressed(z *E6, n int) *E6 { + for i := 0; i < n; i++ { + z = e.CyclotomicSquareCompressed(z) + } + return z +} + +// ExpX0Minus1 set z to z^{x₀-1} in E6 and return z +// x₀-1 = 91893752504881257682351033800651177983 +func (e Ext6) ExpX0Minus1(z *E6) *E6 { + z = e.Reduce(z) + result := e.Copy(z) + result = e.nSquareCompressed(result, 5) + result = e.DecompressKarabina(result) + result = e.Mul(result, z) + z33 := e.Copy(result) + result = e.nSquareCompressed(result, 7) + result = e.DecompressKarabina(result) 
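+	// nSquareCompressed applies n Karabina compressed cyclotomic squarings;
+	// DecompressKarabina recovers the full E6 element before it is folded back
+	// into the addition chain.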
+ result = e.Mul(result, z33) + result = e.nSquareCompressed(result, 4) + result = e.DecompressKarabina(result) + result = e.Mul(result, z) + result = e.CyclotomicSquare(result) + result = e.Mul(result, z) + result = e.nSquareCompressed(result, 46) + result = e.DecompressKarabina(result) + + return result +} + +// ExpX0Minus1Square set z to z^{(x₀-1)²} in E6 and return z +// (x₀-1)² = 91893752504881257682351033800651177984 +func (e Ext6) ExpX0Minus1Square(z *E6) *E6 { + z = e.Reduce(z) + result := e.Copy(z) + result = e.CyclotomicSquare(result) + t0 := e.Mul(z, result) + t1 := e.CyclotomicSquare(t0) + t0 = e.Mul(t0, t1) + result = e.Mul(result, t0) + t1 = e.Mul(t1, result) + t0 = e.Mul(t0, t1) + t2 := e.CyclotomicSquare(t0) + t2 = e.Mul(t1, t2) + t0 = e.Mul(t0, t2) + t2 = e.nSquareCompressed(t2, 7) + t2 = e.DecompressKarabina(t2) + t1 = e.Mul(t1, t2) + t1 = e.nSquareCompressed(t1, 11) + t1 = e.DecompressKarabina(t1) + t1 = e.Mul(t0, t1) + t1 = e.nSquareCompressed(t1, 9) + t1 = e.DecompressKarabina(t1) + t0 = e.Mul(t0, t1) + t0 = e.CyclotomicSquare(t0) + result = e.Mul(result, t0) + result = e.nSquareCompressed(result, 92) + result = e.DecompressKarabina(result) + + return result + +} + +// ExpX0Plus1 set z to z^(x₀+1) in E6 and return z +// x₀+1 = 91893752504881257682351033800651177985 +func (e Ext6) ExpX0Plus1(z *E6) *E6 { + result := e.ExpX0Minus1(z) + t := e.CyclotomicSquare(z) + result = e.Mul(result, t) + return result +} + +// ExpX0Minus1Div3 set z to z^(x₀-1)/3 in E6 and return z +// (x₀-1)/3 = 3195374304363544576 +func (e Ext6) ExptMinus1Div3(z *E6) *E6 { + z = e.Reduce(z) + result := e.Copy(z) + result = e.CyclotomicSquare(result) + result = e.Mul(result, z) + t0 := e.Mul(result, z) + t0 = e.CyclotomicSquare(t0) + result = e.Mul(result, t0) + t0 = result + t0 = e.nSquareCompressed(t0, 7) + t0 = e.DecompressKarabina(t0) + result = e.Mul(result, t0) + result = e.nSquareCompressed(result, 5) + result = e.DecompressKarabina(result) + result = e.Mul(result, z) + result = e.nSquareCompressed(result, 46) + result = e.DecompressKarabina(result) + + return result +} + +// ExpC1 set z to z^C1 in E6 and return z +// ht, hy = 13, 9 +// C1 = (ht+hy)/2 = 11 +func (e Ext6) ExpC1(z *E6) *E6 { + z = e.Reduce(z) + result := e.CyclotomicSquare(z) + result = e.Mul(result, z) + t0 := e.Mul(z, result) + t0 = e.CyclotomicSquare(t0) + result = e.Mul(result, t0) + + return result +} + +// ExpC2 set z to z^C2 in E6 and return z +// ht, hy = 13, 9 +// C2 = (ht**2+3*hy**2)/4 = 103 +func (e Ext6) ExpC2(z *E6) *E6 { + z = e.Reduce(z) + + result := e.CyclotomicSquare(z) + result = e.Mul(result, z) + t0 := result + t0 = e.nSquareCompressed(t0, 4) + t0 = e.DecompressKarabina(t0) + result = e.Mul(result, t0) + result = e.CyclotomicSquare(result) + result = e.Mul(result, z) + + return result +} + +// MulBy014 multiplies z by an E6 sparse element of the form +// +// E6{ +// B0: E3{A0: c0, A1: c1, A2: 0}, +// B1: E3{A0: 0, A1: 1, A2: 0}, +// } +func (e *Ext6) MulBy014(z *E6, c0, c1 *baseEl) *E6 { + z = e.Reduce(z) + + a := e.MulBy01(&z.B0, c0, c1) + + var b E3 + // Mul by E3{0, 1, 0} + b.A0 = *mulFpByNonResidue(e.fp, &z.B1.A2) + b.A2 = z.B1.A1 + b.A1 = z.B1.A0 + + one := e.fp.One() + d := e.fp.Add(c1, one) + + zC1 := e.Ext3.Add(&z.B1, &z.B0) + zC1 = e.Ext3.MulBy01(zC1, c0, d) + zC1 = e.Ext3.Sub(zC1, a) + zC1 = e.Ext3.Sub(zC1, &b) + zC0 := e.Ext3.MulByNonResidue(&b) + zC0 = e.Ext3.Add(zC0, a) + + return &E6{ + B0: *zC0, + B1: *zC1, + } +} + +// multiplies two E6 sparse element of the form: +// +// E6{ +// B0: E3{A0: 
c0, A1: c1, A2: 0}, +// B1: E3{A0: 0, A1: 1, A2: 0}, +// } +// +// and +// +// E6{ +// B0: E3{A0: d0, A1: d1, A2: 0}, +// B1: E3{A0: 0, A1: 1, A2: 0}, +// } +func (e Ext6) Mul014By014(d0, d1, c0, c1 *baseEl) [5]*baseEl { + one := e.fp.One() + x0 := e.fp.Mul(c0, d0) + x1 := e.fp.Mul(c1, d1) + tmp := e.fp.Add(c0, one) + x04 := e.fp.Add(d0, one) + x04 = e.fp.Mul(x04, tmp) + x04 = e.fp.Sub(x04, x0) + x04 = e.fp.Sub(x04, one) + tmp = e.fp.Add(c0, c1) + x01 := e.fp.Add(d0, d1) + x01 = e.fp.Mul(x01, tmp) + x01 = e.fp.Sub(x01, x0) + x01 = e.fp.Sub(x01, x1) + tmp = e.fp.Add(c1, one) + x14 := e.fp.Add(d1, one) + x14 = e.fp.Mul(x14, tmp) + x14 = e.fp.Sub(x14, x1) + x14 = e.fp.Sub(x14, one) + + zC0B0 := e.fp.Add(one, one) + zC0B0 = e.fp.Add(zC0B0, zC0B0) + zC0B0 = e.fp.Neg(zC0B0) + + zC0B0 = e.fp.Add(zC0B0, x0) + + return [5]*baseEl{zC0B0, x01, x1, x04, x14} +} + +// Mul01245By014 multiplies two E6 sparse element of the form +// +// E6{ +// C0: E3{B0: x0, B1: x1, B2: x2}, +// C1: E3{B0: 0, B1: x4, B2: x5}, +// } +// +// and +// +// E6{ +// C0: E3{B0: d0, B1: d1, B2: 0}, +// C1: E3{B0: 0, B1: 1, B2: 0}, +// } +func (e *Ext6) Mul01245By014(x [5]*baseEl, d0, d1 *baseEl) *E6 { + zero := e.fp.Zero() + c0 := &E3{A0: *x[0], A1: *x[1], A2: *x[2]} + b := &E3{ + A0: *x[0], + A1: *e.fp.Add(x[1], x[3]), + A2: *e.fp.Add(x[2], x[4]), + } + a := e.Ext3.MulBy01(b, d0, e.fp.Add(d1, e.fp.One())) + b = e.Ext3.MulBy01(c0, d0, d1) + c := &E3{ + A0: *mulFpByNonResidue(e.fp, x[4]), + A1: *zero, + A2: *x[3], + } + z1 := e.Ext3.Sub(a, b) + z1 = e.Ext3.Sub(z1, c) + z0 := e.Ext3.MulByNonResidue(c) + z0 = e.Ext3.Add(z0, b) + return &E6{ + B0: *z0, + B1: *z1, + } +} diff --git a/std/algebra/emulated/fields_bw6761/e6_test.go b/std/algebra/emulated/fields_bw6761/e6_test.go new file mode 100644 index 0000000000..c4a557afaa --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/e6_test.go @@ -0,0 +1,404 @@ +package fields_bw6761 + +import ( + "testing" + + "github.com/consensys/gnark-crypto/ecc" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fp" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/math/emulated" + "github.com/consensys/gnark/test" +) + +type e6Add struct { + A, B, C E6 +} + +func (circuit *e6Add) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Add(&circuit.A, &circuit.B) + e.AssertIsEqual(&expected, &circuit.C) + return nil +} + +func TestAddFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E6 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Add(&a, &b) + + witness := e6Add{ + A: FromE6(&a), + B: FromE6(&b), + C: FromE6(&c), + } + + err := test.IsSolved(&e6Add{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Sub struct { + A, B, C E6 +} + +func (circuit *e6Sub) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Sub(&circuit.A, &circuit.B) + e.AssertIsEqual(&expected, &circuit.C) + return nil +} + +func TestSubFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E6 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Sub(&a, &b) + + witness := e6Sub{ + A: FromE6(&a), + B: FromE6(&b), + C: FromE6(&c), + } + + err := test.IsSolved(&e6Sub{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Double struct { + A, B E6 +} + +func (circuit *e6Double) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Double(&circuit.A) 
+ e.AssertIsEqual(&expected, &circuit.B) + return nil +} + +func TestDoubleFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Double(&a) + + witness := e6Double{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6Double{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Mul struct { + A, B, C E6 +} + +func (circuit *e6Mul) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Mul(&circuit.A, &circuit.B) + e.AssertIsEqual(&expected, &circuit.C) + return nil +} + +func TestMulFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E6 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Mul(&a, &b) + + witness := e6Mul{ + A: FromE6(&a), + B: FromE6(&b), + C: FromE6(&c), + } + + err := test.IsSolved(&e6Mul{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Square struct { + A, B E6 +} + +func (circuit *e6Square) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Square(&circuit.A) + e.AssertIsEqual(&expected, &circuit.B) + return nil +} + +func TestSquareFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Square(&a) + + witness := e6Square{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6Square{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Inverse struct { + A, B E6 +} + +func (circuit *e6Inverse) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Inverse(&circuit.A) + e.AssertIsEqual(&expected, &circuit.B) + return nil +} + +func TestInverseFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Inverse(&a) + + witness := e6Inverse{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6Inverse{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Div struct { + A, B, C E6 +} + +func (circuit *e6Div) Define(api frontend.API) error { + e := NewExt6(api) + expected := e.DivUnchecked(&circuit.A, &circuit.B) + e.AssertIsEqual(expected, &circuit.C) + return nil +} + +func TestDivFp6(t *testing.T) { + + assert := test.NewAssert(t) + // witness values + var a, b, c bw6761.E6 + _, _ = a.SetRandom() + _, _ = b.SetRandom() + c.Inverse(&b) + c.Mul(&a, &c) + + witness := e6Div{ + A: FromE6(&a), + B: FromE6(&b), + C: FromE6(&c), + } + + err := test.IsSolved(&e6Div{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) + +} + +type e6Conjugate struct { + A, B E6 +} + +func (circuit *e6Conjugate) Define(api frontend.API) error { + var expected E6 + e := NewExt6(api) + expected = *e.Conjugate(&circuit.A) + e.AssertIsEqual(&expected, &circuit.B) + return nil +} + +func TestConjugateFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Conjugate(&a) + + witness := e6Conjugate{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6Conjugate{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6CyclotomicSquareCompressed struct { + A, B E6 +} + +func (circuit *e6CyclotomicSquareCompressed) Define(api frontend.API) error { + e := NewExt6(api) + expected := e.CyclotomicSquareCompressed(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestCyclotomicSquareCompressedFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values 
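+	// the expected value is computed out-of-circuit with gnark-crypto and then
+	// checked against the in-circuit computation by test.IsSolved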
+ var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Set(&a) + b.CyclotomicSquareCompressed(&a) + + witness := e6CyclotomicSquareCompressed{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6CyclotomicSquareCompressed{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6DecompressKarabina struct { + A, B E6 +} + +func (circuit *e6DecompressKarabina) Define(api frontend.API) error { + e := NewExt6(api) + expected := e.DecompressKarabina(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestDecompressKarabinaFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Set(&a) + a.DecompressKarabina(&a) + + witness := e6DecompressKarabina{ + A: FromE6(&b), + B: FromE6(&a), + } + + err := test.IsSolved(&e6DecompressKarabina{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6CyclotomicSquare struct { + A, B E6 +} + +func (circuit *e6CyclotomicSquare) Define(api frontend.API) error { + e := NewExt6(api) + expected := e.CyclotomicSquare(&circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestCyclotomicSquareFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + b.Set(&a) + b.CyclotomicSquare(&a) + + witness := e6CyclotomicSquare{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6CyclotomicSquare{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6Expt struct { + A, B E6 +} + +func (circuit *e6Expt) Define(api frontend.API) error { + e := NewExt6(api) + expected := e.ExpX0Minus1(&circuit.A) + expected = e.Mul(expected, &circuit.A) + e.AssertIsEqual(expected, &circuit.B) + return nil +} + +func TestExptFp6(t *testing.T) { + assert := test.NewAssert(t) + // witness values + var a, b bw6761.E6 + _, _ = a.SetRandom() + + // put a in the cyclotomic subgroup + var tmp bw6761.E6 + tmp.Conjugate(&a) + a.Inverse(&a) + tmp.Mul(&tmp, &a) + a.Frobenius(&tmp).Mul(&a, &tmp) + + b.Expt(&a) + + witness := e6Expt{ + A: FromE6(&a), + B: FromE6(&b), + } + + err := test.IsSolved(&e6Expt{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type e6MulBy014 struct { + A E6 `gnark:",public"` + W E6 + B, C baseEl +} + +func (circuit *e6MulBy014) Define(api frontend.API) error { + e := NewExt6(api) + res := e.MulBy014(&circuit.A, &circuit.B, &circuit.C) + e.AssertIsEqual(res, &circuit.W) + return nil +} + +func TestFp6MulBy014(t *testing.T) { + + assert := test.NewAssert(t) + // witness values + var a, w bw6761.E6 + _, _ = a.SetRandom() + var one, b, c fp.Element + one.SetOne() + _, _ = b.SetRandom() + _, _ = c.SetRandom() + w.Set(&a) + w.MulBy014(&b, &c, &one) + + witness := e6MulBy014{ + A: FromE6(&a), + B: emulated.ValueOf[emulated.BW6761Fp](&b), + C: emulated.ValueOf[emulated.BW6761Fp](&c), + W: FromE6(&w), + } + + err := test.IsSolved(&e6MulBy014{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) + +} diff --git a/std/algebra/emulated/fields_bw6761/hints.go b/std/algebra/emulated/fields_bw6761/hints.go new file mode 100644 index 0000000000..5c100d085c --- /dev/null +++ b/std/algebra/emulated/fields_bw6761/hints.go @@ -0,0 +1,124 @@ +package fields_bw6761 + +import ( + "math/big" + + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/constraint/solver" + "github.com/consensys/gnark/std/math/emulated" +) + +func init() { + solver.RegisterHint(GetHints()...) 
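+	// registering at package init time makes these hints resolvable by the solver
+	// wherever this package is imported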
+} + +// GetHints returns all hint functions used in the package. +func GetHints() []solver.Hint { + return []solver.Hint{ + // E3 + divE3Hint, + inverseE3Hint, + // E6 + divE6Hint, + inverseE6Hint, + } +} + +// E3 +func inverseE3Hint(nativeMod *big.Int, nativeInputs, nativeOutputs []*big.Int) error { + return emulated.UnwrapHint(nativeInputs, nativeOutputs, + func(mod *big.Int, inputs, outputs []*big.Int) error { + var a, c bw6761.E3 + + a.A0.SetBigInt(inputs[0]) + a.A1.SetBigInt(inputs[1]) + a.A2.SetBigInt(inputs[2]) + + c.Inverse(&a) + + c.A0.BigInt(outputs[0]) + c.A1.BigInt(outputs[1]) + c.A2.BigInt(outputs[2]) + + return nil + }) +} + +func divE3Hint(nativeMod *big.Int, nativeInputs, nativeOutputs []*big.Int) error { + return emulated.UnwrapHint(nativeInputs, nativeOutputs, + func(mod *big.Int, inputs, outputs []*big.Int) error { + var a, b, c bw6761.E3 + + a.A0.SetBigInt(inputs[0]) + a.A1.SetBigInt(inputs[1]) + a.A2.SetBigInt(inputs[2]) + b.A0.SetBigInt(inputs[3]) + b.A1.SetBigInt(inputs[4]) + b.A2.SetBigInt(inputs[5]) + + c.Inverse(&b).Mul(&c, &a) + + c.A0.BigInt(outputs[0]) + c.A1.BigInt(outputs[1]) + c.A2.BigInt(outputs[2]) + + return nil + }) +} + +// E6 +func inverseE6Hint(nativeMod *big.Int, nativeInputs, nativeOutputs []*big.Int) error { + return emulated.UnwrapHint(nativeInputs, nativeOutputs, + func(mod *big.Int, inputs, outputs []*big.Int) error { + var a, c bw6761.E6 + + a.B0.A0.SetBigInt(inputs[0]) + a.B0.A1.SetBigInt(inputs[1]) + a.B0.A2.SetBigInt(inputs[2]) + a.B1.A0.SetBigInt(inputs[3]) + a.B1.A1.SetBigInt(inputs[4]) + a.B1.A2.SetBigInt(inputs[5]) + + c.Inverse(&a) + + c.B0.A0.BigInt(outputs[0]) + c.B0.A1.BigInt(outputs[1]) + c.B0.A2.BigInt(outputs[2]) + c.B1.A0.BigInt(outputs[3]) + c.B1.A1.BigInt(outputs[4]) + c.B1.A2.BigInt(outputs[5]) + + return nil + }) +} + +func divE6Hint(nativeMod *big.Int, nativeInputs, nativeOutputs []*big.Int) error { + return emulated.UnwrapHint(nativeInputs, nativeOutputs, + func(mod *big.Int, inputs, outputs []*big.Int) error { + var a, b, c bw6761.E6 + + a.B0.A0.SetBigInt(inputs[0]) + a.B0.A1.SetBigInt(inputs[1]) + a.B0.A2.SetBigInt(inputs[2]) + a.B1.A0.SetBigInt(inputs[3]) + a.B1.A1.SetBigInt(inputs[4]) + a.B1.A2.SetBigInt(inputs[5]) + b.B0.A0.SetBigInt(inputs[6]) + b.B0.A1.SetBigInt(inputs[7]) + b.B0.A2.SetBigInt(inputs[8]) + b.B1.A0.SetBigInt(inputs[9]) + b.B1.A1.SetBigInt(inputs[10]) + b.B1.A2.SetBigInt(inputs[11]) + + c.Inverse(&b).Mul(&c, &a) + + c.B0.A0.BigInt(outputs[0]) + c.B0.A1.BigInt(outputs[1]) + c.B0.A2.BigInt(outputs[2]) + c.B1.A0.BigInt(outputs[3]) + c.B1.A1.BigInt(outputs[4]) + c.B1.A2.BigInt(outputs[5]) + + return nil + }) +} diff --git a/std/algebra/emulated/sw_bls12381/doc_test.go b/std/algebra/emulated/sw_bls12381/doc_test.go index a1ce0f5ca5..45b32bf24e 100644 --- a/std/algebra/emulated/sw_bls12381/doc_test.go +++ b/std/algebra/emulated/sw_bls12381/doc_test.go @@ -53,38 +53,26 @@ func ExamplePairing() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - 
fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } } diff --git a/std/algebra/emulated/sw_bls12381/g1.go b/std/algebra/emulated/sw_bls12381/g1.go index f7908b5ded..72ae2af86e 100644 --- a/std/algebra/emulated/sw_bls12381/g1.go +++ b/std/algebra/emulated/sw_bls12381/g1.go @@ -4,13 +4,22 @@ import ( "fmt" bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" + fr_bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/std/algebra/emulated/sw_emulated" "github.com/consensys/gnark/std/math/emulated" ) +// G1Affine is the point in G1. It is an alias to the generic emulated affine +// point. type G1Affine = sw_emulated.AffinePoint[emulated.BLS12381Fp] +// Scalar is the scalar in the groups. It is an alias to the emulated element +// defined over the scalar field of the groups. +type Scalar = emulated.Element[emulated.BLS12381Fr] + +// NewG1Affine allocates a witness from the native G1 element and returns it. + func NewG1Affine(v bls12381.G1Affine) G1Affine { return G1Affine{ X: emulated.ValueOf[emulated.BLS12381Fp](v.X), @@ -43,3 +52,8 @@ func (g1 *G1) phi(q *G1Affine) *G1Affine { Y: q.Y, } } + +// NewScalar allocates a witness from the native scalar and returns it. +func NewScalar(v fr_bls12381.Element) Scalar { + return emulated.ValueOf[emulated.BLS12381Fr](v) +} diff --git a/std/algebra/emulated/sw_bls12381/pairing.go b/std/algebra/emulated/sw_bls12381/pairing.go index 9f4996d457..570d558da1 100644 --- a/std/algebra/emulated/sw_bls12381/pairing.go +++ b/std/algebra/emulated/sw_bls12381/pairing.go @@ -150,7 +150,10 @@ func (pr Pairing) finalExponentiation(e *GTEl, unsafe bool) *GTEl { // the case, the result is 1 in the torus. We assign a dummy value (1) to e.C1 // and proceed further. 
selector1 = pr.Ext6.IsZero(&e.C1) - e.C1 = *pr.Ext6.Select(selector1, _dummy, &e.C1) + e = &fields_bls12381.E12{ + C0: e.C0, + C1: *pr.Ext6.Select(selector1, _dummy, &e.C1), + } } // Torus compression absorbed: @@ -346,7 +349,7 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // i = 62, separately to avoid an E12 Square // (Square(res) = 1² = 1) - // k = 0, separately to avoid MulBy034 (res × ℓ) + // k = 0, separately to avoid MulBy014 (res × ℓ) // Qacc[k] ← 3Qacc[k], // l1 the tangent ℓ to 2Q[k] @@ -358,15 +361,24 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { res.C0.B0 = *pr.MulByElement(&l1.R1, yInv[0]) res.C1.B1 = *pr.Ext2.One() // line evaluation at P[0] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[0]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[0]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[0]), + R1: *pr.MulByElement(&l2.R1, yInv[0]), + } // res = ℓ × ℓ - prodLines := *pr.Mul014By014(&l2.R1, &l2.R0, &res.C0.B0, &res.C0.B1) - res.C0.B0 = prodLines[0] - res.C0.B1 = prodLines[1] - res.C0.B2 = prodLines[2] - res.C1.B1 = prodLines[3] - res.C1.B2 = prodLines[4] + prodLines := pr.Mul014By014(&l2.R1, &l2.R0, &res.C0.B0, &res.C0.B1) + res = &fields_bls12381.E12{ + C0: fields_bls12381.E6{ + B0: *prodLines[0], + B1: *prodLines[1], + B2: *prodLines[2], + }, + C1: fields_bls12381.E6{ + B0: res.C1.B0, + B1: *prodLines[3], + B2: *prodLines[4], + }, + } for k := 1; k < n; k++ { // Qacc[k] ← 3Qacc[k], @@ -374,15 +386,19 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // l2 the line ℓ passing 2Q[k] and Q[k] Qacc[k], l1, l2 = pr.tripleStep(Qacc[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // line evaluation at P[k] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[k]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[k]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l2.R1, yInv[k]), + } // ℓ × ℓ - prodLines = *pr.Mul014By014(&l1.R1, &l1.R0, &l2.R1, &l2.R0) + prodLines = pr.Mul014By014(&l1.R1, &l1.R0, &l2.R1, &l2.R0) // (ℓ × ℓ) × res - res = pr.MulBy01245(res, &prodLines) + res = pr.MulBy01245(res, prodLines) } @@ -397,8 +413,10 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // Qacc[k] ← 2Qacc[k] and l1 the tangent ℓ passing 2Qacc[k] Qacc[k], l1 = pr.doubleStep(Qacc[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // ℓ × res res = pr.MulBy014(res, &l1.R1, &l1.R0) } @@ -409,15 +427,19 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // l2 the line ℓ passing (Qacc[k]+Q[k]) and Qacc[k] Qacc[k], l1, l2 = pr.doubleAndAddStep(Qacc[k], Q[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // line evaluation at P[k] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[k]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[k]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l2.R1, yInv[k]), + } // ℓ × ℓ - prodLines = 
*pr.Mul014By014(&l1.R1, &l1.R0, &l2.R1, &l2.R0) + prodLines = pr.Mul014By014(&l1.R1, &l1.R0, &l2.R1, &l2.R0) // (ℓ × ℓ) × res - res = pr.MulBy01245(res, &prodLines) + res = pr.MulBy01245(res, prodLines) } } } @@ -428,8 +450,10 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // l1 the tangent ℓ passing 2Qacc[k] l1 = pr.tangentCompute(Qacc[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // ℓ × res res = pr.MulBy014(res, &l1.R1, &l1.R0) } @@ -725,15 +749,24 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er res.C0.B0 = *pr.MulByElement(&l1.R1, yInv) res.C1.B1 = *pr.Ext2.One() // line evaluation at P - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY) - l2.R1 = *pr.MulByElement(&l2.R1, yInv) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY), + R1: *pr.MulByElement(&l2.R1, yInv), + } // res = ℓ × ℓ - prodLines := *pr.Mul014By014(&l2.R1, &l2.R0, &res.C0.B0, &res.C0.B1) - res.C0.B0 = prodLines[0] - res.C0.B1 = prodLines[1] - res.C0.B2 = prodLines[2] - res.C1.B1 = prodLines[3] - res.C1.B2 = prodLines[4] + prodLines := pr.Mul014By014(&l2.R1, &l2.R0, &res.C0.B0, &res.C0.B1) + res = &fields_bls12381.E12{ + C0: fields_bls12381.E6{ + B0: *prodLines[0], + B1: *prodLines[1], + B2: *prodLines[2], + }, + C1: fields_bls12381.E6{ + B0: res.C1.B0, + B1: *prodLines[3], + B2: *prodLines[4], + }, + } res = pr.MulBy014(res, pr.MulByElement(&pr.lines[1][62], y2Inv), @@ -758,8 +791,10 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er // Qacc ← 2Qacc and l1 the tangent ℓ passing 2Qacc Qacc, l1 = pr.doubleStep(Qacc) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // ℓ × res res = pr.MulBy014(res, &l1.R1, &l1.R0) } else { @@ -776,15 +811,19 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er // l2 the line ℓ passing (Qacc+Q) and Qacc Qacc, l1, l2 = pr.doubleAndAddStep(Qacc, Q) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // line evaluation at P - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY) - l2.R1 = *pr.MulByElement(&l2.R1, yInv) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY), + R1: *pr.MulByElement(&l2.R1, yInv), + } // ℓ × ℓ - prodLines = *pr.Mul014By014(&l1.R1, &l1.R0, &l2.R1, &l2.R0) + prodLines = pr.Mul014By014(&l1.R1, &l1.R0, &l2.R1, &l2.R0) // (ℓ × ℓ) × res - res = pr.MulBy01245(res, &prodLines) + res = pr.MulBy01245(res, prodLines) } } @@ -794,17 +833,19 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er // l1 the tangent ℓ passing 2Qacc l1 = pr.tangentCompute(Qacc) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // ℓ × ℓ - prodLines = *pr.Mul014By014( + prodLines = pr.Mul014By014( &l1.R1, &l1.R0, pr.MulByElement(&pr.lines[1][0], y2Inv), pr.MulByElement(&pr.lines[0][0], x2OverY2), ) // (ℓ × ℓ) × res - res = pr.MulBy01245(res, 
&prodLines) + res = pr.MulBy01245(res, prodLines) // negative x₀ res = pr.Ext12.Conjugate(res) diff --git a/std/algebra/emulated/sw_bn254/doc_test.go b/std/algebra/emulated/sw_bn254/doc_test.go index 7d8ef6a6cd..36f7445dac 100644 --- a/std/algebra/emulated/sw_bn254/doc_test.go +++ b/std/algebra/emulated/sw_bn254/doc_test.go @@ -53,38 +53,26 @@ func ExamplePairing() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } } diff --git a/std/algebra/emulated/sw_bn254/g1.go b/std/algebra/emulated/sw_bn254/g1.go index 69ce54898c..1d528cb404 100644 --- a/std/algebra/emulated/sw_bn254/g1.go +++ b/std/algebra/emulated/sw_bn254/g1.go @@ -2,15 +2,28 @@ package sw_bn254 import ( "github.com/consensys/gnark-crypto/ecc/bn254" + fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/std/algebra/emulated/sw_emulated" "github.com/consensys/gnark/std/math/emulated" ) +// G1Affine is the point in G1. It is an alias to the generic emulated affine +// point. type G1Affine = sw_emulated.AffinePoint[emulated.BN254Fp] +// Scalar is the scalar in the groups. It is an alias to the emulated element +// defined over the scalar field of the groups. +type Scalar = emulated.Element[emulated.BN254Fr] + +// NewG1Affine allocates a witness from the native G1 element and returns it. func NewG1Affine(v bn254.G1Affine) G1Affine { return G1Affine{ X: emulated.ValueOf[emulated.BN254Fp](v.X), Y: emulated.ValueOf[emulated.BN254Fp](v.Y), } } + +// NewScalar allocates a witness from the native scalar and returns it. +func NewScalar(v fr_bn254.Element) Scalar { + return emulated.ValueOf[emulated.BN254Fr](v) +} diff --git a/std/algebra/emulated/sw_bn254/pairing.go b/std/algebra/emulated/sw_bn254/pairing.go index a11fb80bb8..11b9ba7d75 100644 --- a/std/algebra/emulated/sw_bn254/pairing.go +++ b/std/algebra/emulated/sw_bn254/pairing.go @@ -151,7 +151,10 @@ func (pr Pairing) finalExponentiation(e *GTEl, unsafe bool) *GTEl { // the case, the result is 1 in the torus. We assign a dummy value (1) to e.C1 // and proceed further. 
selector1 = pr.Ext6.IsZero(&e.C1) - e.C1 = *pr.Ext6.Select(selector1, _dummy, &e.C1) + e = &fields_bn254.E12{ + C0: e.C0, + C1: *pr.Ext6.Select(selector1, _dummy, &e.C1), + } } // Torus compression absorbed: @@ -329,7 +332,7 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { } res := pr.Ext12.One() - var prodLines [5]fields_bn254.E2 + var prodLines [5]*fields_bn254.E2 var l1, l2 *lineEvaluation Qacc := make([]*G2Affine, n) @@ -358,8 +361,14 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // (assign line to res) Qacc[0], l1 = pr.doubleStep(Qacc[0]) // line evaluation at P[0] - res.C1.B0 = *pr.MulByElement(&l1.R0, xNegOverY[0]) - res.C1.B1 = *pr.MulByElement(&l1.R1, yInv[0]) + res = &fields_bn254.E12{ + C0: res.C0, + C1: fields_bn254.E6{ + B0: *pr.MulByElement(&l1.R0, xNegOverY[0]), + B1: *pr.MulByElement(&l1.R1, yInv[0]), + B2: res.C1.B2, + }, + } if n >= 2 { // k = 1, separately to avoid MulBy034 (res × ℓ) @@ -367,16 +376,25 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { Qacc[1], l1 = pr.doubleStep(Qacc[1]) // line evaluation at P[1] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[1]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[1]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[1]), + R1: *pr.MulByElement(&l1.R1, yInv[1]), + } // ℓ × res - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &res.C1.B0, &res.C1.B1) - res.C0.B0 = prodLines[0] - res.C0.B1 = prodLines[1] - res.C0.B2 = prodLines[2] - res.C1.B0 = prodLines[3] - res.C1.B1 = prodLines[4] + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &res.C1.B0, &res.C1.B1) + res = &fields_bn254.E12{ + C0: fields_bn254.E6{ + B0: *prodLines[0], + B1: *prodLines[1], + B2: *prodLines[2], + }, + C1: fields_bn254.E6{ + B0: *prodLines[3], + B1: *prodLines[4], + B2: res.C1.B2, + }, + } } if n >= 3 { @@ -385,11 +403,13 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { Qacc[2], l1 = pr.doubleStep(Qacc[2]) // line evaluation at P[1] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[2]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[2]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[2]), + R1: *pr.MulByElement(&l1.R1, yInv[2]), + } // ℓ × res - res = pr.Mul01234By034(&prodLines, &l1.R0, &l1.R1) + res = pr.Mul01234By034(prodLines, &l1.R0, &l1.R1) // k >= 3 for k := 3; k < n; k++ { @@ -397,8 +417,10 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { Qacc[k], l1 = pr.doubleStep(Qacc[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // ℓ × res res = pr.MulBy034(res, &l1.R0, &l1.R1) @@ -420,21 +442,25 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { l2 = pr.lineCompute(Qacc[k], QNeg[k]) // line evaluation at P[k] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[k]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[k]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l2.R1, yInv[k]), + } // Qacc[k] ← Qacc[k]+Q[k] and // l1 the line ℓ passing Qacc[k] and Q[k] Qacc[k], l1 = pr.addStep(Qacc[k], Q[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // ℓ × ℓ - prodLines = 
*pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) } l1s := make([]*lineEvaluation, n) @@ -452,8 +478,10 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { Qacc[k], l1s[k] = pr.doubleStep(Qacc[k]) // line evaluation at P[k] - l1s[k].R0 = *pr.MulByElement(&l1s[k].R0, xNegOverY[k]) - l1s[k].R1 = *pr.MulByElement(&l1s[k].R1, yInv[k]) + l1s[k] = &lineEvaluation{ + R0: *pr.MulByElement(&l1s[k].R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1s[k].R1, yInv[k]), + } } @@ -468,9 +496,9 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { // mul lines 2-by-2 for k := 1; k < n; k += 2 { // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1s[k].R0, &l1s[k].R1, &l1s[k-1].R0, &l1s[k-1].R1) + prodLines = pr.Mul034By034(&l1s[k].R0, &l1s[k].R1, &l1s[k-1].R0, &l1s[k-1].R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) } @@ -482,17 +510,21 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { Qacc[k], l1, l2 = pr.doubleAndAddStep(Qacc[k], Q[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // line evaluation at P[k] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[k]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[k]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l2.R1, yInv[k]), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) } @@ -504,17 +536,21 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { Qacc[k], l1, l2 = pr.doubleAndAddStep(Qacc[k], QNeg[k]) // line evaluation at P[k] - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l1.R1, yInv[k]), + } // line evaluation at P[k] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[k]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[k]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l2.R1, yInv[k]), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) } @@ -524,37 +560,49 @@ func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { } // Compute ∏ᵢ { ℓᵢ_{[6x₀+2]Q,π(Q)}(P) · ℓᵢ_{[6x₀+2]Q+π(Q),-π²(Q)}(P) } - Q1, Q2 := new(G2Affine), new(G2Affine) + var Q1, Q2 *G2Affine for k := 0; k < n; k++ { //Q1 = π(Q) - Q1.X = *pr.Ext2.Conjugate(&Q[k].X) - Q1.X = *pr.Ext2.MulByNonResidue1Power2(&Q1.X) - Q1.Y = *pr.Ext2.Conjugate(&Q[k].Y) - Q1.Y = *pr.Ext2.MulByNonResidue1Power3(&Q1.Y) + Q1X := pr.Ext2.Conjugate(&Q[k].X) + Q1X = pr.Ext2.MulByNonResidue1Power2(Q1X) + Q1Y := pr.Ext2.Conjugate(&Q[k].Y) + Q1Y = pr.Ext2.MulByNonResidue1Power3(Q1Y) + Q1 = &G2Affine{ + X: *Q1X, + Y: *Q1Y, + } // Q2 = -π²(Q) - Q2.X = *pr.Ext2.MulByNonResidue2Power2(&Q[k].X) - Q2.Y = *pr.Ext2.MulByNonResidue2Power3(&Q[k].Y) - Q2.Y = *pr.Ext2.Neg(&Q2.Y) + + Q2Y 
:= pr.Ext2.MulByNonResidue2Power3(&Q[k].Y) + Q2Y = pr.Ext2.Neg(Q2Y) + Q2 = &G2Affine{ + X: *pr.Ext2.MulByNonResidue2Power2(&Q[k].X), + Y: *Q2Y, + } // Qacc[k] ← Qacc[k]+π(Q) and // l1 the line passing Qacc[k] and π(Q) Qacc[k], l1 = pr.addStep(Qacc[k], Q1) // line evaluation at P[k] - l1.R0 = *pr.Ext2.MulByElement(&l1.R0, xNegOverY[k]) - l1.R1 = *pr.Ext2.MulByElement(&l1.R1, yInv[k]) + l1 = &lineEvaluation{ + R0: *pr.Ext2.MulByElement(&l1.R0, xNegOverY[k]), + R1: *pr.Ext2.MulByElement(&l1.R1, yInv[k]), + } // l2 the line passing Qacc[k] and -π²(Q) l2 = pr.lineCompute(Qacc[k], Q2) // line evaluation at P[k] - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY[k]) - l2.R1 = *pr.MulByElement(&l2.R1, yInv[k]) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY[k]), + R1: *pr.MulByElement(&l2.R1, yInv[k]), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) } @@ -752,14 +800,14 @@ func (pr Pairing) MillerLoopFixedQ(P *G1Affine) (*GTEl, error) { res = pr.Square034(res) // lines evaluations at P // and ℓ × ℓ - prodLines := *pr.Mul034By034( + prodLines := pr.Mul034By034( pr.MulByElement(&pr.lines[0][63], xOverY), pr.MulByElement(&pr.lines[1][63], yInv), pr.MulByElement(&pr.lines[2][63], xOverY), pr.MulByElement(&pr.lines[3][63], yInv), ) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) for i := 62; i >= 0; i-- { res = pr.Square(res) @@ -775,14 +823,14 @@ func (pr Pairing) MillerLoopFixedQ(P *G1Affine) (*GTEl, error) { } else { // lines evaluations at P // and ℓ × ℓ - prodLines := *pr.Mul034By034( + prodLines := pr.Mul034By034( pr.MulByElement(&pr.lines[0][i], xOverY), pr.MulByElement(&pr.lines[1][i], yInv), pr.MulByElement(&pr.lines[2][i], xOverY), pr.MulByElement(&pr.lines[3][i], yInv), ) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) } } @@ -790,14 +838,14 @@ func (pr Pairing) MillerLoopFixedQ(P *G1Affine) (*GTEl, error) { // Compute ℓ_{[6x₀+2]Q,π(Q)}(P) · ℓ_{[6x₀+2]Q+π(Q),-π²(Q)}(P) // lines evaluations at P // and ℓ × ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( pr.MulByElement(&pr.lines[0][65], xOverY), pr.MulByElement(&pr.lines[1][65], yInv), pr.MulByElement(&pr.lines[0][66], xOverY), pr.MulByElement(&pr.lines[1][66], yInv), ) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) return res, nil } @@ -807,7 +855,7 @@ func (pr Pairing) MillerLoopFixedQ(P *G1Affine) (*GTEl, error) { func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, error) { res := pr.Ext12.One() - var prodLines [5]fields_bn254.E2 + var prodLines [5]*fields_bn254.E2 var l1, l2 *lineEvaluation var Qacc, QNeg *G2Affine Qacc = Q @@ -827,18 +875,20 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er Qacc, l1 = pr.doubleStep(Qacc) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // precomputed-ℓ × ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( &l1.R0, &l1.R1, pr.MulByElement(&pr.lines[0][64], x2OverY2), pr.MulByElement(&pr.lines[1][64], y2Inv), ) // (precomputed-ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) // i = 63, 
separately to avoid a doubleStep // (at this point Qacc = 2Q, so 2Qacc-Q=3Q is equivalent to Qacc+Q=3Q @@ -848,31 +898,35 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er l2 = pr.lineCompute(Qacc, QNeg) // line evaluation at P - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY) - l2.R1 = *pr.MulByElement(&l2.R1, yInv) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY), + R1: *pr.MulByElement(&l2.R1, yInv), + } // Qacc ← Qacc+Q and // l1 the line ℓ passing Qacc and Q Qacc, l1 = pr.addStep(Qacc, Q) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) // precomputed-ℓ × precomputed-ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( pr.MulByElement(&pr.lines[0][63], x2OverY2), pr.MulByElement(&pr.lines[1][63], y2Inv), pr.MulByElement(&pr.lines[2][63], x2OverY2), pr.MulByElement(&pr.lines[3][63], y2Inv), ) // (precomputed-ℓ × precomputed-ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) // Compute ∏ᵢ { fᵢ_{6x₀+2,Q}(P) } for i := 62; i >= 0; i-- { @@ -887,29 +941,31 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er Qacc, l1 = pr.doubleStep(Qacc) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // precomputed-ℓ × ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( &l1.R0, &l1.R1, pr.MulByElement(&pr.lines[0][i], x2OverY2), pr.MulByElement(&pr.lines[1][i], y2Inv), ) // (precomputed-ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) case 1: // precomputed-ℓ × precomputed-ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( pr.MulByElement(&pr.lines[0][i], x2OverY2), pr.MulByElement(&pr.lines[1][i], y2Inv), pr.MulByElement(&pr.lines[2][i], x2OverY2), pr.MulByElement(&pr.lines[3][i], y2Inv), ) // (precomputed-ℓ × precomputed-ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) // Qacc ← 2Qacc+Q, // l1 the line ℓ passing Qacc and Q @@ -917,28 +973,32 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er Qacc, l1, l2 = pr.doubleAndAddStep(Qacc, Q) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // line evaluation at P - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY) - l2.R1 = *pr.MulByElement(&l2.R1, yInv) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY), + R1: *pr.MulByElement(&l2.R1, yInv), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) case -1: // precomputed-ℓ × precomputed-ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( pr.MulByElement(&pr.lines[0][i], x2OverY2), pr.MulByElement(&pr.lines[1][i], y2Inv), 
pr.MulByElement(&pr.lines[2][i], x2OverY2), pr.MulByElement(&pr.lines[3][i], y2Inv), ) // (precomputed-ℓ × precomputed-ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) // Qacc ← 2Qacc-Q, // l1 the line ℓ passing Qacc and -Q @@ -946,17 +1006,21 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er Qacc, l1, l2 = pr.doubleAndAddStep(Qacc, QNeg) // line evaluation at P - l1.R0 = *pr.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.MulByElement(&l1.R0, xNegOverY), + R1: *pr.MulByElement(&l1.R1, yInv), + } // line evaluation at P - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY) - l2.R1 = *pr.MulByElement(&l2.R1, yInv) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY), + R1: *pr.MulByElement(&l2.R1, yInv), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) default: return nil, errors.New("invalid loopCounter") @@ -964,46 +1028,54 @@ func (pr Pairing) DoubleMillerLoopFixedQ(P, T *G1Affine, Q *G2Affine) (*GTEl, er } // Compute ∏ᵢ { ℓᵢ_{[6x₀+2]Q,π(Q)}(P) · ℓᵢ_{[6x₀+2]Q+π(Q),-π²(Q)}(P) } - Q1, Q2 := new(G2Affine), new(G2Affine) + var Q1, Q2 *G2Affine //Q1 = π(Q) - Q1.X = *pr.Ext2.Conjugate(&Q.X) - Q1.X = *pr.Ext2.MulByNonResidue1Power2(&Q1.X) - Q1.Y = *pr.Ext2.Conjugate(&Q.Y) - Q1.Y = *pr.Ext2.MulByNonResidue1Power3(&Q1.Y) + Q1X := pr.Ext2.Conjugate(&Q.X) + Q1Y := pr.Ext2.Conjugate(&Q.Y) + Q1 = &G2Affine{ + X: *pr.Ext2.MulByNonResidue1Power2(Q1X), + Y: *pr.Ext2.MulByNonResidue1Power3(Q1Y), + } // Q2 = -π²(Q) - Q2.X = *pr.Ext2.MulByNonResidue2Power2(&Q.X) - Q2.Y = *pr.Ext2.MulByNonResidue2Power3(&Q.Y) - Q2.Y = *pr.Ext2.Neg(&Q2.Y) + Q2Y := pr.Ext2.MulByNonResidue2Power3(&Q.Y) + Q2 = &G2Affine{ + X: *pr.Ext2.MulByNonResidue2Power2(&Q.X), + Y: *pr.Ext2.Neg(Q2Y), + } // Qacc ← Qacc+π(Q) and // l1 the line passing Qacc and π(Q) Qacc, l1 = pr.addStep(Qacc, Q1) // line evaluation at P - l1.R0 = *pr.Ext2.MulByElement(&l1.R0, xNegOverY) - l1.R1 = *pr.Ext2.MulByElement(&l1.R1, yInv) + l1 = &lineEvaluation{ + R0: *pr.Ext2.MulByElement(&l1.R0, xNegOverY), + R1: *pr.Ext2.MulByElement(&l1.R1, yInv), + } // l2 the line passing Qacc and -π²(Q) l2 = pr.lineCompute(Qacc, Q2) // line evaluation at P - l2.R0 = *pr.MulByElement(&l2.R0, xNegOverY) - l2.R1 = *pr.MulByElement(&l2.R1, yInv) + l2 = &lineEvaluation{ + R0: *pr.MulByElement(&l2.R0, xNegOverY), + R1: *pr.MulByElement(&l2.R1, yInv), + } // ℓ × ℓ - prodLines = *pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) + prodLines = pr.Mul034By034(&l1.R0, &l1.R1, &l2.R0, &l2.R1) // (ℓ × ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) // precomputed-ℓ × precomputed-ℓ - prodLines = *pr.Mul034By034( + prodLines = pr.Mul034By034( pr.MulByElement(&pr.lines[0][65], x2OverY2), pr.MulByElement(&pr.lines[1][65], y2Inv), pr.MulByElement(&pr.lines[0][66], x2OverY2), pr.MulByElement(&pr.lines[1][66], y2Inv), ) // (precomputed-ℓ × precomputed-ℓ) × res - res = pr.MulBy01234(res, &prodLines) + res = pr.MulBy01234(res, prodLines) return res, nil } diff --git a/std/algebra/emulated/sw_bw6761/doc.go b/std/algebra/emulated/sw_bw6761/doc.go new file mode 100644 index 0000000000..e608cc7d1c --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/doc.go @@ -0,0 +1,7 @@ +// Package sw_bw6761 implements G1 and G2 arithmetics and pairing computation over 
BW6-761 curve. +// +// The implementation follows [Housni22]: "Pairings in Rank-1 Constraint Systems" and [BW6-761-hackmd]. +// +// [Housni22]: https://eprint.iacr.org/2022/1162 +// [BW6-761-hackmd]: https://hackmd.io/@gnark/BW6-761-changes +package sw_bw6761 diff --git a/std/algebra/emulated/sw_bw6761/doc_test.go b/std/algebra/emulated/sw_bw6761/doc_test.go new file mode 100644 index 0000000000..e1bddcbd38 --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/doc_test.go @@ -0,0 +1,91 @@ +package sw_bw6761_test + +import ( + "crypto/rand" + "fmt" + + "github.com/consensys/gnark-crypto/ecc" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/std/algebra/emulated/sw_bw6761" +) + +type PairCircuit struct { + InG1 sw_bw6761.G1Affine + InG2 sw_bw6761.G2Affine + Res sw_bw6761.GTEl +} + +func (c *PairCircuit) Define(api frontend.API) error { + pairing, err := sw_bw6761.NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + // Pair method does not check that the points are in the proper groups. + // Compute the pairing + res, err := pairing.Pair([]*sw_bw6761.G1Affine{&c.InG1}, []*sw_bw6761.G2Affine{&c.InG2}) + if err != nil { + return fmt.Errorf("pair: %w", err) + } + pairing.AssertIsEqual(res, &c.Res) + return nil +} + +func ExamplePairing() { + p, q, err := randomG1G2Affines() + if err != nil { + panic(err) + } + res, err := bw6761.Pair([]bw6761.G1Affine{p}, []bw6761.G2Affine{q}) + if err != nil { + panic(err) + } + circuit := PairCircuit{} + witness := PairCircuit{ + InG1: sw_bw6761.NewG1Affine(p), + InG2: sw_bw6761.NewG2Affine(q), + Res: sw_bw6761.NewGTEl(res), + } + ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) + if err != nil { + panic(err) + } + pk, vk, err := groth16.Setup(ccs) + if err != nil { + panic(err) + } + secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) + if err != nil { + panic(err) + } + publicWitness, err := secretWitness.Public() + if err != nil { + panic(err) + } + proof, err := groth16.Prove(ccs, pk, secretWitness) + if err != nil { + panic(err) + } + err = groth16.Verify(proof, vk, publicWitness) + if err != nil { + panic(err) + } +} + +func randomG1G2Affines() (p bw6761.G1Affine, q bw6761.G2Affine, err error) { + _, _, G1AffGen, G2AffGen := bw6761.Generators() + mod := bw6761.ID.ScalarField() + s1, err := rand.Int(rand.Reader, mod) + if err != nil { + return p, q, err + } + s2, err := rand.Int(rand.Reader, mod) + if err != nil { + return p, q, err + } + p.ScalarMultiplication(&G1AffGen, s1) + q.ScalarMultiplication(&G2AffGen, s2) + return +} diff --git a/std/algebra/emulated/sw_bw6761/g1.go b/std/algebra/emulated/sw_bw6761/g1.go new file mode 100644 index 0000000000..4c5f5a573f --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/g1.go @@ -0,0 +1,29 @@ +package sw_bw6761 + +import ( + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark/std/algebra/emulated/sw_emulated" + "github.com/consensys/gnark/std/math/emulated" +) + +// G1Affine is the point in G1. It is an alias to the generic emulated affine +// point. +type G1Affine = sw_emulated.AffinePoint[emulated.BW6761Fp] + +// Scalar is the scalar in the groups. It is an alias to the emulated element +// defined over the scalar field of the groups. 
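+//
+// In a user circuit a Scalar is typically declared as a struct field and
+// assigned from a native fr_bw6761.Element via NewScalar, e.g. (sketch with
+// placeholder names myCircuit and s):
+//
+//	type myCircuit struct{ S sw_bw6761.Scalar }
+//	assignment := myCircuit{S: sw_bw6761.NewScalar(s)}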
+type Scalar = emulated.Element[emulated.BW6761Fr] + +// NewG1Affine allocates a witness from the native G1 element and returns it. +func NewG1Affine(v bw6761.G1Affine) G1Affine { + return G1Affine{ + X: emulated.ValueOf[emulated.BW6761Fp](v.X), + Y: emulated.ValueOf[emulated.BW6761Fp](v.Y), + } +} + +// NewScalar allocates a witness from the native scalar and returns it. +func NewScalar(v fr_bw6761.Element) Scalar { + return emulated.ValueOf[emulated.BW6761Fr](v) +} diff --git a/std/algebra/emulated/sw_bw6761/g2.go b/std/algebra/emulated/sw_bw6761/g2.go new file mode 100644 index 0000000000..8de701405a --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/g2.go @@ -0,0 +1,16 @@ +package sw_bw6761 + +import ( + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/std/algebra/emulated/sw_emulated" + "github.com/consensys/gnark/std/math/emulated" +) + +type G2Affine = sw_emulated.AffinePoint[emulated.BW6761Fp] + +func NewG2Affine(v bw6761.G2Affine) G2Affine { + return G2Affine{ + X: emulated.ValueOf[emulated.BW6761Fp](v.X), + Y: emulated.ValueOf[emulated.BW6761Fp](v.Y), + } +} diff --git a/std/algebra/emulated/sw_bw6761/pairing.go b/std/algebra/emulated/sw_bw6761/pairing.go new file mode 100644 index 0000000000..bff75e7551 --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/pairing.go @@ -0,0 +1,771 @@ +package sw_bw6761 + +import ( + "errors" + "fmt" + "math/big" + + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/algebra/emulated/fields_bw6761" + "github.com/consensys/gnark/std/math/emulated" +) + +type Pairing struct { + api frontend.API + *fields_bw6761.Ext6 + curveF *emulated.Field[emulated.BW6761Fp] + lines [4][189]emulated.Element[emulated.BW6761Fp] +} + +type GTEl = fields_bw6761.E6 + +func NewGTEl(v bw6761.GT) GTEl { + return GTEl{ + B0: fields_bw6761.E3{ + A0: emulated.ValueOf[emulated.BW6761Fp](v.B0.A0), + A1: emulated.ValueOf[emulated.BW6761Fp](v.B0.A1), + A2: emulated.ValueOf[emulated.BW6761Fp](v.B0.A2), + }, + B1: fields_bw6761.E3{ + A0: emulated.ValueOf[emulated.BW6761Fp](v.B1.A0), + A1: emulated.ValueOf[emulated.BW6761Fp](v.B1.A1), + A2: emulated.ValueOf[emulated.BW6761Fp](v.B1.A2), + }, + } +} + +func NewPairing(api frontend.API) (*Pairing, error) { + ba, err := emulated.NewField[emulated.BW6761Fp](api) + if err != nil { + return nil, fmt.Errorf("new base api: %w", err) + } + return &Pairing{ + api: api, + Ext6: fields_bw6761.NewExt6(api), + curveF: ba, + lines: getPrecomputedLines(), + }, nil +} + +// FinalExponentiation computes the exponentiation zᵈ where +// +// d = (p⁶-1)/r = (p⁶-1)/Φ₆(p) ⋅ Φ₆(p)/r = (p³-1)(p+1)(p²-p+1)/r +// +// we use instead d = s⋅(p³-1)(p+1)(p²-p+1)/r +// where s is the cofactor (x₀+1) +func (pr Pairing) FinalExponentiation(z *GTEl) *GTEl { + + z = pr.Reduce(z) + result := pr.Copy(z) + + // 1. Easy part + // (p³-1)(p+1) + buf := pr.Conjugate(result) + buf = pr.DivUnchecked(buf, result) + result = pr.Frobenius(buf) + result = pr.Mul(result, buf) + + // 2. 
Hard part (up to permutation) + // (x₀+1)(p²-p+1)/r + // Algorithm 4.4 from https://yelhousni.github.io/phd.pdf + a := pr.ExpX0Minus1Square(result) + a = pr.Mul(a, pr.Frobenius(result)) + b := pr.ExpX0Plus1(a) + b = pr.Mul(b, pr.Conjugate(result)) + t := pr.CyclotomicSquare(a) + a = pr.Mul(a, t) + c := pr.ExptMinus1Div3(b) + d := pr.ExpX0Minus1(c) + e := pr.ExpX0Minus1Square(d) + e = pr.Mul(e, d) + d = pr.Conjugate(d) + f := pr.Mul(d, b) + g := pr.ExpX0Plus1(e) + g = pr.Mul(g, f) + h := pr.Mul(g, c) + i := pr.Mul(g, d) + i = pr.ExpX0Plus1(i) + i = pr.Mul(i, pr.Conjugate(f)) + j := pr.ExpC1(h) + j = pr.Mul(j, e) + k := pr.CyclotomicSquare(j) + k = pr.Mul(k, j) + k = pr.Mul(k, b) + t = pr.ExpC2(i) + k = pr.Mul(k, t) + result = pr.Mul(a, k) + + return result +} + +// lineEvaluation represents a sparse Fp6 Elmt (result of the line evaluation) +// line: 1 + R0(x/y) + R1(1/y) = 0 instead of R0'*y + R1'*x + R2' = 0 This +// makes the multiplication by lines (MulBy014) +type lineEvaluation struct { + R0, R1 emulated.Element[emulated.BW6761Fp] +} + +// Pair calculates the reduced pairing for a set of points +// ∏ᵢ e(Pᵢ, Qᵢ). +// +// This function doesn't check that the inputs are in the correct subgroup. See IsInSubGroup. +func (pr Pairing) Pair(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { + f, err := pr.MillerLoop(P, Q) + if err != nil { + return nil, err + } + return pr.FinalExponentiation(f), nil +} + +// PairingCheck calculates the reduced pairing for a set of points and asserts if the result is One +// ∏ᵢ e(Pᵢ, Qᵢ) =? 1 +// +// This function doesn't check that the inputs are in the correct subgroups. +func (pr Pairing) PairingCheck(P []*G1Affine, Q []*G2Affine) error { + f, err := pr.Pair(P, Q) + if err != nil { + return err + + } + one := pr.One() + pr.AssertIsEqual(f, one) + + return nil +} + +func (pr Pairing) AssertIsEqual(x, y *GTEl) { + pr.Ext6.AssertIsEqual(x, y) +} + +// seed x₀=9586122913090633729 +// +// x₀+1 in binary (64 bits) padded with 0s +var loopCounter1 = [190]int8{ + 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +} + +// x₀³-x₀²-x₀ in 2-NAF +var loopCounter2 = [190]int8{ + -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, -1, + 0, 1, 0, -1, 0, 0, 0, 0, -1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, + 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 0, 0, 0, 0, -1, 0, 0, + 1, 0, 0, 0, -1, 0, 0, -1, 0, 1, 0, -1, 0, 0, 0, 1, 0, 0, 1, 0, -1, 0, 1, 0, + 1, 0, 0, 0, 1, 0, -1, 0, -1, 0, 0, 0, 0, 0, 1, 0, 0, 1, +} + +// thirdRootOne² + thirdRootOne + 1 = 0 in BW6761Fp +var thirdRootOne = 
emulated.ValueOf[emulated.BW6761Fp]("1968985824090209297278610739700577151397666382303825728450741611566800370218827257750865013421937292370006175842381275743914023380727582819905021229583192207421122272650305267822868639090213645505120388400344940985710520836292650") + +// MillerLoop computes the optimal Tate multi-Miller loop +// (or twisted ate or Eta revisited) +// +// ∏ᵢ { fᵢ_{x₀+1+λ(x₀³-x₀²-x₀),Qᵢ}(Pᵢ) } +// +// Alg.2 in https://eprint.iacr.org/2021/1359.pdf +// Eq. (6') in https://hackmd.io/@gnark/BW6-761-changes +func (pr Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GTEl, error) { + + // check input size match + n := len(P) + if n == 0 || n != len(Q) { + return nil, errors.New("invalid inputs sizes") + } + + // precomputations + negQ := make([]*G2Affine, n) + imQ := make([]*G2Affine, n) + imQneg := make([]*G2Affine, n) + accQ := make([]*G2Affine, n) + yInv := make([]*emulated.Element[emulated.BW6761Fp], n) + xNegOverY := make([]*emulated.Element[emulated.BW6761Fp], n) + + for k := 0; k < n; k++ { + // P and Q are supposed to be on G1 and G2 respectively of prime order r. + // The point (x,0) is of order 2. But this function does not check + // subgroup membership. + yInv[k] = pr.curveF.Inverse(&P[k].Y) + xNegOverY[k] = pr.curveF.MulMod(&P[k].X, yInv[k]) + xNegOverY[k] = pr.curveF.Neg(xNegOverY[k]) + // negQ = -Q = (x, -y) + negQ[k] = &G1Affine{X: Q[k].X, Y: *pr.curveF.Neg(&Q[k].Y)} + // imQ = (w*x, -y) + imQ[k] = &G1Affine{X: *pr.curveF.MulMod(&Q[k].X, &thirdRootOne), Y: negQ[k].Y} + // imQneg = (w*x, y) + imQneg[k] = &G1Affine{X: imQ[k].X, Y: Q[k].Y} + // point accumulator initialized to imQ + accQ[k] = imQ[k] + } + + // f_{x₀+1+λ(x₀³-x₀²-x₀),Q}(P) + result := pr.Ext6.One() + var l0, l1 *lineEvaluation + + var prodLines [5]*emulated.Element[emulated.BW6761Fp] + // i = 188, separately to avoid an E6 Square + // (Square(res) = 1² = 1) + // k = 0, separately to avoid MulBy014 (res × ℓ) + // (assign line to res) + accQ[0], l0 = pr.doubleStep(imQ[0]) + result = &fields_bw6761.E6{ + B0: fields_bw6761.E3{ + A0: *pr.curveF.MulMod(&l0.R1, yInv[0]), + A1: *pr.curveF.MulMod(&l0.R0, xNegOverY[0]), + A2: result.B0.A2, + }, + B1: fields_bw6761.E3{ + A0: result.B1.A0, + A1: *pr.curveF.One(), + A2: result.B1.A2, + }, + } + + if n >= 2 { + // k = 1, separately to avoid MulBy014 (res × ℓ) + // (res is also a line at this point, so we use Mul014By014 ℓ × ℓ) + accQ[1], l0 = pr.doubleStep(accQ[1]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + prodLines = pr.Mul014By014(&l0.R1, &l0.R0, &result.B0.A0, &result.B0.A1) + result = &fields_bw6761.E6{ + B0: fields_bw6761.E3{ + A0: *prodLines[0], + A1: *prodLines[1], + A2: *prodLines[2], + }, + B1: fields_bw6761.E3{ + A0: result.B1.A0, + A1: *prodLines[3], + A2: *prodLines[4], + }, + } + } + + if n >= 3 { + // k = 2, separately to avoid MulBy014 (res × ℓ) + // (res has a zero E2 element, so we use Mul01234By034) + accQ[2], l0 = pr.doubleStep(accQ[2]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[2]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[2]), + } + result = pr.Mul01245By014(prodLines, &l0.R1, &l0.R0) + + // k >= 3 + for k := 3; k < n; k++ { + accQ[k], l0 = pr.doubleStep(accQ[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + } + } + + for i := 187; i >= 1; i-- { + // mutualize the square among n Miller loops + // (∏ᵢfᵢ)² + result = 
pr.Square(result) + + j := loopCounter2[i]*3 + loopCounter1[i] + + for k := 0; k < n; k++ { + switch j { + // cases -4, -2, 2 and 4 are omitted as they do not occur given the + // static loop counters. + case -3: + accQ[k], l0, l1 = pr.doubleAndAddStep(accQ[k], imQneg[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[k]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + case -1: + accQ[k], l0, l1 = pr.doubleAndAddStep(accQ[k], negQ[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[k]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + case 0: + accQ[k], l0 = pr.doubleStep(accQ[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + case 1: + accQ[k], l0, l1 = pr.doubleAndAddStep(accQ[k], Q[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[k]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + case 3: + accQ[k], l0, l1 = pr.doubleAndAddStep(accQ[k], imQ[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[k]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + default: + return nil, errors.New("invalid loopCounter") + } + } + } + + // i = 0, j = -3 + // The resulting accumulator point is the infinity point because + // [(x₀+1) + λ(x₀³-x₀²-x₀)]Q = [3(x₀-1)² ⋅ r]Q = ∞ + // since we're using affine coordinates, the addStep in the last iteration + // (j=-3) will fail as the slope of a vertical line in indefinite. But in + // projective coordinates, vertinal lines meet at (0:1:0) so the result + // should be unchanged if we ommit the addStep in this case. Moreover we + // just compute before the tangent line and not the full doubleStep as we + // only care about the Miller loop result in Fp6 and not the point itself. 
+ result = pr.Square(result) + for k := 0; k < n; k++ { + l0 = pr.tangentCompute(accQ[k]) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[k]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[k]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + } + + return result, nil + +} + +// addStep adds two points in affine coordinates, and evaluates the line in Miller loop +// https://eprint.iacr.org/2022/1162 (Section 6.1) +func (pr Pairing) addStep(p1, p2 *G2Affine) (*G2Affine, *lineEvaluation) { + + // compute λ = (y2-y1)/(x2-x1) + p2ypy := pr.curveF.Sub(&p2.Y, &p1.Y) + p2xpx := pr.curveF.Sub(&p2.X, &p1.X) + λ := pr.curveF.Div(p2ypy, p2xpx) + + // xr = λ²-x1-x2 + λλ := pr.curveF.Mul(λ, λ) + p2xpx = pr.curveF.Add(&p1.X, &p2.X) + xr := pr.curveF.Sub(λλ, p2xpx) + + // yr = λ(x1-xr) - y1 + pxrx := pr.curveF.Sub(&p1.X, xr) + λpxrx := pr.curveF.Mul(λ, pxrx) + yr := pr.curveF.Sub(λpxrx, &p1.Y) + + var res G2Affine + res.X = *xr + res.Y = *yr + + var line lineEvaluation + line.R0 = *λ + line.R1 = *pr.curveF.Mul(λ, &p1.X) + line.R1 = *pr.curveF.Sub(&line.R1, &p1.Y) + + return &res, &line + +} + +// doubleAndAddStep doubles p1 and adds p2 to the result in affine coordinates, and evaluates the line in Miller loop +// https://eprint.iacr.org/2022/1162 (Section 6.1) +func (pr Pairing) doubleAndAddStep(p1, p2 *G2Affine) (*G2Affine, *lineEvaluation, *lineEvaluation) { + + var line1, line2 lineEvaluation + var p G2Affine + + // compute λ1 = (y2-y1)/(x2-x1) + n := pr.curveF.Sub(&p1.Y, &p2.Y) + d := pr.curveF.Sub(&p1.X, &p2.X) + l1 := pr.curveF.Div(n, d) + + // compute x3 =λ1²-x1-x2 + x3 := pr.curveF.Mul(l1, l1) + x3 = pr.curveF.Sub(x3, &p1.X) + x3 = pr.curveF.Sub(x3, &p2.X) + + // omit y3 computation + + // compute line1 + line1.R0 = *l1 + line1.R1 = *pr.curveF.Mul(l1, &p1.X) + line1.R1 = *pr.curveF.Sub(&line1.R1, &p1.Y) + + // compute λ2 = -λ1-2y1/(x3-x1) + n = pr.curveF.Add(&p1.Y, &p1.Y) + d = pr.curveF.Sub(x3, &p1.X) + l2 := pr.curveF.Div(n, d) + l2 = pr.curveF.Add(l2, l1) + l2 = pr.curveF.Neg(l2) + + // compute x4 = λ2²-x1-x3 + x4 := pr.curveF.Mul(l2, l2) + x4 = pr.curveF.Sub(x4, &p1.X) + x4 = pr.curveF.Sub(x4, x3) + + // compute y4 = λ2(x1 - x4)-y1 + y4 := pr.curveF.Sub(&p1.X, x4) + y4 = pr.curveF.Mul(l2, y4) + y4 = pr.curveF.Sub(y4, &p1.Y) + + p.X = *x4 + p.Y = *y4 + + // compute line2 + line2.R0 = *l2 + line2.R1 = *pr.curveF.Mul(l2, &p1.X) + line2.R1 = *pr.curveF.Sub(&line2.R1, &p1.Y) + + return &p, &line1, &line2 +} + +// doubleStep doubles a point in affine coordinates, and evaluates the line in Miller loop +// https://eprint.iacr.org/2022/1162 (Section 6.1) +func (pr Pairing) doubleStep(p1 *G2Affine) (*G2Affine, *lineEvaluation) { + + var p G2Affine + var line lineEvaluation + + // λ = 3x²/2y + n := pr.curveF.Mul(&p1.X, &p1.X) + three := big.NewInt(3) + n = pr.curveF.MulConst(n, three) + d := pr.curveF.Add(&p1.Y, &p1.Y) + λ := pr.curveF.Div(n, d) + + // xr = λ²-2x + xr := pr.curveF.Mul(λ, λ) + xr = pr.curveF.Sub(xr, &p1.X) + xr = pr.curveF.Sub(xr, &p1.X) + + // yr = λ(x-xr)-y + yr := pr.curveF.Sub(&p1.X, xr) + yr = pr.curveF.Mul(λ, yr) + yr = pr.curveF.Sub(yr, &p1.Y) + + p.X = *xr + p.Y = *yr + + line.R0 = *λ + line.R1 = *pr.curveF.Mul(λ, &p1.X) + line.R1 = *pr.curveF.Sub(&line.R1, &p1.Y) + + return &p, &line + +} + +// tangentCompute computes the line that goes through p1 and p2 but does not compute p1+p2 +func (pr Pairing) tangentCompute(p1 *G2Affine) *lineEvaluation { + + // λ = 3x²/2y + n := pr.curveF.Mul(&p1.X, &p1.X) + three := big.NewInt(3) + n = pr.curveF.MulConst(n, three) + d := 
pr.curveF.Add(&p1.Y, &p1.Y) + λ := pr.curveF.Div(n, d) + + var line lineEvaluation + line.R0 = *λ + line.R1 = *pr.curveF.Mul(λ, &p1.X) + line.R1 = *pr.curveF.Sub(&line.R1, &p1.Y) + + return &line + +} + +// ---------------------------- +// Fixed-argument pairing +// ---------------------------- +// +// The second argument Q is g2 the fixed canonical generator of G2. +// +// g2.X = 0x110133241d9b816c852a82e69d660f9d61053aac5a7115f4c06201013890f6d26b41c5dab3da268734ec3f1f09feb58c5bbcae9ac70e7c7963317a300e1b6bace6948cb3cd208d700e96efbc2ad54b06410cf4fe1bf995ba830c194cd025f1c +// g2.Y = 0x17c3357761369f8179eb10e4b6d2dc26b7cf9acec2181c81a78e2753ffe3160a1d86c80b95a59c94c97eb733293fef64f293dbd2c712b88906c170ffa823003ea96fcd504affc758aa2d3a3c5a02a591ec0594f9eac689eb70a16728c73b61 + +// MillerLoopFixed computes the single Miller loop +// fᵢ_{x₀+1+λ(x₀³-x₀²-x₀),Qᵢ}(Pᵢ), where g2 is fixed. +func (pr Pairing) MillerLoopFixedQ(P *G1Affine) (*GTEl, error) { + + // P and Q are supposed to be on G1 and G2 respectively of prime order r. + // The point (x,0) is of order 2. But this function does not check + // subgroup membership. + yInv := pr.curveF.Inverse(&P.Y) + xNegOverY := pr.curveF.MulMod(&P.X, yInv) + xNegOverY = pr.curveF.Neg(xNegOverY) + + // f_{x₀+1+λ(x₀³-x₀²-x₀),Q}(P) + result := pr.Ext6.One() + + // i = 188, separately to avoid an E6 Square + // (Square(res) = 1² = 1) + // and avoid MulBy014 (res × ℓ) + // (assign line to res) + result = &fields_bw6761.E6{ + B0: fields_bw6761.E3{ + A0: *pr.curveF.MulMod(&pr.lines[1][188], yInv), + A1: *pr.curveF.MulMod(&pr.lines[0][188], xNegOverY), + A2: result.B0.A2, + }, + B1: fields_bw6761.E3{ + A0: result.B1.A0, + A1: *pr.curveF.One(), + A2: result.B1.A2, + }, + } + + for i := 187; i >= 1; i-- { + // mutualize the square among n Miller loops + // (∏ᵢfᵢ)² + result = pr.Square(result) + + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][i], yInv), + pr.curveF.MulMod(&pr.lines[0][i], xNegOverY), + ) + + if loopCounter2[i]*3+loopCounter1[i] != 0 { + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[3][i], yInv), + pr.curveF.MulMod(&pr.lines[2][i], xNegOverY), + ) + } + } + + // i = 0 + result = pr.Square(result) + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][0], yInv), + pr.curveF.MulMod(&pr.lines[0][0], xNegOverY), + ) + + return result, nil +} + +// DoubleMillerLoopFixedQ computes the double Miller loop +// fᵢ_{u,g2}(T) * fᵢ_{u,Q}(P), where g2 is fixed. +func (pr Pairing) DoubleMillerLoopFixedQ(P [2]*G1Affine, Q *G2Affine) (*GTEl, error) { + + // P and Q are supposed to be on G1 and G2 respectively of prime order r. + // The point (x,0) is of order 2. But this function does not check + // subgroup membership. 
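+	// By convention P[0] is paired with the fixed generator g2 through the
+	// precomputed lines pr.lines, while P[1] is paired with the dynamic
+	// argument Q, as exercised by TestDoublePairFixedTestSolve.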
+ yInv := make([]*emulated.Element[emulated.BW6761Fp], 2) + xNegOverY := make([]*emulated.Element[emulated.BW6761Fp], 2) + yInv[1] = pr.curveF.Inverse(&P[1].Y) + xNegOverY[1] = pr.curveF.MulMod(&P[1].X, yInv[1]) + xNegOverY[1] = pr.curveF.Neg(xNegOverY[1]) + yInv[0] = pr.curveF.Inverse(&P[0].Y) + xNegOverY[0] = pr.curveF.MulMod(&P[0].X, yInv[0]) + xNegOverY[0] = pr.curveF.Neg(xNegOverY[0]) + // negQ = -Q = (x, -y) + negQ := &G1Affine{X: Q.X, Y: *pr.curveF.Neg(&Q.Y)} + // imQ = (w*x, -y) + imQ := &G1Affine{X: *pr.curveF.MulMod(&Q.X, &thirdRootOne), Y: negQ.Y} + // imQneg = (w*x, y) + imQneg := &G1Affine{X: imQ.X, Y: Q.Y} + // point accumulator initialized to imQ + accQ := imQ + + // f_{x₀+1+λ(x₀³-x₀²-x₀),Q}(P[1]) + result := pr.Ext6.One() + var l0, l1 *lineEvaluation + + for i := 188; i >= 1; i-- { + // mutualize the square among n Miller loops + // (∏ᵢfᵢ)² + result = pr.Square(result) + + j := loopCounter2[i]*3 + loopCounter1[i] + + switch j { + // cases -4, -2, 2 and 4 are omitted as they do not occur given the + // static loop counters. + case -3: + accQ, l0, l1 = pr.doubleAndAddStep(accQ, imQneg) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[1]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + + // fixed-argument + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[0][i], xNegOverY[0]), + ) + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[3][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[2][i], xNegOverY[0]), + ) + case -1: + accQ, l0, l1 = pr.doubleAndAddStep(accQ, negQ) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[1]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + + // fixed-argument + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[0][i], xNegOverY[0]), + ) + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[3][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[2][i], xNegOverY[0]), + ) + case 0: + accQ, l0 = pr.doubleStep(accQ) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + + // fixed-argument + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[0][i], xNegOverY[0]), + ) + case 1: + accQ, l0, l1 = pr.doubleAndAddStep(accQ, Q) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[1]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + + // fixed-argument + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[0][i], xNegOverY[0]), + ) + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[3][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[2][i], xNegOverY[0]), + ) + case 3: + accQ, l0, l1 = pr.doubleAndAddStep(accQ, imQ) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, 
xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + l1 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l1.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l1.R1, yInv[1]), + } + result = pr.MulBy014(result, &l1.R1, &l1.R0) + + // fixed-argument + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[0][i], xNegOverY[0]), + ) + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[3][i], yInv[0]), + pr.curveF.MulMod(&pr.lines[2][i], xNegOverY[0]), + ) + default: + return nil, errors.New("invalid loopCounter") + } + } + + // i = 0, j = -3 + // The resulting accumulator point is the infinity point because + // [(x₀+1) + λ(x₀³-x₀²-x₀)]Q = [3(x₀-1)² ⋅ r]Q = ∞ + // since we're using affine coordinates, the addStep in the last iteration + // (j=-3) will fail as the slope of a vertical line in indefinite. But in + // projective coordinates, vertinal lines meet at (0:1:0) so the result + // should be unchanged if we ommit the addStep in this case. Moreover we + // just compute before the tangent line and not the full doubleStep as we + // only care about the Miller loop result in Fp6 and not the point itself. + result = pr.Square(result) + l0 = pr.tangentCompute(accQ) + l0 = &lineEvaluation{ + R0: *pr.curveF.MulMod(&l0.R0, xNegOverY[1]), + R1: *pr.curveF.MulMod(&l0.R1, yInv[1]), + } + result = pr.MulBy014(result, &l0.R1, &l0.R0) + // fixed-argument + result = pr.MulBy014(result, + pr.curveF.MulMod(&pr.lines[1][0], yInv[0]), + pr.curveF.MulMod(&pr.lines[0][0], xNegOverY[0]), + ) + + return result, nil +} + +// PairFixedQ calculates the reduced pairing for a set of points +// e(P, g2), where g2 is fixed. +// +// This function doesn't check that the inputs are in the correct subgroups. +func (pr Pairing) PairFixedQ(P *G1Affine) (*GTEl, error) { + res, err := pr.MillerLoopFixedQ(P) + if err != nil { + return nil, fmt.Errorf("miller loop: %w", err) + } + res = pr.FinalExponentiation(res) + return res, nil +} + +// DoublePairFixedQ calculates the reduced pairing for a set of points +// e(P, Q) * e(T, g2), where g2 is fixed. +// +// This function doesn't check that the inputs are in the correct subgroups. 
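+//
+// A typical call mirrors DoublePairFixedCircuit in the tests (sketch):
+//
+//	res, err := pairing.DoublePairFixedQ([2]*G1Affine{&c.In1G1, &c.In2G1}, &c.In1G2)
+//	// handle err, then pairing.AssertIsEqual(res, &c.Res)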
+func (pr Pairing) DoublePairFixedQ(P [2]*G1Affine, Q *G2Affine) (*GTEl, error) { + res, err := pr.DoubleMillerLoopFixedQ(P, Q) + if err != nil { + return nil, fmt.Errorf("double miller loop: %w", err) + } + res = pr.FinalExponentiation(res) + return res, nil +} diff --git a/std/algebra/emulated/sw_bw6761/pairing_test.go b/std/algebra/emulated/sw_bw6761/pairing_test.go new file mode 100644 index 0000000000..190c304c0b --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/pairing_test.go @@ -0,0 +1,321 @@ +package sw_bw6761 + +import ( + "bytes" + "crypto/rand" + "fmt" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/constraint" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/frontend/cs/scs" + "github.com/consensys/gnark/test" +) + +func randomG1G2Affines() (bw6761.G1Affine, bw6761.G2Affine) { + _, _, G1AffGen, G2AffGen := bw6761.Generators() + mod := bw6761.ID.ScalarField() + s1, err := rand.Int(rand.Reader, mod) + if err != nil { + panic(err) + } + s2, err := rand.Int(rand.Reader, mod) + if err != nil { + panic(err) + } + var p bw6761.G1Affine + p.ScalarMultiplication(&G1AffGen, s1) + var q bw6761.G2Affine + q.ScalarMultiplication(&G2AffGen, s2) + return p, q +} + +type FinalExponentiationCircuit struct { + InGt GTEl + Res GTEl +} + +func (c *FinalExponentiationCircuit) Define(api frontend.API) error { + pairing, err := NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + res := pairing.FinalExponentiation(&c.InGt) + pairing.AssertIsEqual(res, &c.Res) + return nil +} + +func TestFinalExponentiationTestSolve(t *testing.T) { + assert := test.NewAssert(t) + var gt bw6761.GT + gt.SetRandom() + res := bw6761.FinalExponentiation(>) + witness := FinalExponentiationCircuit{ + InGt: NewGTEl(gt), + Res: NewGTEl(res), + } + err := test.IsSolved(&FinalExponentiationCircuit{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type PairCircuit struct { + InG1 G1Affine + InG2 G2Affine + Res GTEl +} + +func (c *PairCircuit) Define(api frontend.API) error { + pairing, err := NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + res, err := pairing.Pair([]*G1Affine{&c.InG1}, []*G2Affine{&c.InG2}) + if err != nil { + return fmt.Errorf("pair: %w", err) + } + pairing.AssertIsEqual(res, &c.Res) + return nil +} + +func TestPairTestSolve(t *testing.T) { + assert := test.NewAssert(t) + p, q := randomG1G2Affines() + res, err := bw6761.Pair([]bw6761.G1Affine{p}, []bw6761.G2Affine{q}) + assert.NoError(err) + witness := PairCircuit{ + InG1: NewG1Affine(p), + InG2: NewG2Affine(q), + Res: NewGTEl(res), + } + err = test.IsSolved(&PairCircuit{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type MultiPairCircuit struct { + InG1 G1Affine + InG2 G2Affine + Res GTEl + n int +} + +func (c *MultiPairCircuit) Define(api frontend.API) error { + pairing, err := NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + P, Q := []*G1Affine{}, []*G2Affine{} + for i := 0; i < c.n; i++ { + P = append(P, &c.InG1) + Q = append(Q, &c.InG2) + } + res, err := pairing.Pair(P, Q) + if err != nil { + return fmt.Errorf("pair: %w", err) + } + pairing.AssertIsEqual(res, &c.Res) + return nil +} + +func TestMultiPairTestSolve(t *testing.T) { + assert := test.NewAssert(t) + p1, q1 := randomG1G2Affines() + p := make([]bw6761.G1Affine, 4) + q := make([]bw6761.G2Affine, 4) + 
for i := 0; i < 4; i++ { + p[i] = p1 + q[i] = q1 + } + + for i := 2; i < 4; i++ { + res, err := bw6761.Pair(p[:i], q[:i]) + assert.NoError(err) + witness := MultiPairCircuit{ + InG1: NewG1Affine(p1), + InG2: NewG2Affine(q1), + Res: NewGTEl(res), + } + err = test.IsSolved(&MultiPairCircuit{n: i}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) + } +} + +type PairingCheckCircuit struct { + In1G1 G1Affine + In2G1 G1Affine + In1G2 G2Affine + In2G2 G2Affine +} + +func (c *PairingCheckCircuit) Define(api frontend.API) error { + pairing, err := NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + err = pairing.PairingCheck([]*G1Affine{&c.In1G1, &c.In1G1, &c.In2G1, &c.In2G1}, []*G2Affine{&c.In1G2, &c.In2G2, &c.In1G2, &c.In2G2}) + if err != nil { + return fmt.Errorf("pair: %w", err) + } + return nil +} + +func TestPairingCheckTestSolve(t *testing.T) { + assert := test.NewAssert(t) + p1, q1 := randomG1G2Affines() + _, q2 := randomG1G2Affines() + var p2 bw6761.G1Affine + p2.Neg(&p1) + witness := PairingCheckCircuit{ + In1G1: NewG1Affine(p1), + In1G2: NewG2Affine(q1), + In2G1: NewG1Affine(p2), + In2G2: NewG2Affine(q2), + } + err := test.IsSolved(&PairingCheckCircuit{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +// ---------------------------- +// Fixed-argument pairing +// ---------------------------- +// +// The second argument Q is the fixed canonical generator of G2. +// +// g2.X = 0x110133241d9b816c852a82e69d660f9d61053aac5a7115f4c06201013890f6d26b41c5dab3da268734ec3f1f09feb58c5bbcae9ac70e7c7963317a300e1b6bace6948cb3cd208d700e96efbc2ad54b06410cf4fe1bf995ba830c194cd025f1c +// g2.Y = 0x17c3357761369f8179eb10e4b6d2dc26b7cf9acec2181c81a78e2753ffe3160a1d86c80b95a59c94c97eb733293fef64f293dbd2c712b88906c170ffa823003ea96fcd504affc758aa2d3a3c5a02a591ec0594f9eac689eb70a16728c73b61 + +type PairFixedCircuit struct { + InG1 G1Affine + Res GTEl +} + +func (c *PairFixedCircuit) Define(api frontend.API) error { + pairing, err := NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + res, err := pairing.PairFixedQ(&c.InG1) + if err != nil { + return fmt.Errorf("pair: %w", err) + } + pairing.AssertIsEqual(res, &c.Res) + return nil +} + +func TestPairFixedTestSolve(t *testing.T) { + assert := test.NewAssert(t) + p, _ := randomG1G2Affines() + _, _, _, G2AffGen := bw6761.Generators() + res, err := bw6761.Pair([]bw6761.G1Affine{p}, []bw6761.G2Affine{G2AffGen}) + assert.NoError(err) + witness := PairFixedCircuit{ + InG1: NewG1Affine(p), + Res: NewGTEl(res), + } + err = test.IsSolved(&PairFixedCircuit{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +type DoublePairFixedCircuit struct { + In1G1 G1Affine + In2G1 G1Affine + In1G2 G2Affine + Res GTEl +} + +func (c *DoublePairFixedCircuit) Define(api frontend.API) error { + pairing, err := NewPairing(api) + if err != nil { + return fmt.Errorf("new pairing: %w", err) + } + res, err := pairing.DoublePairFixedQ([2]*G1Affine{&c.In1G1, &c.In2G1}, &c.In1G2) + if err != nil { + return fmt.Errorf("pair: %w", err) + } + pairing.AssertIsEqual(res, &c.Res) + return nil +} + +func TestDoublePairFixedTestSolve(t *testing.T) { + assert := test.NewAssert(t) + p1, q := randomG1G2Affines() + p2, _ := randomG1G2Affines() + _, _, _, G2AffGen := bw6761.Generators() + res, err := bw6761.Pair([]bw6761.G1Affine{p1, p2}, []bw6761.G2Affine{G2AffGen, q}) + assert.NoError(err) + witness := DoublePairFixedCircuit{ + In1G1: NewG1Affine(p1), + In2G1: NewG1Affine(p2), + In1G2: NewG2Affine(q), + Res: 
NewGTEl(res), + } + err = test.IsSolved(&DoublePairFixedCircuit{}, &witness, ecc.BN254.ScalarField()) + assert.NoError(err) +} + +// bench +func BenchmarkPairing(b *testing.B) { + + p, q := randomG1G2Affines() + res, err := bw6761.Pair([]bw6761.G1Affine{p}, []bw6761.G2Affine{q}) + if err != nil { + b.Fatal(err) + } + witness := PairCircuit{ + InG1: NewG1Affine(p), + InG2: NewG2Affine(q), + Res: NewGTEl(res), + } + w, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) + if err != nil { + b.Fatal(err) + } + var ccs constraint.ConstraintSystem + b.Run("compile scs", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + if ccs, err = frontend.Compile(ecc.BN254.ScalarField(), scs.NewBuilder, &PairCircuit{}); err != nil { + b.Fatal(err) + } + } + }) + var buf bytes.Buffer + _, err = ccs.WriteTo(&buf) + if err != nil { + b.Fatal(err) + } + b.Logf("scs size: %d (bytes), nb constraints %d, nbInstructions: %d", buf.Len(), ccs.GetNbConstraints(), ccs.GetNbInstructions()) + b.Run("solve scs", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + if _, err := ccs.Solve(w); err != nil { + b.Fatal(err) + } + } + }) + b.Run("compile r1cs", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + if ccs, err = frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &PairCircuit{}); err != nil { + b.Fatal(err) + } + } + }) + buf.Reset() + _, err = ccs.WriteTo(&buf) + if err != nil { + b.Fatal(err) + } + b.Logf("r1cs size: %d (bytes), nb constraints %d, nbInstructions: %d", buf.Len(), ccs.GetNbConstraints(), ccs.GetNbInstructions()) + + b.Run("solve r1cs", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + if _, err := ccs.Solve(w); err != nil { + b.Fatal(err) + } + } + }) +} diff --git a/std/algebra/emulated/sw_bw6761/precomputations.go b/std/algebra/emulated/sw_bw6761/precomputations.go new file mode 100644 index 0000000000..d45cbecc82 --- /dev/null +++ b/std/algebra/emulated/sw_bw6761/precomputations.go @@ -0,0 +1,669 @@ +package sw_bw6761 + +import ( + "sync" + + "github.com/consensys/gnark/std/math/emulated" +) + +// precomputed lines going through Q and multiples of Q +// where Q is the fixed canonical generator of G2 +// +// Q.X = 0x110133241d9b816c852a82e69d660f9d61053aac5a7115f4c06201013890f6d26b41c5dab3da268734ec3f1f09feb58c5bbcae9ac70e7c7963317a300e1b6bace6948cb3cd208d700e96efbc2ad54b06410cf4fe1bf995ba830c194cd025f1c +// Q.Y = 0x17c3357761369f8179eb10e4b6d2dc26b7cf9acec2181c81a78e2753ffe3160a1d86c80b95a59c94c97eb733293fef64f293dbd2c712b88906c170ffa823003ea96fcd504affc758aa2d3a3c5a02a591ec0594f9eac689eb70a16728c73b61 + +type baseEl = emulated.Element[emulated.BW6761Fp] + +var precomputedLines [4][189]baseEl +var precomputedLinesOnce sync.Once + +func getPrecomputedLines() [4][189]baseEl { + precomputedLinesOnce.Do(func() { + precomputedLines = computePrecomputedLines() + }) + return precomputedLines +} + +func computePrecomputedLines() [4][189]baseEl { + var PrecomputedLines [4][189]baseEl + // i = 188 j = 0 + PrecomputedLines[0][188] = emulated.ValueOf[emulated.BW6761Fp]("4601722206214295589033088118535164033177870656592393416182096720489015375349530497029363906228062576089643123713140574049771600086112717246828284242782907384274009546064259364292020067384886633838728785085024491886709733695441232") + PrecomputedLines[1][188] = 
emulated.ValueOf[emulated.BW6761Fp]("3236784823006863466220910026173765602174401950878036630758358824838437862247058427793759575023892215394054069008320620643640558315304032361447991476750076954187501466307705591468937850275918000275643689484882669396511455818909363") + // i = 187 j = 0 + PrecomputedLines[0][187] = emulated.ValueOf[emulated.BW6761Fp]("4284789655013537363710179286068174786772309617997172492875713618334010224041698623742954093647630044979590696811111164975595092645867490218999600504619327786427075567895145591027412670194923356675080657179976322498310050902640523") + PrecomputedLines[1][187] = emulated.ValueOf[emulated.BW6761Fp]("3397077585795497770574956326615748114624499481864974680679794687756340652124038636739703112837391756600057731371987592607050612125672213939529572905167024548876842558316659173179147459198230753321661513330822067010574384973022555") + // i = 186 j = 3 + PrecomputedLines[0][186] = emulated.ValueOf[emulated.BW6761Fp]("1091218109139043282885863378251414023636707952314494512372017652056066775447345582100836025388344721550644843738483035932390251464065810927296998086029013978640474160190153308907248340838991167773800991890772343915279880213727062") + PrecomputedLines[1][186] = emulated.ValueOf[emulated.BW6761Fp]("1932776294962294607326429681443867590647757998709969192146711063851317722986642914651257815274395176263091471604122034263563029802869462271974741247507241537190216497945753260923732481508423404907321219572689937399399368195704348") + PrecomputedLines[2][186] = emulated.ValueOf[emulated.BW6761Fp]("1167511316335148398324667389573191338189911188523766691946541436850923544315370902735475929874422878371203198033582100003913387275481716694511228659348082334915004259698541139727514336425787780973815806097545196965816776735499316") + PrecomputedLines[3][186] = emulated.ValueOf[emulated.BW6761Fp]("2232147160804702884423918597342996957835298299780156070425000360564717706225474915966264161194163856897624977960159816970762695368466669819933690851287248431427008562532340374129779212941298363468687914653260398814409814536898700") + // i = 185 j = 0 + PrecomputedLines[0][185] = emulated.ValueOf[emulated.BW6761Fp]("4306004365533618104109465014384942345157633268446223951065825943259943902363054326485029738013348784033879397763697209004558286305504012553322685432568902284266798288509918199237758986260160353745025118056507076964718412200474980") + PrecomputedLines[1][185] = emulated.ValueOf[emulated.BW6761Fp]("5317846815027928872027613253615316395074256905491637198887969705823256420589631614168749537597578371751367448584458328091779472154865596580401722980439239350433503000609691988033596752719329540220674293362655543781993007414133383") + // i = 184 j = 0 + PrecomputedLines[0][184] = emulated.ValueOf[emulated.BW6761Fp]("6421916924517227492429982731209978531258674731359925152390387866121182338289516345823943563081211701852337791457795568248902135968560498937316951201929059763508154635231637895327320269539372239958994090925499869310860986253930201") + PrecomputedLines[1][184] = emulated.ValueOf[emulated.BW6761Fp]("1686639416390596371553909149095788961102940263395631521907287043550337997450943996462705979234109248163103596400703514709833294645758815792512648361448153221566523943762780235884649119800769196410541762065839619252631807329090400") + // i = 183 j = 0 + PrecomputedLines[0][183] = 
emulated.ValueOf[emulated.BW6761Fp]("114180830592984977608620197891299258218097289300462265571701992566828885245678086954282598493180224315526740165796002388700857620749087925225196526364545673995004707545174481617926132341198681064474105987363733245947798260480021") + PrecomputedLines[1][183] = emulated.ValueOf[emulated.BW6761Fp]("3643314528606611673549077340603412386053042463599462417346484334589212006592934006344061880772131103404526782421964102589866962439448410017161380283698685419352722215199790426320563536567649897993742663645650081369281521868591534") + // i = 182 j = 0 + PrecomputedLines[0][182] = emulated.ValueOf[emulated.BW6761Fp]("1688781593013252202379324989130651254204553658263455673413039374103132244330512227184037116111497079310793760489525262660487540543772659221909636681163656965516816665546804331362111396729665567623773273510723669438274720439381576") + PrecomputedLines[1][182] = emulated.ValueOf[emulated.BW6761Fp]("5193662126481262079756387379977171046664714957086394774928779609243972397458622726105179616127621272398676788700476587946061649662787381825305427284456092085078167585950026775781619955464645576626263196914090116793028720585355481") + // i = 181 j = 0 + PrecomputedLines[0][181] = emulated.ValueOf[emulated.BW6761Fp]("6209325124145239397598057826971218736702656412059021495624233205137199082483268094371872462575283057254647931585279829235136285083590372314804121031720427317525847055896280145588498274054925464619306135927111436825130374258987514") + PrecomputedLines[1][181] = emulated.ValueOf[emulated.BW6761Fp]("1010036127161986089883406275933044583728615228275798613559152479035653938286105077506616278778086885698838149352983372472274935209107911460860800570032291053269269264676570250105939952537570296490530893420393830672599263484907593") + // i = 180 j = -3 + PrecomputedLines[0][180] = emulated.ValueOf[emulated.BW6761Fp]("3916228906012440253680477868908277251188577745381915527147519311139261470401587060157132237422167594920897130917968660806249831166496286071990001152498332736769111249089722262822452866904553313342810671126322183997326609281401583") + PrecomputedLines[1][180] = emulated.ValueOf[emulated.BW6761Fp]("3435089271581010462145195911014096957530126526254735357340434973538495700622046265485990757746992495980170396689021322325804572193781170277216716200627690025249089996091294085258675338244847987082579742015932982288506050846801367") + PrecomputedLines[2][180] = emulated.ValueOf[emulated.BW6761Fp]("2092456650515366730031987771316607202541762165703719836977478738322079795443253020269257496488401455876600213975362916923917314143513070424323372549014825342086761376907156571063602331303475993817855431561965996939828394721782593") + PrecomputedLines[3][180] = emulated.ValueOf[emulated.BW6761Fp]("6122599343245408702600384763049321564036931568283100932099921667936978634943943045806116525776697821713388531514318882641704622862282367112968413235610663665510382250760330294750688640834276927034495199398556442136786749389317895") + // i = 179 j = 0 + PrecomputedLines[0][179] = emulated.ValueOf[emulated.BW6761Fp]("2511580551397099010509156635216328707755934194815457044079051714639178304720486457528769378752459457724113502551911503989967751565016715972609480447192036056055039234581972315469267363618696732916527984903826194214600735642319929") + PrecomputedLines[1][179] = 
emulated.ValueOf[emulated.BW6761Fp]("5236472788607503982524038470940158051826098915931331379976687924803796292051708055582509266856976616002835930583332408804995941668529244992278273158141526570518774812582018031213946165449114235122523836895477325571581042012028314") + // i = 178 j = -3 + PrecomputedLines[0][178] = emulated.ValueOf[emulated.BW6761Fp]("1800040314478109155379981516978971807927978753985771292093243510338787993118579044453988179372350623657128285404863657231415514828304930341219926840787023926783731174197374231588404417124836232214161233727655561791136924294415108") + PrecomputedLines[1][178] = emulated.ValueOf[emulated.BW6761Fp]("2804331434973313747360020974539045142189940350869252061028836865792880045822923060547593583152729801334865015365392914719118701386614928096404458139252733417581429421730546318906704389393001623046225620830318086821332624756683051") + PrecomputedLines[2][178] = emulated.ValueOf[emulated.BW6761Fp]("4010999921375805968870272930021879668093105275880304512189843893600197903154276931811822077163095190015328087423981272784184916779390182971756524393178302760547186651561079225545153363602239291122847568838787351267442061099216202") + PrecomputedLines[3][178] = emulated.ValueOf[emulated.BW6761Fp]("6584340050018347280681423899606339541161987220622923184308043318586136566115378253810690431289328806239175170506538394612454436990768398488496929349106290361186183199132928039289063200363255216189738034088179050204170911896584164") + // i = 177 j = 0 + PrecomputedLines[0][177] = emulated.ValueOf[emulated.BW6761Fp]("1726061023290836353179062634745502209899779569524834789308289547123213283973848080035664750629441061589219352338927276240139068374410873973059216682750238724779941510904773266077989956039532262991092557793969212159854495230862985") + PrecomputedLines[1][177] = emulated.ValueOf[emulated.BW6761Fp]("1869481926743730917071424311764961273208736749090789559157861316592814440537481639260176341853100863466049497097529708062437289228998726553060613609659652146673372729676870794229410647181863143859417329385468871464108699012273846") + // i = 176 j = 3 + PrecomputedLines[0][176] = emulated.ValueOf[emulated.BW6761Fp]("1793856615109417544088516938753960964286334983251529993717647594862500706265566364226045939516389855497516791858306940602084518785322491855363658977981397037532005228517905667670972924240011942702220857789530663664665855554661222") + PrecomputedLines[1][176] = emulated.ValueOf[emulated.BW6761Fp]("6395681833640894351709246408264671690875088694587688739687699328485856788238236843578177291133035935076604936426130742942110635837371378818936098673000308956223463158303247180215145841735445353551571220444236560630404547986209151") + PrecomputedLines[2][176] = emulated.ValueOf[emulated.BW6761Fp]("2658682327531363340886128523639936936794527892563754531362906969704238916622746439525897392384544276189617954255779173048784430461921811078498554047189350865557776717733143006886329081619462673131362508883137636048162229001001752") + PrecomputedLines[3][176] = emulated.ValueOf[emulated.BW6761Fp]("5617554506492221938975106502341524911009083485697351396054750293054330722247646308915037251220568495308451049624878789498457236602788959655661892769719935053597205105831249795638145145160065336641583965498951605487903540981050802") + // i = 175 j = 0 + PrecomputedLines[0][175] = 
emulated.ValueOf[emulated.BW6761Fp]("4470825980738541959726721215854613341075368401570063568735045574729298521280405311388012358627027886827869852758362621710083501885879349449153525400484725770873456029339549390419942617945037345005301748390450340592093970111359648") + PrecomputedLines[1][175] = emulated.ValueOf[emulated.BW6761Fp]("1920568269358036725699475088712901539873940686097511566671943459117851140609958127041667654873888431108367861061290576749035032066751452068241877286059270867844790516311803884123933497833146654049795784282665587712174325905132873") + // i = 174 j = 0 + PrecomputedLines[0][174] = emulated.ValueOf[emulated.BW6761Fp]("5291384463484550783828111596416354076934608698798352210861001411148434599869192035754382317926833597940265596903863701701788020967880271864100145931495422846851649251335105459602027077341966720233978984790404433500118923981944200") + PrecomputedLines[1][174] = emulated.ValueOf[emulated.BW6761Fp]("3474094727155888343948366737511294507806212772178038965971065737773346795180126935784209107320980882788680952477240615967935772085115185124196534170261596941607671230574363788110421854443479506298253443971821547731182829113123758") + // i = 173 j = 0 + PrecomputedLines[0][173] = emulated.ValueOf[emulated.BW6761Fp]("291159196638023833126170878103962505591607284608066095971122730560627573500586990219507525188803791560522939176145008398336699153787221653227248248679740995364715081700438265042240313974596422431379084154243300992164241337305047") + PrecomputedLines[1][173] = emulated.ValueOf[emulated.BW6761Fp]("6637317848210107927777296198352103696418016960893139157040698743859956793987416974109447386120759632207128207385903665276310974970806579773735756047615944951621704212430487647997793051862976764280585794774975775226509571642012340") + // i = 172 j = 3 + PrecomputedLines[0][172] = emulated.ValueOf[emulated.BW6761Fp]("2896810465976073493801911194359719472039621862530606610348926118077429042177497167032204517123568128544943241127796124713416640564286772536699713462008786003585732747296391431773911293376178622434550785330571364764018950598140281") + PrecomputedLines[1][172] = emulated.ValueOf[emulated.BW6761Fp]("4467780101676510281057903463361611394824850531998065167477996007953191976696291514533668019146536239987670464917412247254495086054690728549700926765864200643470435884309849171046000608577799964445823074181488503901546261068862692") + PrecomputedLines[2][172] = emulated.ValueOf[emulated.BW6761Fp]("6272874977137170475628881314521834520164802977112863597206461767527978691719236433645395640481127588156828153563372518828973086791188240123830748726657346202491578701145062590487795733410523861316913042654984845940431454041270826") + PrecomputedLines[3][172] = emulated.ValueOf[emulated.BW6761Fp]("1491519716322459721476131635974774555179040450041728831056366076429022306831905396756755500249298779816133035692361613992371069670991372872597941652981525923051283306282081842575371025653013841958071420611736931252604211356071193") + // i = 171 j = 0 + PrecomputedLines[0][171] = emulated.ValueOf[emulated.BW6761Fp]("6235152558585444833493261246207340539804815046978363622291717651978489706660435740588616632493125821972920945508541635592163696120037180705138229828421746317536693647276274336413212776398698284731916429701610474931192570311724369") + PrecomputedLines[1][171] = 
emulated.ValueOf[emulated.BW6761Fp]("2242672995139372462518294141548297566108953081821049993780338631576943780891567110286309915075208848428813595805761752053821655145068096776825637230030519832332866845461170464293849009631853717299166685335971374640413019426278822") + // i = 170 j = 3 + PrecomputedLines[0][170] = emulated.ValueOf[emulated.BW6761Fp]("1429112810826305113526238024068625519617466029256408943654835886511226415318108590963818255082352675735276265680900244338259945649853087097891859279422169569976330990028761011846609181766104569064844095718653016473753086326169683") + PrecomputedLines[1][170] = emulated.ValueOf[emulated.BW6761Fp]("813257970458576737350931150692350224241454191961120910165875211113010579758220571491319151091620887394870103181754304418786013541661782866413149178950930415672806510706468797621548448679182563747650309708282309926241128288673135") + PrecomputedLines[2][170] = emulated.ValueOf[emulated.BW6761Fp]("4615750120910655458062027499234169678376889872550909552027422821903093257029585915682695214715621199838795424793869266363821929915750873802222291947479724423100732276424960820653424133885708067276050345616746879636049613871724548") + PrecomputedLines[3][170] = emulated.ValueOf[emulated.BW6761Fp]("2638402868714127517218145901538866889787798408569326361013240257145955029560417297271208866993745742109396095491575331998333504570744548528767176847869288148827357015366601590545284586737024599703154347220653254040080070976337387") + // i = 169 j = 0 + PrecomputedLines[0][169] = emulated.ValueOf[emulated.BW6761Fp]("5151053341945160206168664078810674786626229464615324047316643094827022265609681594014045415804609654596019048771436372513937327411900645278608871991536593143635447460485268900090456257568071987996227531987643282129888289795315787") + PrecomputedLines[1][169] = emulated.ValueOf[emulated.BW6761Fp]("4433808739393296174309455467258529089977837943895638563487610473212932875402456433695523862673003713626322557768899732261336799338902558617024412348901220971079947263098919960666503561149253055091635599396540987777068978448589942") + // i = 168 j = -3 + PrecomputedLines[0][168] = emulated.ValueOf[emulated.BW6761Fp]("6573223359061173030393223133587057276018849035205508504868826180775040314361353866958802262264508674286374515698526491670274560376396065049758844992950298456959398440073914900051030331074092036623459108691740030899630765167991804") + PrecomputedLines[1][168] = emulated.ValueOf[emulated.BW6761Fp]("433563974948430427145344771367180664437544635785104771951413151629760183394516728212069088933279038869983505880491252805362319646939442623952219790532410824637016808664128139205897840527209397149113432381134849938543997936884322") + PrecomputedLines[2][168] = emulated.ValueOf[emulated.BW6761Fp]("4780438282108523499566089886493747223520600917434700114406865073037482480152520938722159790890960945691254743766968215641137493966211783939811018374792848222347243639100762780690923695318149020751623537076408377114194177696633685") + PrecomputedLines[3][168] = emulated.ValueOf[emulated.BW6761Fp]("4398587526357486098002805624557711076050197843122002073647939149855114862782129256464606723482050274657562960400962939800415311926127759986216984470993028744802713704731703741576155965585649125162356816261139543815225445809074181") + // i = 167 j = 0 + PrecomputedLines[0][167] = 
emulated.ValueOf[emulated.BW6761Fp]("1647664286159942497853030191368343827570740490539376712617435901117835353856034051805911137507399235191594161042407712721047214542679095949784491999942252413102189504539951390607797346810193659400896920380179521693479129340870667") + PrecomputedLines[1][167] = emulated.ValueOf[emulated.BW6761Fp]("2390287243773902242827076870477280124811775106501454290834199465068296153193080900637929806619586936243431863370083347240345585229599137089325914019290067280079041375854153097245939993055850496740464076243793307860295157220489626") + // i = 166 j = 3 + PrecomputedLines[0][166] = emulated.ValueOf[emulated.BW6761Fp]("1709949524102589335847024052923955037315354604476833601571466852493572947268595108101545007187963974951788163063772644877512200289677146556030347420379930797247490496246244568568366831695984141657359289286221852473683503769654596") + PrecomputedLines[1][166] = emulated.ValueOf[emulated.BW6761Fp]("5893228435550224958157425121741517506045893261892569389741766590829366373878060500577175254669696862906574707093963266493237865832467079001148718602123158286651558066342826773724504656661517285037169488249399183378203836969057275") + PrecomputedLines[2][166] = emulated.ValueOf[emulated.BW6761Fp]("4921983716737681000020818352454095467819829881281876982451644207064948583097442692356739526117975859626329445105576138093211975899941930818396721119247635647664410038740022271464816311809170130035908923628827870553651390809645802") + PrecomputedLines[3][166] = emulated.ValueOf[emulated.BW6761Fp]("5029845452797108141104211578426309150462531240073265769929326427375198592202381636899891851441329434234634550874094633387930815132677505491831139226402537766580191195789214542567421298499734903290553980122172973997208601705231659") + // i = 165 j = 0 + PrecomputedLines[0][165] = emulated.ValueOf[emulated.BW6761Fp]("232572658672356797578043680617364321714987274969275103384954673212085034692277243801536023990339429694630902665031278912607605354670471863738561553739117166877166249283856098435668003254327820655318227276087352237830333067397901") + PrecomputedLines[1][165] = emulated.ValueOf[emulated.BW6761Fp]("1177003727892546832866508390022270538418538637811025906900098412455823364300811651099613431544482191415414696640978119758171995565582917710180141194046220261448537016001183780200162781547581979982676317862484246620865147679291007") + // i = 164 j = 0 + PrecomputedLines[0][164] = emulated.ValueOf[emulated.BW6761Fp]("2349331348722034078094792909294804855570951673402701960283569222334892024519680930010762814852187567219323305147207170737074416468468147497844346987827839689162791474940276610135929682703627664335709075399106574798477419568510404") + PrecomputedLines[1][164] = emulated.ValueOf[emulated.BW6761Fp]("4643592453902905429004614211524140894845659434425593111572454756461288139175970093152918726074460168876790628276619819105484323910842976004056293477403091127131033849251587843923973198464228459201150194269377117357851959140188983") + // i = 163 j = 3 + PrecomputedLines[0][163] = emulated.ValueOf[emulated.BW6761Fp]("52333827582414640990444534040624682671317644434073521325969166221486850859764683430510421110637534653003049014936771816647586553444768944089642009319835647240758299476088602838239048121990973268096414788580977151073980666905173") + PrecomputedLines[1][163] = 
emulated.ValueOf[emulated.BW6761Fp]("5006684479864566702650706524533038451782099505978373537938019834872314066037685041434363897742556522136697745017124053405850753653625837991051365133733468176243186518320051581480634917947326031233212380721027184237318081704860647") + PrecomputedLines[2][163] = emulated.ValueOf[emulated.BW6761Fp]("5395654817304820758994256808056443755302607472199986470885449004836905491060935175671751260574929678123841275884311222036169994045893346568582934629470732910646691606254245944852080137051701323813006245299620576672799106022769191") + PrecomputedLines[3][163] = emulated.ValueOf[emulated.BW6761Fp]("650736239870954558727404407995742820117382750229335308197505424837541407737453157093401352116197759550277694567784174548449221820880569998450653552639238746278423134232062947534257012891122077779859567838318586182926302109543544") + // i = 162 j = 0 + PrecomputedLines[0][162] = emulated.ValueOf[emulated.BW6761Fp]("966717885432164187094459687544853956185577913816048544255971196493117042692563379865241269084260979556031191892848503771930723476206819278309877985692266623192674173120914799472153518381472278820426394303797572079804504184790592") + PrecomputedLines[1][162] = emulated.ValueOf[emulated.BW6761Fp]("3020563500188388877315150135981069820453062660195966231237216820091970048960040900415963172445058143765783985486909755563513372900233756646993726834722183040094649261040765637788368630807503529899998612992257688511729617092599566") + // i = 161 j = 0 + PrecomputedLines[0][161] = emulated.ValueOf[emulated.BW6761Fp]("2723607430772659730961790718447048641080044768560277644165932027404231869402860898365710504322696991333576407490560210763843022332678136578931828844357649891343819627199496798332807949085545953320422192134670525530072902264941810") + PrecomputedLines[1][161] = emulated.ValueOf[emulated.BW6761Fp]("5129647963722466620942267952017113426807349734845472346580002125338862335088074302957583315286637384286504759748385986635964533515779512711081531549908778015105442463377527732867005927843773890731693246823345607932059642142760798") + // i = 160 j = 0 + PrecomputedLines[0][160] = emulated.ValueOf[emulated.BW6761Fp]("4577081852725837198311485026729893776093048114534072194182826126558577801376180078853990029623202669787826337303348968528811571259730377323605351607188692812693591153711457444300581467449173000389674163550804048515753424541817422") + PrecomputedLines[1][160] = emulated.ValueOf[emulated.BW6761Fp]("6823267443517190019085934072639764787968936714958209215443747270739622300855648847686288401398497539575198945858259391975943073313266494176351501686559627127468167527194301984132678997501550728894241645116121821284329222527177962") + // i = 159 j = -3 + PrecomputedLines[0][159] = emulated.ValueOf[emulated.BW6761Fp]("324666234286344014142243331706963725161768120676466297357473498737699334487145813076943214189021251649111237638123254146524201058991160673739004317499134741949534700426970766872442991803079017288149670074860050490378349499214790") + PrecomputedLines[1][159] = emulated.ValueOf[emulated.BW6761Fp]("6636617842236151333529964795471565361123586529270909295929312662528628437720181288422740806930288827619317815525172605840744493547399413754001814728110749523835116156866350602345640678920229273484885229046952992387064993457157436") + PrecomputedLines[2][159] = 
emulated.ValueOf[emulated.BW6761Fp]("1169179279619239804760750507913447876532533844848139969203384466214658111893717691761594819542977135521349003566911643497169771797238574098650840878350350576524995523069126926065245425919471114523133443004728427891243447229624997") + PrecomputedLines[3][159] = emulated.ValueOf[emulated.BW6761Fp]("2390427752410237822703140443565386259958384606729063432115446405419710053220952184622592611083931480113986415359456896163696394246582897076631508221080761843074406385325595566132184951204637527250104024067754267802701541998679557") + // i = 158 j = 0 + PrecomputedLines[0][158] = emulated.ValueOf[emulated.BW6761Fp]("3491904621617458570467400788193922471061198912031461784747449931748081025059466670568428332079097896613569081003140863723140077212586281756178360708378112359111486236347800618246901043921316993513560276265805960807878983619245559") + PrecomputedLines[1][158] = emulated.ValueOf[emulated.BW6761Fp]("417856161039822250074025130344193295614063765635472298026365759996884135822621963191274082696570821282309331068298315248982310647089513266052159284058406772587255082241693604593372878764622714817790390306552672168290744941573972") + // i = 157 j = 3 + PrecomputedLines[0][157] = emulated.ValueOf[emulated.BW6761Fp]("4416768778599945461484847862340231334711151920303921966291479519171793363978734600584343456569070959362086842259460846000247532597524419720784251527346762651605556856226230166708907343085002581478167277794058294334559739054882282") + PrecomputedLines[1][157] = emulated.ValueOf[emulated.BW6761Fp]("1852250632907130188790893656467026455875589500512379999240781207064612732149124265752964795049049985863460565553287636692334980501445894984189522931023218131754348919092652254131161270167055415009583904450484090003403007608216201") + PrecomputedLines[2][157] = emulated.ValueOf[emulated.BW6761Fp]("6419587158487718559878776704806672864160369506950210711711280857313527636662817583186572880263258124425113400796353147829983523112230494675357910189445382699469684963105565162531093198544478048674343136597682281113733105381561484") + PrecomputedLines[3][157] = emulated.ValueOf[emulated.BW6761Fp]("3722521298055602645113974315645129742353660711003216708553366823782751701485535270778468429985655461253844592752384773735943768616708965636222693924487722919702682456701829091211690631752269481530604403689987227114111118801741291") + // i = 156 j = 0 + PrecomputedLines[0][156] = emulated.ValueOf[emulated.BW6761Fp]("5545397166873684773113053583641661672108126538637820134423766600317555663306128116352446557181332464485047243194281156743833711442696272262603883214065623959398816145024510953571106836545478602195020059304848842043236857015989475") + PrecomputedLines[1][156] = emulated.ValueOf[emulated.BW6761Fp]("2021590172970888707299215336977681125009536065721635254366967178736120889484508938106586665954402299023024059935122342894157282324534073308900827186536479866968034667597034915763748285429919026521587470306288580300219266936435751") + // i = 155 j = -3 + PrecomputedLines[0][155] = emulated.ValueOf[emulated.BW6761Fp]("2016865517214075442991619362432436346237505752533125825957302068177615343035502121762958126205312972599032371564787346477939284797660841551376056858103023635292164833972540987459452711445577182426858038678074119933397740796188486") + PrecomputedLines[1][155] = 
emulated.ValueOf[emulated.BW6761Fp]("4921345336829462778422198035628471967952412359162858241819721707968328922396138974134758163031865261007952041010331403775328345606360716582715294577475815722137271438236084963126900294584198188573469683598274793001598489599256460") + PrecomputedLines[2][155] = emulated.ValueOf[emulated.BW6761Fp]("5452289706620705804122345049563462587304628763777977219263610747055789773510449643975645265547749639659444244640337694551203034035261233368893792061960538755835274166778837905048877917573980193672340922379035547913555181646229675") + PrecomputedLines[3][155] = emulated.ValueOf[emulated.BW6761Fp]("5330445646284714072427223464819908250873719099893171438520402130309158016777695740685710535487563917271490762774254768988035428482089657277539422521799971990067449345899767156841349406799431919183289922328930736393420384380134402") + // i = 154 j = 0 + PrecomputedLines[0][154] = emulated.ValueOf[emulated.BW6761Fp]("253912214657176650779918926993734061117688943036911490719333378236618949753645725163602990742193281812052940313043489777988352564974178451182405372274947126939291999108991745538124642855162625248568122897774021118657024053312683") + PrecomputedLines[1][154] = emulated.ValueOf[emulated.BW6761Fp]("90454792504233772539673107359188540647097721046630177682340822562308368630868281445493887876764666553007223881517444670503149218141008821745056939702373642828660491180908094902056900653734980980380688909835110896102319539002977") + // i = 153 j = 0 + PrecomputedLines[0][153] = emulated.ValueOf[emulated.BW6761Fp]("6028738756457492013157559987618981254929533455069995957583780714104521228354324336665057575842907015569076760475281374260515946359427739417293714745131308832966623853621805462030358839593520487417623566459988488074177330491690115") + PrecomputedLines[1][153] = emulated.ValueOf[emulated.BW6761Fp]("1631726893290625522967477247480127881963815714733592087840583915245097436988929044123570137356058690088183128562372529403839139498072059028902685488333113667077839046050085914776299141708527188673659500983012708933861459824370354") + // i = 152 j = -3 + PrecomputedLines[0][152] = emulated.ValueOf[emulated.BW6761Fp]("5933803588081086836643239680396970245259265430854718440369458799593483447844770571690936853076432836031619939245130596952627136235619761911719664250570429698192340677267247016627691113223709288555832534009759604452293307865143494") + PrecomputedLines[1][152] = emulated.ValueOf[emulated.BW6761Fp]("5550204757083904566420810705852033305679349032748397343293761160397094554860340453829269107483927221899278788731018461771943481285186546907778589805772690228002061319154636587342918455042668146557380849513813601461971655743687056") + PrecomputedLines[2][152] = emulated.ValueOf[emulated.BW6761Fp]("5429392460871260615699079029883938895836745062461512650968419621941700112039072543911058280706056517701803479157512850062007543761793301890234430901759239345753853600059003936486418961130146759088508684002234349028962962738685068") + PrecomputedLines[3][152] = emulated.ValueOf[emulated.BW6761Fp]("215901435142930451312237140515151372543713525366036765710265909611361979775752756507445753622709378297284041357450332076126978470060269809606181904660211968469485287903524957875773224720255771036744293584738133055279492104737994") + // i = 151 j = 0 + PrecomputedLines[0][151] = 
emulated.ValueOf[emulated.BW6761Fp]("2217293244223503800342403802356275665761021417440395243162100358918795095532458487975221684743329261847466915573304126110010215444704511730329892139369564310926788959661698224464272668381456474342031548863057544802188409502254976") + PrecomputedLines[1][151] = emulated.ValueOf[emulated.BW6761Fp]("3032194119911089122528491590697665117123806514556903354257236683855345124475823193585527248686809547112579726301073625230584295621251331002111854329737234183946579207765098307179350519051519256775154498600279600797852210708198376") + // i = 150 j = 0 + PrecomputedLines[0][150] = emulated.ValueOf[emulated.BW6761Fp]("3317725176190874681964926700784232223615994993578507763097512731747224333990127117353469138689288343877122942681200365845677839967780121222325648610973030551825533993376537297018053710863282315262763507163658751165792166003247917") + PrecomputedLines[1][150] = emulated.ValueOf[emulated.BW6761Fp]("5311779975568348866438266113993956098697070498926704949763810071482344968514706381575287770513988277963044893761326124770442604005626456815424800417298954303878320163148098474713806918505566913500600776461117972760217108510400910") + // i = 149 j = 0 + PrecomputedLines[0][149] = emulated.ValueOf[emulated.BW6761Fp]("415155167990909811773274527448565627660867339446865646154781434352236186331864123798900488790219924256132831458276407042546585642359457666570994480814911450794692261305789559623175773270786604043226061133341558724866809936660719") + PrecomputedLines[1][149] = emulated.ValueOf[emulated.BW6761Fp]("5132669865916179457696452861924723931652065582428728251052829101967757160674775905534892979784141098082940401101234100043172892634948165525152188787897019723204057104822162990347257969957048742649064916404071127374631731037783787") + // i = 148 j = 3 + PrecomputedLines[0][148] = emulated.ValueOf[emulated.BW6761Fp]("4140004734327878216201045763193531111409413457779764546283946394296932495071554746768260262011715289440991089370686959220415992218328800724536628417686911729756470325374372983040533731696917966729370346256691263147315029964417433") + PrecomputedLines[1][148] = emulated.ValueOf[emulated.BW6761Fp]("5178761700027705413555156019361934121977511563684255818901017175838012722352349528434988833378110316523353798871708165886738677088272124845182724155472081107868405450423263348442225855283938083889870097371759935545657257393660743") + PrecomputedLines[2][148] = emulated.ValueOf[emulated.BW6761Fp]("4695923425938084215087822823878724487099638070602575187528539251307287838561931020132982664450582460622253248261068258993590911594659794278303802029261890344535175816521628380905890439052026981269530236011516177022689249116647786") + PrecomputedLines[3][148] = emulated.ValueOf[emulated.BW6761Fp]("83528947143211173825194412878697539272293480449701172031899761574582354182272541367121649048326072317795681794964152795904204202564610458519701137277940395274219699446009240947218090898932240186508194787221672254383477999336292") + // i = 147 j = 0 + PrecomputedLines[0][147] = emulated.ValueOf[emulated.BW6761Fp]("151330053509034414994578636429425902053986786209892471274991525950831207463597520422983269342506983481008826046782392196380970176079709382608576483491871347753616020923167675319120990845115558945083570810266672945358625888108975") + PrecomputedLines[1][147] = 
emulated.ValueOf[emulated.BW6761Fp]("5167491934529740282963729196518203437756002188349193552571241004047127958410940743196333385903439868011966656948604086680076339273646285728893389483436487495087981055968161190372633493259530748320012260258671783021341427233010876") + // i = 146 j = 0 + PrecomputedLines[0][146] = emulated.ValueOf[emulated.BW6761Fp]("1114470723805581119184572097688824076189102046592493285463120237308793702368235633939040330876356116630125837423184649276149246843910103071261490581616432947384638209879233093598840409201085659072783743986568100816590424861898608") + PrecomputedLines[1][146] = emulated.ValueOf[emulated.BW6761Fp]("6478777454598824156094943494586967960469393198794904011417099271970302029910680845103571038668910097824920386480718598598173544918091048834436449425890641609187952938773707865251723261486358437197859060313186213189210893005143284") + // i = 145 j = -3 + PrecomputedLines[0][145] = emulated.ValueOf[emulated.BW6761Fp]("5859048622402069660036399935093585979604387754240347020221644378577333395030687729583771272159017531113058916586134938926883652393431747789333732232766810000369351298331399490404676517013544774690353892825693625562225579232134104") + PrecomputedLines[1][145] = emulated.ValueOf[emulated.BW6761Fp]("5130546493861392794307807336052690029200583399365091783170538676884818669069412208829663181252666936935600613069111913888311401666154773585381780173937110082909600581595352660767798085758658916528837369630263916920834131646570216") + PrecomputedLines[2][145] = emulated.ValueOf[emulated.BW6761Fp]("2108411833057507512223957404319309047383861110883456473273524034749089770620099048622453493049814509579167338063647043613852813952598011253428280877972494566190259092437580681150999842418960600345792711661706349548971961797181565") + PrecomputedLines[3][145] = emulated.ValueOf[emulated.BW6761Fp]("708505628301545791337509871984655495035967667250979838482537030174702586032171072906665305184787620631564903687145362843392236437965976088144658101185950961085417107422270026571519877928676536019489776985913311663627609273904453") + // i = 144 j = 0 + PrecomputedLines[0][144] = emulated.ValueOf[emulated.BW6761Fp]("1110693141506833443806812185356385657611154222022956119590162362046526185899629723001989265777826816933834594500045594892823893494919273443552937721223945431485877663287749801345271403238264692363210636517643455903357602891457290") + PrecomputedLines[1][144] = emulated.ValueOf[emulated.BW6761Fp]("603929437399447291214728195951492966191103124355063750553076589829671952973145516286561956436266736365337087448302435015635010597001789338483106033528008186612669695616779426601106014489886431783937516089415892537636015452587873") + // i = 143 j = 0 + PrecomputedLines[0][143] = emulated.ValueOf[emulated.BW6761Fp]("1486313396257225319276454417156949032235657107769651771999072596075215391850952818655646828707669204590200946140222685601763062865837088768474997694507956127217411232824640625777584603274979885819664470425193993045396278061554670") + PrecomputedLines[1][143] = emulated.ValueOf[emulated.BW6761Fp]("5900091056252635525061247890639784854912554186160436040691327117040239450463401250320405408025474934781221398674442720989333863513889420429293126000723066765887369389159668421318382662346470829811188424370478317951507216345256643") + // i = 142 j = 0 + PrecomputedLines[0][142] = 
emulated.ValueOf[emulated.BW6761Fp]("2792214896204349446812807850849074442364083133360182878815559121269639917337029363471023254536409861081182182842928669453464821839537737772926644433219600872696347238197912222306544693056149978277507630422064429186112081667145266") + PrecomputedLines[1][142] = emulated.ValueOf[emulated.BW6761Fp]("3944833749529436605658652267849673696497888958101001890165078244794151784397599858860254268537666046558802898859348382115556708759230970010852200503643863189853426075395343165134428906535715936830920322947079380746476083547373561") + // i = 141 j = 0 + PrecomputedLines[0][141] = emulated.ValueOf[emulated.BW6761Fp]("417856790372798077015113846490617233071837258253813598704739825574397265998748349018546952531836869989491058332284609105381556940328508752945025067012370823786220503025032772358278513253286721537773229007859176953964580903195727") + PrecomputedLines[1][141] = emulated.ValueOf[emulated.BW6761Fp]("3212095295566726705955520648736709733214732630239873901431866202929498890238143597713014321210267543461754987631611427191371921286136904225726012666396381699549816558028560296677824912419015468670527904167438613604543861012490881") + // i = 140 j = -3 + PrecomputedLines[0][140] = emulated.ValueOf[emulated.BW6761Fp]("6454652960715100287228837394962613813864449960018709130892915714253696419021214154832054652581717272410134436821903319869939794983804221461689920428475008780101250011049231364935669307075928640376378229913679881637394701636737270") + PrecomputedLines[1][140] = emulated.ValueOf[emulated.BW6761Fp]("4330478820859849211086998075624162533796478046322369047534830098235600627275134989518159311577872109499354455070789488270437693314611100870115090161624057461733224827616650657400092680610335909558941615999861146131436385364638808") + PrecomputedLines[2][140] = emulated.ValueOf[emulated.BW6761Fp]("858293139758627108215187205269437801824125169052800366429342716136717086471757029929438123993480089882929860186241630315111576602418646305030928478620782188276456539301601359470127933194535982283045054240161101411651017048286627") + PrecomputedLines[3][140] = emulated.ValueOf[emulated.BW6761Fp]("5227206335183556341740750730697286334874771364666852277057794888432894496595221781561994053642178607365548748041278338868871460536603201138958142818647966045713904898925900602596824373321003503289242417763131324999615120446619142") + // i = 139 j = 0 + PrecomputedLines[0][139] = emulated.ValueOf[emulated.BW6761Fp]("6061183520623123725589769021285572001479789372351644764609461692939780384165064167719928513044495562706770395832575876513874629527231851623713680740316664822309168015470185638170819110129556875969482829992633944123125377558202569") + PrecomputedLines[1][139] = emulated.ValueOf[emulated.BW6761Fp]("6638743522580856341158564937403879228051631568619778907717013129201344333030523763340630776780970212886218386113539324071014532332375465792484714829314652990224787865019761417248495535237113451295259942116314962267177857737645266") + // i = 138 j = -3 + PrecomputedLines[0][138] = emulated.ValueOf[emulated.BW6761Fp]("4613201178001957129220355825431588428522349943345108194412238432864585090458272273751900747338897976889754065236440077466509756318978363238292987387245777302440033792016880363771237380814734752042079287291688686090965516430849448") + PrecomputedLines[1][138] = 
emulated.ValueOf[emulated.BW6761Fp]("1665407548870793045474076587401340889263185099880576303565184262174562655936391826075556576588193249562650706614327662566000797730855714827683876175311660437080511764204863560130199676589127312887632418700943424352628228848843582") + PrecomputedLines[2][138] = emulated.ValueOf[emulated.BW6761Fp]("1655914238938042682721551953036253645052537746651874939938037114097148347201468500148537117382150061034475405427296650617729834662783132704955872642876325349014004370043171807288606000041903234533546215958118359510613083090230570") + PrecomputedLines[3][138] = emulated.ValueOf[emulated.BW6761Fp]("4849745136522473566966399310625020500289818714428592684220005115425817386657142570582575781909747557971016323109154388103509887878174433513364641252184356415172199372371474037346808680920949717207937405738624457011320349705365166") + // i = 137 j = 0 + PrecomputedLines[0][137] = emulated.ValueOf[emulated.BW6761Fp]("901736082201449083754819885340801075704408863998813128158005442577313496494289416471869404758956530411763940080091857539300845778190660401951269041409695446928577309363369775219696459229579351565985855170562488979593152186976780") + PrecomputedLines[1][137] = emulated.ValueOf[emulated.BW6761Fp]("1001009910919327523032462813652578972145867187256987285583595342347716674065364382437719001411863024220494990944043998634009881763715881903122679211258938475917286800934054659626133958862607032250059477031442995482587302562197710") + // i = 136 j = 0 + PrecomputedLines[0][136] = emulated.ValueOf[emulated.BW6761Fp]("1260704208583585454923573340419048846830684418938706770827984464987809006471346576124097325137037851487326970768765206484805325033050293845686253143665118237882360616375617892187324896995348006498509282714830651433149367764383338") + PrecomputedLines[1][136] = emulated.ValueOf[emulated.BW6761Fp]("4146904343070780886774548283636255552210562066782568724140086706931842548150893909742775877375612341303901723683529979271573649898254285900026209098018635582129262100543849831967889807912740957081744133511298450256079900667190703") + // i = 135 j = 0 + PrecomputedLines[0][135] = emulated.ValueOf[emulated.BW6761Fp]("5934405004285776331630280637109070463626784739136943282051799037098249132104511122554510074244540327876064527736509150528218419867398859662403698876411373576157376672220445952316857363235511588325272279467223393469732469391589454") + PrecomputedLines[1][135] = emulated.ValueOf[emulated.BW6761Fp]("3853300763714466280621239533765003576889560379702791099289094890338630225166520693233826140813720348044192201950244579487184637298409839680878533788041179595053520650058145442845460866315657638463420171773555348977817651593893300") + // i = 134 j = 0 + PrecomputedLines[0][134] = emulated.ValueOf[emulated.BW6761Fp]("4071393582225441537259129740348136928083015327075675947139818040266037848160886082345792501192485369996749135042646411769614852944973561715055093656354124149362614454203390246905090905099260897928953398827135826072133621101404195") + PrecomputedLines[1][134] = emulated.ValueOf[emulated.BW6761Fp]("163064925070158099254952264886241234943555067376205015997612082620013849480487004418622298793050415738026006109172604997325700501036308991398166284975128590614460778829761389732430123500510222705991473579068874904567649913483108") + // i = 133 j = 0 + PrecomputedLines[0][133] = 
emulated.ValueOf[emulated.BW6761Fp]("1418055253056202074595134691646273080389280058925225243405714343036410124848094320257507899566341757290227427489602264479370376697910695572778903093150605203445190132527551908453832799926697589813708187671591577598951527150266462") + PrecomputedLines[1][133] = emulated.ValueOf[emulated.BW6761Fp]("4155270220852296426069284095173099018808860752865568102978836008384241124182786514727621135289096926318101983300917387205733381093054071103209226340513432729873159990132982563319636636190627483048554684794275717730537206864972738") + // i = 132 j = 0 + PrecomputedLines[0][132] = emulated.ValueOf[emulated.BW6761Fp]("2330886315626139608560488604463307042069439883877339752259227046211913815357101972673368411245947078845325193301125955376122337450853187562598776619960568843510592987176521049173659607909314337783813131204696221681811462179088691") + PrecomputedLines[1][132] = emulated.ValueOf[emulated.BW6761Fp]("6711986147911438355804568563575069333271094896862220019688012927363594088853071294014660796060718276350481094706075223562102712977643602867969637013015964531826665436102237060386185882331244978345250975580544289533439462447279257") + // i = 131 j = 0 + PrecomputedLines[0][131] = emulated.ValueOf[emulated.BW6761Fp]("2459194991253184572352513073915488362359413982519643806017800096086250895884084333687760994466920538720187471885744206660334045092314159558800220531380876636659371375141085558696675851052370181877646188303218710742352859633964342") + PrecomputedLines[1][131] = emulated.ValueOf[emulated.BW6761Fp]("1798907412470330186181222251574418897779031365954162228532991291868214950565615990966265694580335533045365235494965998595149631566706795045987443791903411003279763085924753692636782886218284245215570054489439163164726739330788532") + // i = 130 j = 0 + PrecomputedLines[0][130] = emulated.ValueOf[emulated.BW6761Fp]("6200227010995243292534007463442727301124348508335846975763125380726292582060924747024816702710789047477511252369137204867777169795203823829426651503047600098271481218636673525637213754344807277089122252066488429612476303820804543") + PrecomputedLines[1][130] = emulated.ValueOf[emulated.BW6761Fp]("6183339963554257352986053313573779205151353739061345212410014518341183574948205574318625281851356054888090433850265460982453347498040819644197823676451772870994706563880697505569357671552697683052366439579377964497653703657273613") + // i = 129 j = 0 + PrecomputedLines[0][129] = emulated.ValueOf[emulated.BW6761Fp]("3179418169374060428648479451680112473165868327208755428080303333059122452066341836393363276607814567151445118342356457006231585027268454793027792028314743348462081282511356597993842079243330612103229465856549806800369725036881516") + PrecomputedLines[1][129] = emulated.ValueOf[emulated.BW6761Fp]("1074440876243557992296297656666926523894847888827413838964195035038704675426637330906565122688220683942757851797564811398813653924552049073155628743092598727862440471499620621997136194644441450660880437794868985296365645976407463") + // i = 128 j = 0 + PrecomputedLines[0][128] = emulated.ValueOf[emulated.BW6761Fp]("2656992808450134623226107811040851573895052777253445321570646190215821367927767059861417586581046541647458203705873809488287636783989040464371827681455548309973545083153900514460616688467563484862556513683924061416596829297761281") + PrecomputedLines[1][128] = 
emulated.ValueOf[emulated.BW6761Fp]("4844145081209902609505524435169548005333832418493246907706701276792579054383871336339479329969744299945519805206076736030684085411371482252463723707639516591389550691017538009029350110030156955501292575958216281584402435928015282") + // i = 127 j = 3 + PrecomputedLines[0][127] = emulated.ValueOf[emulated.BW6761Fp]("2011974253550710793841707614319665694450124220622509460641614259810013401810340721144190324466096151717630769235513591278366843097918633665449346052590698996537616877458386727075369760392809275502298638397577883974589386211297224") + PrecomputedLines[1][127] = emulated.ValueOf[emulated.BW6761Fp]("6759883189410489244241895710608314487646636558844672718364985082787669104517170096580376692069609041874604017863262445678624384432339609560673325735146824752213644659396489133994780351405505353963175199534520940811983252265624615") + PrecomputedLines[2][127] = emulated.ValueOf[emulated.BW6761Fp]("3768192070758720977362743048128269743652792172865782600408421041647100863768584258831661376266155960653323061159024418693369114489150496403396836022703123674697982050710775271545733019404845648384932763373012576844034843014226663") + PrecomputedLines[3][127] = emulated.ValueOf[emulated.BW6761Fp]("4996817090270925204398124591351581650929992386659735131162538961779700303447917693269175016025592612215970500208241974120205022765011244960015994932562080528715942642394954932931189953414547282447001732552340521661044540625436489") + // i = 126 j = 0 + PrecomputedLines[0][126] = emulated.ValueOf[emulated.BW6761Fp]("61030029175891269410795092194963931753546458573367290675218972211227148812010246092773830125486215763104854597618372825714261360352033773901926014862100801844180418267711567376566306882863926155995425312804691687033445561167871") + PrecomputedLines[1][126] = emulated.ValueOf[emulated.BW6761Fp]("6833240418488948066920209892932139246256027105595326635285212540472436879581105716676144073328231777396096543811943596316039625629371094363456649650678572026035768513691047630818277854926997745495397934394176338037573909064834929") + // i = 125 j = 0 + PrecomputedLines[0][125] = emulated.ValueOf[emulated.BW6761Fp]("3926949350427962695585990018450101508532431417991343152535182583499353333354294146067703587959518218515009410854982144126409191309216905586664235278880727278347038082001447608618625591165227564209266868241878519280739454627913589") + PrecomputedLines[1][125] = emulated.ValueOf[emulated.BW6761Fp]("6309962043798723795945059552956223798505736132181994182186095961257327116478835558103704420245912232701804081120812215729741361816214804019930911187892496733432694004299106132040377518510660396793612942971514054694208602506501019") + // i = 124 j = 0 + PrecomputedLines[0][124] = emulated.ValueOf[emulated.BW6761Fp]("4488999804938223839571472535025735012412690342654669760528351297118412155837701581755042574106398162784062944826655321643381618013246981215604421509557111054819740593053659366060345354377072326398031111319075975298655323375246802") + PrecomputedLines[1][124] = emulated.ValueOf[emulated.BW6761Fp]("6444989125456244545425394136098901739857497273657531818611817253918235907497599508119892300164686750638921852969593078482345991870378784799499090518807626656978947039049504439625214712037653614746128104900832879863879988276182218") + // i = 123 j = 3 + PrecomputedLines[0][123] = 
emulated.ValueOf[emulated.BW6761Fp]("48096764148903583755143048977709097805863506367881065875494375242161357807338208699249280067124184831538166831199608365878819269574548732670387717402535874913726374579033072827257006439676023227938395708736234029547128657519024") + PrecomputedLines[1][123] = emulated.ValueOf[emulated.BW6761Fp]("2885079913666629029378113424257839642578093922710003917421662173598381441374102964233268828960354312449054455561942853958742180460052092915358427157540615228259129038054859726324662666857362491353586815224330417492336147671076074") + PrecomputedLines[2][123] = emulated.ValueOf[emulated.BW6761Fp]("3043841471664770240132695293319503660849549600823254092922959329815228043755665146670109099702358612421893385494755435653299330932454771820778776072989501322684889256922094409958313718801462190749096656504789733891018826678061754") + PrecomputedLines[3][123] = emulated.ValueOf[emulated.BW6761Fp]("3964755972180176436323966637057860893750008381404580562869395428071031238803305006501831093580555070806000231292428936249019977282011769187032253227866421572275838519495412036349811651394855424599078120762727736839420292263462485") + // i = 122 j = 0 + PrecomputedLines[0][122] = emulated.ValueOf[emulated.BW6761Fp]("1147423570305216155966394089416724605760345045141884306377095836246940645217647257876825855936992089170612653283726654429767384651124940326503491901401745219743034675518742507445403697342358921463061860241593761756336289164522119") + PrecomputedLines[1][122] = emulated.ValueOf[emulated.BW6761Fp]("1400653168263070557443196511526506706425351336877605966710891264057367498166955652141609738759131845843222045163086919342411712873806170027252549920037193747798496958648686134783555143951150979662348022397600974087120311815298929") + // i = 121 j = 3 + PrecomputedLines[0][121] = emulated.ValueOf[emulated.BW6761Fp]("4848629817189004581190031618928252397691169400292273151462737052201317336592222326669501659224975531820399232409938503793212004554476876145926583490328399513338352933529323398645256274996466572940688236571797060498039826415776303") + PrecomputedLines[1][121] = emulated.ValueOf[emulated.BW6761Fp]("5544527487950438540786567976353286513309130241252879438584120222061584982747917620707291880053404671809935496576304302790977416756689184737791798686432959793818214479200283466088031571739784074765935407802091843712383310733299433") + PrecomputedLines[2][121] = emulated.ValueOf[emulated.BW6761Fp]("5683179697250901409522989575566573888308266359124141125691463931015868417522055941441646787069096352388787048945382148685200643438128420640514442659752004342617047060157100278358653282807481368362519403945976463760841519256358005") + PrecomputedLines[3][121] = emulated.ValueOf[emulated.BW6761Fp]("4015193801627401118529067340400153381185658888661374065658744995657432104911672137199213869471732156067461339474772923041603387145645908396491661843740044507679652139062696511448585565788502969536240872541545150524475461142991914") + // i = 120 j = 0 + PrecomputedLines[0][120] = emulated.ValueOf[emulated.BW6761Fp]("4288837439335358966022320970682704322501253708708512179864196425364227488774750648885019477197521514769857662684703447368455232541366519594011028463745666196187644303062377133432655952115338413364077019586210481492151153882533394") + PrecomputedLines[1][120] = 
emulated.ValueOf[emulated.BW6761Fp]("6711230786907893007908748623469748328179715213743529169694920667477525813744104801178474617246982814360083701328157531202085430108276078940886323514181053003891998099407640230767176745670810179914758840362614799568490407762073099") + // i = 119 j = 0 + PrecomputedLines[0][119] = emulated.ValueOf[emulated.BW6761Fp]("413554378658776595501164876190088919308414438064968589716948868117411538135330443667704485985944759138845581832799144492471703206122605761277212923300910870363233962333416257118355152435167571980116339519218448320568686091522060") + PrecomputedLines[1][119] = emulated.ValueOf[emulated.BW6761Fp]("6297262784542072715661554435028140949302827861690375106363013216167140265214400108019462805799107026733661225399127154732488782092163034727732105989690602332517101639676965271114862669193714613581755099466534874261129391687766781") + // i = 118 j = 3 + PrecomputedLines[0][118] = emulated.ValueOf[emulated.BW6761Fp]("2983298753229977152323135951705985466035371741485329442751503135857043227825215884279541420174496098174791077537084819659951247261120623346318386113107777075030888060458782579622749753862574748396898338124366976276144133985861273") + PrecomputedLines[1][118] = emulated.ValueOf[emulated.BW6761Fp]("3937244092143115338932004671719774355768669736676487982793276339336791586253378335050493388635279555326187388831599651140722899137211057678112288706659103180262966143161461781843109792871183956304191755744274332064783999126226281") + PrecomputedLines[2][118] = emulated.ValueOf[emulated.BW6761Fp]("2733696733608866087289260002991705592224758347592178237334152606732051902582973806868260081916851181879380681091249755495212273063349966523361751113544397800785886443510833030691223760659900815227739498061573912216017141950639322") + PrecomputedLines[3][118] = emulated.ValueOf[emulated.BW6761Fp]("3447346116023692597027327949601168437105182566602907072746064824432937885591845713123463314504333303435002189386048554911879434635935212953780216832655075848382621368493386691645655672987605820386850955801239102880102299086389298") + // i = 117 j = 0 + PrecomputedLines[0][117] = emulated.ValueOf[emulated.BW6761Fp]("290772385992205989003842268392935069805600713676054168995478891788672038548991216826202976641995154972645344657841986735832516063908191336543573825112421034745006181624889096669351734716264231721373274177150459199663763065478094") + PrecomputedLines[1][117] = emulated.ValueOf[emulated.BW6761Fp]("3292019506796543851445430702320993553807948365652526759401921872579331881880870165659982836507133155007686741550814318483452571439127334976403549783875675171500812427161759032411335276957921133870128540763629302400287478387857054") + // i = 116 j = 0 + PrecomputedLines[0][116] = emulated.ValueOf[emulated.BW6761Fp]("4933337379982622525808367378171514734682006772891408711624036888695073657453715290052198254814244894561471536382000141283035744955831857691284830165285170418958890877344092768820121758525762010992645431371368379192630527450391567") + PrecomputedLines[1][116] = emulated.ValueOf[emulated.BW6761Fp]("660850871449753192123029385048560910163777949424658222423062565370599825313381090514485712636189623877350319216784062698362526297988273758467813066947250372551363684027442866225073557812481149224724185611740850384821128407685024") + // i = 115 j = 0 + PrecomputedLines[0][115] = 
emulated.ValueOf[emulated.BW6761Fp]("481326854764450570772068592260461055406637567541766934153921077562581658404153295111099308704366026875413962255916341670032055787704869744412189455698518435213580395015910187380289385944733548582699453517614844953747524636966060") + PrecomputedLines[1][115] = emulated.ValueOf[emulated.BW6761Fp]("3420809227688631341669679740644412647134601097109661165923282832112778809885638792228909313681711757177052386648346649475521497838129590818669774544604962973921717806873457767712621599594811960795930366883511052271155980303038721") + // i = 114 j = 3 + PrecomputedLines[0][114] = emulated.ValueOf[emulated.BW6761Fp]("684809418026755822824253058438625412417627268961531835854114498136804562817130170611497035677023178044635114102224716079155167463023788690123629198688642789173614157135366218960438999272216205071157116652777625492047310641469166") + PrecomputedLines[1][114] = emulated.ValueOf[emulated.BW6761Fp]("5166640696283905919399620082219859368947649651772166224210712571165517842457751521392173879950792298566506230141458101552956587780134231473703997680234294099476701153920453787845822302486959392040307239726389452068888390962896457") + PrecomputedLines[2][114] = emulated.ValueOf[emulated.BW6761Fp]("1225848359831922306214457248004716386615308053365154620155093059738513399631997830664341068529519872751146070928817179211667300976162683350033065380748927997960100452502445405076083585662063240916615131996881707770541479836532906") + PrecomputedLines[3][114] = emulated.ValueOf[emulated.BW6761Fp]("3183467980405159459382133484543120947419935505894893093005336721935126843695893593395289759680648960257687962761513412564242438535941446306048253411519706815016857930329241932935979124772476367891799889429968788126781556590159057") + // i = 113 j = 0 + PrecomputedLines[0][113] = emulated.ValueOf[emulated.BW6761Fp]("4626517608297074303998465222691929513752132880642038668324913066825087276695067156470979826318812539456317325461721593605469284549411324254599330937034993001421930210744360228548043152368219046417783422209127357231636814511442814") + PrecomputedLines[1][113] = emulated.ValueOf[emulated.BW6761Fp]("3983180414026994442339773406369498782480430663961036515210761780953055939419716704607245841367824985551004597998080550979782500959769863562745076097943094990258671204643135180911363102591744932177410392029554146511771754571301543") + // i = 112 j = 0 + PrecomputedLines[0][112] = emulated.ValueOf[emulated.BW6761Fp]("700906576124412811291086856659842275195258622895397636425353544093733685670245448363084087741581123052363188833111664735668171799136833943150612925072937471265266461004370773546132263292121859184858316146896828999548280381303452") + PrecomputedLines[1][112] = emulated.ValueOf[emulated.BW6761Fp]("4692134649081002476703999452330156046805727119237089354081639317452715546964599231153077130319567327586394710165251636039202551157971022041837579426842866207332269843223882426757561323716848393489694788769608479640858276714369717") + // i = 111 j = 0 + PrecomputedLines[0][111] = emulated.ValueOf[emulated.BW6761Fp]("1634336593167212469726111087588237678289979788278360678182919348642884347360479796581910754301108780131592369234473369879828281697717419752310591911301483800386694367816173405109239641530977162725545732544176131812255789205336540") + PrecomputedLines[1][111] = 
emulated.ValueOf[emulated.BW6761Fp]("5184725322013784060417949167414529593680372589157689476355315098744927077860773135884503916427189964053154110415376925663936201076402862521450607376690027367635571761072728824076432872648592119843172389551071898313845735081029724") + // i = 110 j = 3 + PrecomputedLines[0][110] = emulated.ValueOf[emulated.BW6761Fp]("4953319302934812575038999165295500575676791409774699647543406528425762068872173726634472807001304647993529966624124694243595249743482491962931348850325767446611679661248874226095803976227891393066925992955597935289828823988957470") + PrecomputedLines[1][110] = emulated.ValueOf[emulated.BW6761Fp]("737693528334438898431278286504826143269591546431662377619978515983630360302832003327633229277274683374824551063238506172875438613867639447412012507544780812924210275380644728627103410245362072050600051053667454750114777947030141") + PrecomputedLines[2][110] = emulated.ValueOf[emulated.BW6761Fp]("3001622656638585857511801217301789708361697320249270374331911200105561040742864289651902240628576874070436165017228217112117151443162706218054774400918978527706874258973333719683737630175847920511647263686994660958560642384777280") + PrecomputedLines[3][110] = emulated.ValueOf[emulated.BW6761Fp]("2955371716766235083595476802782512312694024877961802653530637208613750128600047733743309160011712940356611152581452205642405634198799174743558191212422888690146861411211423483451438377119945438228140796896482696879292216734966012") + // i = 109 j = 0 + PrecomputedLines[0][109] = emulated.ValueOf[emulated.BW6761Fp]("566212511094651326103650324402513675260768764551684782856361232500767090053613654126562334984732125275054601697072126248934983773636047526079992446589428578795051228008677129219092223154897142521267350970627097886575925934072398") + PrecomputedLines[1][109] = emulated.ValueOf[emulated.BW6761Fp]("4694230344175341667542145113737216365363426662846928492809719984029782449958204469882253866698488012712926302037291695259127133435421044549374748189772077500141102408895386792102637462624844856073187158713496155412172255268322943") + // i = 108 j = -3 + PrecomputedLines[0][108] = emulated.ValueOf[emulated.BW6761Fp]("1563334932490323038084899924732619173549214924802037645248404609061373133331192708939390980051099418970255195294348796506729828980175564189388228950685471575211313645348571431181819230671011458906431929755248519529988950383043558") + PrecomputedLines[1][108] = emulated.ValueOf[emulated.BW6761Fp]("2311412559152774300375788856023758369822521516908975703173975766181607168537907050081509698867728203522923410694651912929457339252559503759095125494059547321573842664360913179904229958963585141627873969617228770993980676261040500") + PrecomputedLines[2][108] = emulated.ValueOf[emulated.BW6761Fp]("6175484932576535008332986939485368186545707346006529500809917972874655208202311421127303934742877907503092285518028735492781966447056993288871485474747566840336074917424118940873858912878005715629316527794406816857815430726070562") + PrecomputedLines[3][108] = emulated.ValueOf[emulated.BW6761Fp]("123461782351177634930277079148606395206607419259034123695631887799047384654128845614852300409005752420063210643732168643901531437376507326408479569877882472340918116769163900404373761149085456763553576075603356470050914486784227") + // i = 107 j = 0 + PrecomputedLines[0][107] = 
emulated.ValueOf[emulated.BW6761Fp]("2266072688291883158927069701738151065911783517304966453663389480757753978872156166093957720219822740237193161041788615078014605864407228585164871986379055520939547980130537619246537828306531563606741183765665629373453332588449192") + PrecomputedLines[1][107] = emulated.ValueOf[emulated.BW6761Fp]("515429094414778904504609179778459693982980053019434816014412238894402419646194981385097367993278713184436989416609354918183746310547812647612954769802713599858825453698318280499717719480495694699890143977308826388860019070165085") + // i = 106 j = 0 + PrecomputedLines[0][106] = emulated.ValueOf[emulated.BW6761Fp]("6010267371716213143093695909678402022513093521998387240126661727715132008960698736705154118695232557573323370026042606069770730852857100118375407703868918255082260850714722276087906262762688500835963093332420245740853593545194085") + PrecomputedLines[1][106] = emulated.ValueOf[emulated.BW6761Fp]("4281963311419191872029752866318517841992413134196684576181285159343053945111111164695870787018911219416379353456306717845371543351196013101034879448657779720874779538361445158199969059295153478919890914171037486882185535487756052") + // i = 105 j = 0 + PrecomputedLines[0][105] = emulated.ValueOf[emulated.BW6761Fp]("5033109025773232254183934091944700239231856713098605055038668877944288565346733607655796479139731856249627912046360970116447647293949316671166763832369647495992541316808331439387103495416443655791884868522333160346837144982107819") + PrecomputedLines[1][105] = emulated.ValueOf[emulated.BW6761Fp]("4669900460535830482515520442133880272923400282670458928700419218226824432636715019798554131251396295806505565951980674546657468029856175682486586838325399578768679827829147144533201256575038610283060702643703231384047071770887732") + // i = 104 j = 0 + PrecomputedLines[0][104] = emulated.ValueOf[emulated.BW6761Fp]("5454443657579324363377843236533931015151471915547623055046017633786313387883877403182401915377828483292869801167098529527030796069300191051702745938365410289948973748970664626027385576751322712014625670196018632538000348121102474") + PrecomputedLines[1][104] = emulated.ValueOf[emulated.BW6761Fp]("3133241298818377302733257481994044946647420520220237736360138875613702232726634637562049820513369241910571891206808742626182652789898777775231819638504462620062570796618509435949988642581412369087353609039469091753836564773020135") + // i = 103 j = -3 + PrecomputedLines[0][103] = emulated.ValueOf[emulated.BW6761Fp]("2301849413909855674162488780732225201562676616006104959243094303277358428323322327050305311892348065869912402322473064362096306853774859357097769623712937529819889534187399774499938603998860479544485756019870113355345199898509876") + PrecomputedLines[1][103] = emulated.ValueOf[emulated.BW6761Fp]("4769376100721203643584138490105487969288775420140134181589021687554748064461808608512166592229118932062808573234716616135297734707261031144278742781947854336453418853179094948475856324990828579869914078888963150707167390109400684") + PrecomputedLines[2][103] = emulated.ValueOf[emulated.BW6761Fp]("2376323046012143545352049957873875519653973825890489638934761380270654841992811869649376088018241846893967207987918990869977981056879466904242398708764880914800026189406407451663484394823027179271301159428644427692814343803894277") + PrecomputedLines[3][103] = 
emulated.ValueOf[emulated.BW6761Fp]("4089518776757651749022544766981095582075669030595540409340632143941619427587141177823314253624223163160914150731354990886243264294641208260814057423926152825198636019975190702372273183995827896803754787380902888458019918022259926") + // i = 102 j = 0 + PrecomputedLines[0][102] = emulated.ValueOf[emulated.BW6761Fp]("4872199633786861961320061137660550246969907345425715365233729280812060243137569195452166883931936492293633659728504486689475848164870096347435687001947281602000240086205222906666275934244033627794198544576450550622706826706744731") + PrecomputedLines[1][102] = emulated.ValueOf[emulated.BW6761Fp]("764164273079444455641809357552234779774544144626136395465042268465909675126831728736916684032579180470274953297527142632930920341077843559622145430140345601412258845118977809703379078517406445366917273261372686452599903434432351") + // i = 101 j = 3 + PrecomputedLines[0][101] = emulated.ValueOf[emulated.BW6761Fp]("1138481222158043909429825005297113087949881539191043655773323092458960243267437165694019939226385184659165150538578267029713533666280319072044638200327467170930049042745419600787617447150439577689985541557660176482532702243908440") + PrecomputedLines[1][101] = emulated.ValueOf[emulated.BW6761Fp]("4455300025758454639534519022413757975745594248460153197224377412273277761607410730038687015774965120910762436343695971192449449581489423823076517762372006348621691049173740475074002313383891237225650118962005615659136329845120303") + PrecomputedLines[2][101] = emulated.ValueOf[emulated.BW6761Fp]("3210463767655913107954350922295873010088631300544162634401958500459347283641926261600873742133062376991887707975154177358003118834068347936603770092842361580469026013565559516593432428767485752729404150412697514672519818123078244") + PrecomputedLines[3][101] = emulated.ValueOf[emulated.BW6761Fp]("3897236351686991693518980759085912950422402810098004492800141105249577706175658694833724887157670303699721554411373719170290642099070031256637485458934521636211171385007596172461292876808305757268752259666777373248236573371376195") + // i = 100 j = 0 + PrecomputedLines[0][100] = emulated.ValueOf[emulated.BW6761Fp]("4043095220781872017006054807966746640757748818620819777748666199334959371264455489659963392402393583448358132611434041099616898661511397735360831151484256897717931065631791608561310183754921837329313928655091009601540228499784445") + PrecomputedLines[1][100] = emulated.ValueOf[emulated.BW6761Fp]("5406649262179124359509896426133671689896244828529858894945031345510404419121406838049367696542348655513507453913868421112342916164549144087743758521137512378354638215435021249013569643525879309796069922143806676478667222192222627") + // i = 99 j = -3 + PrecomputedLines[0][99] = emulated.ValueOf[emulated.BW6761Fp]("6210611583500209950609099283083845754343386775986644996495973338504113861060796404338115170687756692399051620877679985388589228603401046696724398332298367241779996900625383743212074129854630884637637834417311509095398640463885161") + PrecomputedLines[1][99] = emulated.ValueOf[emulated.BW6761Fp]("5726501072077245235974761111635837696862742235341594290128351842267071162099966467965240252887788857612159886954447208395890185461804071482524387871898110396013302861010041459411476947082162369157387993930218181389615650844996067") + PrecomputedLines[2][99] = 
emulated.ValueOf[emulated.BW6761Fp]("630551511715376341788910558092626996370594613151666291681132040303198169792227073324695905614334843012564821616006131002919719731220925580296216363445909093410447010514067814768040564189299416092344973942045797304024180032472823") + PrecomputedLines[3][99] = emulated.ValueOf[emulated.BW6761Fp]("5344695873199798841654560855251216660354077383457151943536727881807747920346947783746112228393404410367589543239351918147241201379773853540214926322257942419416881562554531461956796366484295771413814611929735360541285281978298830") + // i = 98 j = 0 + PrecomputedLines[0][98] = emulated.ValueOf[emulated.BW6761Fp]("5334290801535344889401594778161065566537566644991298653727568118562592494615354555268880955374003825857184099538522225799941746892657595206702260559374159572912535022880679585339184831134613632067204404819265411895337983336609908") + PrecomputedLines[1][98] = emulated.ValueOf[emulated.BW6761Fp]("812946561548650974446001480996438589466020365155339960556949264789840358258764309798690481447224157262762104835659476455805690459602602633404803923871683159977667513178598859177598979021970955761740649000476953948228711910642458") + // i = 97 j = 0 + PrecomputedLines[0][97] = emulated.ValueOf[emulated.BW6761Fp]("1533139684897494629678126084843256462502256189980921509779110976093152867744125844762823787644759271036497420339783684287306011082423746624405280667715393784673969283191523855933717625505860175484093850931748781157432915379826438") + PrecomputedLines[1][97] = emulated.ValueOf[emulated.BW6761Fp]("3105206394189523431792859755297651248693196651309981540394912521441506361420010730689916624600680638584780245486097926768401597105192310093466212663729743276706806660677048576741583148240985569812346899056456916105502341086472784") + // i = 96 j = 3 + PrecomputedLines[0][96] = emulated.ValueOf[emulated.BW6761Fp]("6642986649277737896895099841157338849756884624223791368568736753789869171621550347374886762871833481613998771197991983362560977795424796669802885544577533937046979408514940747205131194031961638448378971158617113729301655128080500") + PrecomputedLines[1][96] = emulated.ValueOf[emulated.BW6761Fp]("763985849257341256339914715065287204175615450272088510224646579871565477113676143092909961490765340157702889934511575518026834191072633853340695222233058383715662860411394565262571083422252868386657072058634490756103926667752273") + PrecomputedLines[2][96] = emulated.ValueOf[emulated.BW6761Fp]("2111894809768291771234699635254368429563023687238202955565452114391863181226788809996109782522385019551444976548355096590573442756394819488413598826438205621159997034216435139330286194346631796724090675989720829218090134771863380") + PrecomputedLines[3][96] = emulated.ValueOf[emulated.BW6761Fp]("5834555584761253568870488261794415860686576893238808172507818342859167083726502551898547690129966013381778535451716397180093294298482738946917975249397680840145468832566368453858268282532976383934291095736173876314444871374184585") + // i = 95 j = 0 + PrecomputedLines[0][95] = emulated.ValueOf[emulated.BW6761Fp]("4554372301264166241512694625156409973186315660619955572770107356027531185393192097476447792179708528750421296628447477553088287851446168233462584388645512846895928787087711505667449324227903556816251973558826672520856058023864978") + PrecomputedLines[1][95] = 
emulated.ValueOf[emulated.BW6761Fp]("4072237732151310407138039382271373859807467820006593734390759255740489586569827077454370070958985964666982375219512156249416895894472792797738360119987295845072417172187075203407891945077491781719818062695939265060511011749334741") + // i = 94 j = 0 + PrecomputedLines[0][94] = emulated.ValueOf[emulated.BW6761Fp]("6510899928553873687067597313066987710884205441615256395333009047888231930982473929562832370792158708449142959652644902462625671956767270578113087761349674803610849401013694504995254886310324009708372106250447336111780658151174117") + PrecomputedLines[1][94] = emulated.ValueOf[emulated.BW6761Fp]("3716985894399679835698500839130297246131682628773524156193981743084313332999772166147103952311960776512057328690694812579802783619226508819684456878978751163445988521637432649774939222893766338589698431788169606678099020184105659") + // i = 93 j = 3 + PrecomputedLines[0][93] = emulated.ValueOf[emulated.BW6761Fp]("5047907951035141625499289119782080570696114189985987894831790618317825143013530787999864121162182855689345001033675318300842414213335676262887456422130547583822551127905049725472675504164411132220200872844876890004454445202796848") + PrecomputedLines[1][93] = emulated.ValueOf[emulated.BW6761Fp]("744443940877440155852839401495473654127016275733517126510710750437701880197577129845552629949810062222202490220131566785418840939241657307884394435645700947546109375214237696967397028953889952749537267956254343296685593919841751") + PrecomputedLines[2][93] = emulated.ValueOf[emulated.BW6761Fp]("405397734594187466210599173815534415630420994025799265503903411229120704575608241682072998863209399054805243315634503312190255263288112805779208908719331201965374614272443482000461635138424130384687385847020666368559442786917459") + PrecomputedLines[3][93] = emulated.ValueOf[emulated.BW6761Fp]("2696790088947677357321226274149871437764533101159544129892669487973615527359684570609962654836322323267289273293861803643578367774704764762323540015313991701554039225880648603140983994202127233185175940529764493913084770994846593") + // i = 92 j = 0 + PrecomputedLines[0][92] = emulated.ValueOf[emulated.BW6761Fp]("1778118225306862528653893340786894106058338634993301064843865041738491171509980090474610613329922231665151447630778779262936323295114184794655109554630765558760483766521331613645137958731849488238994508990367916232936238599801931") + PrecomputedLines[1][92] = emulated.ValueOf[emulated.BW6761Fp]("4105725370160443778676147094095182156751981636198800700531093765665160264349249532223141327824925114515260913439303886102256066220953645440240448328492268791346838243081878928038732773595681209352663512771678125612393529622126991") + // i = 91 j = 0 + PrecomputedLines[0][91] = emulated.ValueOf[emulated.BW6761Fp]("4600362245729613938347146383617215424783938802440644201628282454478556194351323702906397430611076765509583670325072759904615561268681689452702508532237183921664395384926920354024313452086066345089232241798541690259686322185162273") + PrecomputedLines[1][91] = emulated.ValueOf[emulated.BW6761Fp]("6872426908284539953361622842949373424213870462893858001102304251139191574488922086389439832568944595393555449548692141770650107664958472796235253782268295422026242252114475104252428564451697348055550628555203918153412459460534810") + // i = 90 j = 0 + PrecomputedLines[0][90] = 
emulated.ValueOf[emulated.BW6761Fp]("972238710294108691821576508451222523325936231175169103221318647177961008156434479190946297408489211903837717946830240136791276876475825835667664504728158588476278139573707686410501625612772061520730264742612657926085350356866829") + PrecomputedLines[1][90] = emulated.ValueOf[emulated.BW6761Fp]("1510181743744786712591585351931772930337667831480848631490664096747364724169957789195294155846340729886205788034650789517010294174011248598866252696801023552494521982312355974258161015191981410765377192634370898289227817443383417") + // i = 89 j = 0 + PrecomputedLines[0][89] = emulated.ValueOf[emulated.BW6761Fp]("5557506933950386763218483231725931480648969308465314223083209985767634902903171046015407999810542542424552137025017112365331409457975213648715299200605178027178239773253939711354993413274425279759617940801473937158802339625367106") + PrecomputedLines[1][89] = emulated.ValueOf[emulated.BW6761Fp]("1025310966880184379491710688665364390880187078341049897527558452857000641929877892487550584006724822722919195503446161976221790997828334084748364502503873679170302110669101931916815125014568929775499684740501231187934011945575051") + // i = 88 j = 0 + PrecomputedLines[0][88] = emulated.ValueOf[emulated.BW6761Fp]("6737609173935499145966651085828355289411807624339037123166739321112841227388499205045307103023431429178709920183892337531364898858875360100153697870543555199239854099013655686595855597020820721037875702137136100623742392146547113") + PrecomputedLines[1][88] = emulated.ValueOf[emulated.BW6761Fp]("2659485018438977675047464196576279015694875536253089601427805835350948531419304111132951968008946706276266572073652283425392817348050712696346759099142455444582291114849705129489012295302147939302874413708743102287044740793505384") + // i = 87 j = 0 + PrecomputedLines[0][87] = emulated.ValueOf[emulated.BW6761Fp]("4441151071852276306298209823658750857395590614091535791519200966732884109870930067806083302428289470764924232908921416597828389865940540731878474135112792372490962316681415335864107748995061444417275473434317270174982090608385971") + PrecomputedLines[1][87] = emulated.ValueOf[emulated.BW6761Fp]("4839626429060666648820106860500068908374996974648867422022249155639435134527692095743868784198679123972177173528127658202666042870879477969349613000705157462873430251563459916032762882346570618335137910474671866635860602585367349") + // i = 86 j = 0 + PrecomputedLines[0][86] = emulated.ValueOf[emulated.BW6761Fp]("4509661640469976049870221335189922597441654768402802889795384802615621925508423937026572412307489914032465870638670846049030819747364558475564741645709494331762666674404206046263976688444921433821673714720135048965856377229611628") + PrecomputedLines[1][86] = emulated.ValueOf[emulated.BW6761Fp]("3487870318670087220135642823070529587459810126962147894043319535952413604020763422619481168610564465706302879778881124586413423575607096272785188951321134130211744054584817344450446549888147090168331179905987560684942220425658421") + // i = 85 j = 0 + PrecomputedLines[0][85] = emulated.ValueOf[emulated.BW6761Fp]("1519001864958777427421059765226923260984666053267206576476800868702285323411256327154788822327142278385782412542329663866678475529205929176640075196345571119467595511484440425730279526442276708660390690638808053358152183571843864") + PrecomputedLines[1][85] = 
emulated.ValueOf[emulated.BW6761Fp]("339618474481374233408653340286365880775983516984272638090335996177627212968251512436111248759221452748989633180342131444085812774053387887544791732345486839018109192502385591161195444367505365221930689157110125467756547838849643") + // i = 84 j = 0 + PrecomputedLines[0][84] = emulated.ValueOf[emulated.BW6761Fp]("5383707318799980417127243658341454006060231601550683158066499356314664342842964473973932129039747978225819860930748841019638264473752363178099272936190083263481626973641022407497920053616402716500855599794452310459194652961209200") + PrecomputedLines[1][84] = emulated.ValueOf[emulated.BW6761Fp]("1349646528445772325728229877724689627031743523671417772446520129629622853654155091055539773980844706622058488772223461616911849918670516363806854356777662287718009008499554764276532161656086868870990280218941085533631039125075371") + // i = 83 j = 0 + PrecomputedLines[0][83] = emulated.ValueOf[emulated.BW6761Fp]("1919037141000340015532240777217515754000237076628800751689553494755852848880370990254326438067118347725611618358609018159924461271671767113830563241617627756081864476029381221519946219301823123343613957296437308054897105359297269") + PrecomputedLines[1][83] = emulated.ValueOf[emulated.BW6761Fp]("6193622106953370645488984456888022941558613899911812237087544262038237716517979317041586005330078927094776330098927225611063600454806197793086266746766986884718391839124951252821859030591980191698766819150227459130246310661346190") + // i = 82 j = 0 + PrecomputedLines[0][82] = emulated.ValueOf[emulated.BW6761Fp]("5604334867184504283101885762199377590849044920799905376603908315985233209750313152086865257550332811558800330290623287217389163687434328275776318719371903699144276765062224548014642774106630025931227796845462926473453370878217054") + PrecomputedLines[1][82] = emulated.ValueOf[emulated.BW6761Fp]("5121837820524244380633559304962861949517011132126154417129457763839557571454772023100009470380570446804371859803558477476971571907123955873440777537352059389278975949057671557100601956373951179598842161422364345955824227067821780") + // i = 81 j = 0 + PrecomputedLines[0][81] = emulated.ValueOf[emulated.BW6761Fp]("6067039568545267838181130403676950028372045277906346948660766639906798973444122264583559461246057301784807410742590458307288485379144153357711779574231768382567438208479868245345445057744643748674697515903978614414114874814062307") + PrecomputedLines[1][81] = emulated.ValueOf[emulated.BW6761Fp]("6880991716540357902750875467195505448773685328233955890419023527621077685216030913665134779737441555378073137742920581503827375790310573288149146115129419180701788936806954316041205676346559444100063149390039298206525864640726623") + // i = 80 j = 0 + PrecomputedLines[0][80] = emulated.ValueOf[emulated.BW6761Fp]("6869953723292757957249624864306641209156649908829060369956669082858146731880752960326281552744617251261860654696462155414768691078479849090594684751909462835964342943852143551132859576077229991406111489281697940032774868436176999") + PrecomputedLines[1][80] = emulated.ValueOf[emulated.BW6761Fp]("5614709334833871645446373840318419298283648712925576675331700402875726133202590836378902568367322443705376968142770629571737036781249560011213286630255110198242550012818111109270093534607349695639612197752174557798917852647048250") + // i = 79 j = 0 + PrecomputedLines[0][79] = 
emulated.ValueOf[emulated.BW6761Fp]("4980594522020125085337838265204820771621076729908105705805854424277251459039298036884763908300381600489511459131191940043084494214578610644783861208395158996603441510107318092064373656027862697953744766191485979617155122884562584") + PrecomputedLines[1][79] = emulated.ValueOf[emulated.BW6761Fp]("4042813205981909877511870025047100983992651260939106481383083811949412209961122330723238535322973674617223461600442099649058372802059938570280751988245230293274349336371780245459427109587547862323557335478307216536115035786329427") + // i = 78 j = 0 + PrecomputedLines[0][78] = emulated.ValueOf[emulated.BW6761Fp]("4736014237661124474919325837323255333663929534870042114903754288732701949896601812125046300196794235153463997276154765275115330368384777526897346573674986337596087801165856657251875469708964755706589951812065576029605368880307330") + PrecomputedLines[1][78] = emulated.ValueOf[emulated.BW6761Fp]("1957240675950958452986564830220902690609413863957547458393400303485176228644772005244521114574569019356081726863230132145863503283354289808843407877002866564348243635922163753294688198520892938154896179149523387763071879026192281") + // i = 77 j = 0 + PrecomputedLines[0][77] = emulated.ValueOf[emulated.BW6761Fp]("1896794919986848060393220222925542229088427272504480911387044183891613533212038341442571592078156720914592673025986869785902458920643524703635569058217402002541817375023305033040017178091785230876069201061027396160787380383913485") + PrecomputedLines[1][77] = emulated.ValueOf[emulated.BW6761Fp]("952118686796326173491479294145358823162844313805069901337052929645198665143877135883241333497438934099314203159402316956587916577387445642834370029253487949574816963413865062830574107352788125037245193643382776835744743710856415") + // i = 76 j = 0 + PrecomputedLines[0][76] = emulated.ValueOf[emulated.BW6761Fp]("1410246511278881873376558292841685790024282857896728342149001813635677847218604552303527026098386447762274364822183314739464128137999683837672885487186967847641717287157617576045185484714232591611567434185430519840283187165531887") + PrecomputedLines[1][76] = emulated.ValueOf[emulated.BW6761Fp]("4119705459903045226159670551727402718128564082540864855361906266183721299733904153174114690207935029103638899913664705930838982224621762050269570580809489779078132146847473253910183165605063622774796014387328879207695757913799201") + // i = 75 j = 0 + PrecomputedLines[0][75] = emulated.ValueOf[emulated.BW6761Fp]("2072609478320001524320717173962571134711535941640067489637092238715040645907960807893942921283120113507698307002805316256993964956771568079631701795714954017552892285562026519328855416196541482143510574319254978277137455571778001") + PrecomputedLines[1][75] = emulated.ValueOf[emulated.BW6761Fp]("5894714538160407682287836474406214563671983369063207757521309934251027316132097965174061393141629129163127240409904888123486261567289075202307479756611875308157695683933323381961136441686218200020586708364128872743199583389014647") + // i = 74 j = 0 + PrecomputedLines[0][74] = emulated.ValueOf[emulated.BW6761Fp]("5106682279392935961031812161867418543946492460670149936266921188569667729835222767203306768397343414328957015700647431747733838833765379422556785306560650405609290868938919653580246910237324702779521559387178746171315070090318153") + PrecomputedLines[1][74] = 
emulated.ValueOf[emulated.BW6761Fp]("3153429055905459861967098840411143630797549188658234822637249428037854322116749300600880527338092057104138600571085314159325239783013442224777221397555888980848209009841714998763507800323867871420465284679548596540839296513048642") + // i = 73 j = 0 + PrecomputedLines[0][73] = emulated.ValueOf[emulated.BW6761Fp]("2309558581515697525806105514114501512441326764395967792860310494360211491927848765373205741433111502114874822272883456630217937235469291046785367236821204903743476909251764658419705892503554921768097444904548609966973388109109543") + PrecomputedLines[1][73] = emulated.ValueOf[emulated.BW6761Fp]("4406788481817611645656548143390177310692203397871589459859491665399736118029103811823041128102326412085453737102272295417521168777404767030473867233710801470981825554027554437439988384534097338870387847812447345301715080956941196") + // i = 72 j = 0 + PrecomputedLines[0][72] = emulated.ValueOf[emulated.BW6761Fp]("70143801456641879605852143513249994253942952838517305812198390578544244249425463845374750697973710144773229658121146105375537294250338032985216846454878464214405585464566225251859329111605976969190077368551199988262487332213472") + PrecomputedLines[1][72] = emulated.ValueOf[emulated.BW6761Fp]("4207471934217394013864415886266297391321574222138493922397775146262852761128641397456358704993180728128223826867603382191837257839100565460087185314954741131340867336141963696867361333547653842005819524697714614979577888292080256") + // i = 71 j = 0 + PrecomputedLines[0][71] = emulated.ValueOf[emulated.BW6761Fp]("5171411332384846522009308600171271717775656813462368729163310066978131550784041262095622048016317730789429706142084360644722417875472080028887935535933365638293239021887851352065226215946911104922399560360027754023282146751173127") + PrecomputedLines[1][71] = emulated.ValueOf[emulated.BW6761Fp]("3308699969390408117425675800034852620724152781693526802125365703296957537619549597281048430703238670047566480207149204289787144466800450766728863836132831936610209196903041136317436653727727840725501386858902193854262207493660820") + // i = 70 j = 0 + PrecomputedLines[0][70] = emulated.ValueOf[emulated.BW6761Fp]("2157730189967321961590154478681014647736161701291572661503691745828224724570690384542207251893871380972626395818536195165244317011459475104080304183755778781271580095682599104560528323388969877297057997164532517457316922473122843") + PrecomputedLines[1][70] = emulated.ValueOf[emulated.BW6761Fp]("6166779349533636404079090091198084654595551428484168319226064024022096225476313444140911411739596624384367001596401007457610736149102582292110622423512521528219451589757047219684985131650713379980437753685311223968789274660506189") + // i = 69 j = 0 + PrecomputedLines[0][69] = emulated.ValueOf[emulated.BW6761Fp]("3693545555393262149090091890796988367031750870154119151938382046789970646209103750043260735558026634783406780454806183283167148193304651913829826658787420693591783609897868879309931049485834140104439475735084228792342105678001859") + PrecomputedLines[1][69] = emulated.ValueOf[emulated.BW6761Fp]("3454579417845988721145112391791524361723943876865961783871147642976222000744550940707001904868088850717020216623985541442884486667224875720068404843684775351971798041419211492654134593068373225320963803238503760103944066915840816") + // i = 68 j = 0 + PrecomputedLines[0][68] = 
emulated.ValueOf[emulated.BW6761Fp]("4316617566488630990186800831569323180961600325467315782520452617705469884682333093818828825578494920297533166697413661723199052237884768061452289253625270126685130694402411159604161939860148014467733374435152294915689788782393563") + PrecomputedLines[1][68] = emulated.ValueOf[emulated.BW6761Fp]("4190664687932908342055565978870270182687821852907651804349091805994979881604990178094590921338075567591395798595057534786427763408202515738478533832353247698077120923788657200258079726159139609904292140147845063993802634358775287") + // i = 67 j = 0 + PrecomputedLines[0][67] = emulated.ValueOf[emulated.BW6761Fp]("84944449596492710546484059085756868056952590611144907344324523446407105322952117478388042103840653905762546055111744847509194704507606178767471978158159198402631558743814408643460715249083238104344191220434949959361481291146430") + PrecomputedLines[1][67] = emulated.ValueOf[emulated.BW6761Fp]("5649170502978062479430942300569604180065205047447957641053208532178356906593461516672428743249571520776794263298225069421489557741598532601998487905123429777536764343411900779513959797677672828995577356475491526821338663890518141") + // i = 66 j = 0 + PrecomputedLines[0][66] = emulated.ValueOf[emulated.BW6761Fp]("2603576310901490311292135150953384774277318221989961250590025834406631430863918568717331918023797152480102239064769151694292624247190535708671290590222799687160437151107174463689862580134536805376273262065253737549576152380168367") + PrecomputedLines[1][66] = emulated.ValueOf[emulated.BW6761Fp]("5309265426363429763309068708763050192687298963793233029854386808966038869914249931311696251275662316034717601938626668997195338046712132642237298096821232686285400955651349643194976882039249387172924417706285549777109346548006226") + // i = 65 j = 0 + PrecomputedLines[0][65] = emulated.ValueOf[emulated.BW6761Fp]("1341657118469604693612921962537974911241109483254412491247268402952466550719072000823262496503676334235050127694663208876164873088040296471826215794963805354093057402941426034809948776619882576406112722703520543634493032593845711") + PrecomputedLines[1][65] = emulated.ValueOf[emulated.BW6761Fp]("4035700358596121039901170893467410034085393540830625930959994152433747836022300185439015628417359035385883786247038684823876857274515720164396191583808172302780457926458796266440756634932368614447938075150295060770207412294411668") + // i = 64 j = 0 + PrecomputedLines[0][64] = emulated.ValueOf[emulated.BW6761Fp]("2930660277843850381294621887137108135255461149809829909821377112483523980356050649163697177214375058704749326539551334374875177166039847118094070598604192734655753566201882039674293825146145171463504903948558735526872827397160225") + PrecomputedLines[1][64] = emulated.ValueOf[emulated.BW6761Fp]("5988701523929581560610984180294379754868966835887441912002458746197296016937703656915666559413702631263625903364061495057456665933686956303365386760706868014348860600757271537167901231685104290021746466078585138178535287513704928") + // i = 63 j = 1 + PrecomputedLines[0][63] = emulated.ValueOf[emulated.BW6761Fp]("80235409916131560952531634926187129854936220704751108971854293712013441645643075028881285775953897977173679734102951555672688501659893316342601229001442455921225014886258110730476599533006387167516464943028770988848119565334752") + PrecomputedLines[1][63] = 
emulated.ValueOf[emulated.BW6761Fp]("5645697547669765807051699414436415014546553640139306333111837902631467486478350968354759006591722977647564900224120645931064019336202965082317873586323451596595667262804678770022271564685698580435094170621340150377645331219716242") + PrecomputedLines[2][63] = emulated.ValueOf[emulated.BW6761Fp]("3515801869518688586368276649287501032232253490995929213693667666745539961609686866702375975294290905260844112197318970382348421828085682589212450872191370133742395530357304231481107985085801003563913938437804645550415387469710798") + PrecomputedLines[3][63] = emulated.ValueOf[emulated.BW6761Fp]("2768627463899769153777616923498767453844638297259238819533268522374182517088993723558739521443440612431052316999526415454061437419946607135767416256326448665426899699928320017014638884327653368630859958928958084940598054664681103") + // i = 62 j = 0 + PrecomputedLines[0][62] = emulated.ValueOf[emulated.BW6761Fp]("6267498047223543516085913656269800094985799102121638318066218769205732496341485331266997143838105658275997583482766391902624817799699593190625745845913736192058521667981439900412382902440513697704842470946713488669728244855268941") + PrecomputedLines[1][62] = emulated.ValueOf[emulated.BW6761Fp]("3172495843608365767106104222699963433932721840779494687213669710696273729035389365567085042159376306508312592323212373903543196676243559717338575217311945824019683796186729014449875006088163914305245567518115276847148486469675416") + // i = 61 j = 0 + PrecomputedLines[0][61] = emulated.ValueOf[emulated.BW6761Fp]("6393185299135236521600743898901902046235665740374863049386140970534683752073956151008823232102598322138334082399707737488005513141361456335662035360356893792369078552986570350323576340398072873307536750890795347659446381995315753") + PrecomputedLines[1][61] = emulated.ValueOf[emulated.BW6761Fp]("3166198729288470960815119159020024849761120775693905362946234701033914940383170229751355601834908988520196149525495490902436617272651934048057104969447501071537789210462295263005230176802247844207966015330849259576240004475861347") + // i = 60 j = 0 + PrecomputedLines[0][60] = emulated.ValueOf[emulated.BW6761Fp]("5933242427515184158698502243153492757724275395762347259631780046452701832550257433588929361739397299375972619193115264312711663457102177517470028944343796283322063886686941390353639393464956981156323711880431964717275018387701682") + PrecomputedLines[1][60] = emulated.ValueOf[emulated.BW6761Fp]("3384876030528746801023594675065685553683456870336735909759862775565134849351740466668332316834305760899655594942159390338717729639578479114411185241112463275424847947000663178421045281441831580381246432601271822070328156849153352") + // i = 59 j = 0 + PrecomputedLines[0][59] = emulated.ValueOf[emulated.BW6761Fp]("5672202940327026564296098467511666610927719124345712212606253393184823723416117442821889411272955540222239709079800765814222617482044871298073480040855267633335674499253454179232541413791707149482883674890351686946817110883000228") + PrecomputedLines[1][59] = emulated.ValueOf[emulated.BW6761Fp]("1154543563423153595437057912539134386506457788445374295296155704509782049894375963754530214242479483842535098794085962783214265333207301572819449746417430876867561757821076871333678284882744783958574290553769899855932651808879891") + // i = 58 j = 1 + PrecomputedLines[0][58] = 
emulated.ValueOf[emulated.BW6761Fp]("5494523358602689993082488462159401069622259263261384810777691036023979885833406510370827546720060333368038309757690955479832125270239855687629856598464690979905305508291102673871528818355512749132999808077739161976625609154087442") + PrecomputedLines[1][58] = emulated.ValueOf[emulated.BW6761Fp]("3889513228073300846774483449675405165412370889711908059928933828601820970272115969635282636780877852444112625854996590991089931313708119870086461825677989172964379008275991306511578482249736098151784221021103020403191511383026715") + PrecomputedLines[2][58] = emulated.ValueOf[emulated.BW6761Fp]("6817348152092217125383764111706396839178624284239986872074018495517405857300894298296426805941999338096243515994114168580699629757784818540853912737100720606253305749066953497290110273659748415521327759821798449434028823512899749") + PrecomputedLines[3][58] = emulated.ValueOf[emulated.BW6761Fp]("408441016040698462301045541028863542738760857595465526119973702087540331044892790379588279572626169110948234965342045564413136195279230719397130789960699105297260241439215852283327432361713263160794764750134587346851224358168016") + // i = 57 j = 0 + PrecomputedLines[0][57] = emulated.ValueOf[emulated.BW6761Fp]("904014207283845546342517026949002301938840680520511007708789153126108214252896375908459045568106923898014652347188514281811055130725468054629924731218357339456293621824634651205185088647436836303533136737375420850357755914019482") + PrecomputedLines[1][57] = emulated.ValueOf[emulated.BW6761Fp]("3021463156942607689893536499084418526149371959265368563253318239787696616342699376770327365917759552439071753047224832836277854106558072813015494652155076805617663734851882419677941639618620577565736429647390393259125626739995359") + // i = 56 j = 1 + PrecomputedLines[0][56] = emulated.ValueOf[emulated.BW6761Fp]("4002254150287973611774892958734645146330948131326012867658556213227131674782652425357472920865508721513207580088282234592571562733081211701929443114916737662262590051958147515409912013925831184717537636821719275182204257920247826") + PrecomputedLines[1][56] = emulated.ValueOf[emulated.BW6761Fp]("6650995954704541882285423797183833389117409382304102437747875050599180612204257823713527910062803417144644199841410840855424379322391268435824808936698654249193961626271529930100257320177120483894753385159475230006681183908486693") + PrecomputedLines[2][56] = emulated.ValueOf[emulated.BW6761Fp]("1731856182637246105907196771656973728242430938769551437479371960396568886577161343428444049436329132240591124171249486296193200751962793098717390700602949957161885965620835197484237467335434836086020378040872702181906955321234084") + PrecomputedLines[3][56] = emulated.ValueOf[emulated.BW6761Fp]("5904794864916998236634031354032726563042662734348742454636711408463512681053968061466581910826684656141140069538364115772383786783233506239578341464005935090344661046021052566188151710733410539127138076584927344794043393631875335") + // i = 55 j = 0 + PrecomputedLines[0][55] = emulated.ValueOf[emulated.BW6761Fp]("4184818315804686687449311868339826992614263927786079890563005955265008801366363259097525486014913084688352218860421066209106161062321318169379423339655758015838219920065913612147776347966059741489852222087074304658538242226141107") + PrecomputedLines[1][55] = 
emulated.ValueOf[emulated.BW6761Fp]("1729436668893735497596788528818242711527297679305950102487681037960820908794154296615421895498309550090534641956418734833716440103307336957108046195961644346661843459382175352900679912453154393503174508250710602341932630635617104") + // i = 54 j = 0 + PrecomputedLines[0][54] = emulated.ValueOf[emulated.BW6761Fp]("4028424328816457105275613631807939250693277717816481016645317988637513044482980841560103858167381177450927995343298109011721354600136597205734427808625992848504337217529784604969243272238302559636203132991964128317728845228827737") + PrecomputedLines[1][54] = emulated.ValueOf[emulated.BW6761Fp]("4137264771117858981063062933914663397753332258247457310892238685897247783937428646711199169425232185719356414250990689144775713028143107210691682540723085798000784988249533331875788444818880345959266557470221936184860579434674460") + // i = 53 j = 0 + PrecomputedLines[0][53] = emulated.ValueOf[emulated.BW6761Fp]("5492090801797351752381614128547027199049948697414385519539604670596301854328273645242249057084866409918562550964885250930785720674325209012952987352495642374866705080584069469415536679936119019685669733912277135561695115666877642") + PrecomputedLines[1][53] = emulated.ValueOf[emulated.BW6761Fp]("4905858371248820162192245612107925452187136738566936712274495298277389232348237517367524132804376317049991057289942326502212212603680615088237906000153324370826140103086371117307680591832523208571162756505635031406643093406864846") + // i = 52 j = 0 + PrecomputedLines[0][52] = emulated.ValueOf[emulated.BW6761Fp]("359178835294545969889883772667754428563026611682186272095920083011124469414808094313939667524828315558286118909738292340246984351389375075929819337763337885079238445278970206970834537857532461850702562141060178054445757061261025") + PrecomputedLines[1][52] = emulated.ValueOf[emulated.BW6761Fp]("1312200326277970534697540779331716736924580549853524235566261452574147442129502008927617292522196385102231014786673938381594831519849263038105855332568555929441096380204787419249922531962882779612974600959752032033331017307660602") + // i = 51 j = 1 + PrecomputedLines[0][51] = emulated.ValueOf[emulated.BW6761Fp]("2026038287010327744329738730229318435937175762944665742194585349571584968982882870102399978673469017390514886249305090405249521202188733937933343484364277244414353591563737610732282221596182900183491534814411847004967926819041106") + PrecomputedLines[1][51] = emulated.ValueOf[emulated.BW6761Fp]("2178832958114348900982310012790318964711295868435660304593515141455040525510122631088935523812017659055054869618428180241202047242515973139827366745153207759156458851652815475377013527980632538198101805204073000208867957404386275") + PrecomputedLines[2][51] = emulated.ValueOf[emulated.BW6761Fp]("6837231239325802714630592231544125999222471865115356607824139515924873828006966495355988144154645822325580936288573467243496622172897585520569534506877539567995164641501010714466115854526102902238483776009362232490056033478896718") + PrecomputedLines[3][51] = emulated.ValueOf[emulated.BW6761Fp]("1136729064180660079227012334655245987722990291836584884006658508324055189594139195698011309590407342768440767239608918245864512620648063831439081245116083355457158423089087145379675587830588630375397838727299113445737968051491153") + // i = 50 j = 0 + PrecomputedLines[0][50] = 
emulated.ValueOf[emulated.BW6761Fp]("5322920300404976083455522685211004055552447217897022767313521102034409124300458651590360645227025719456716157334813011739843876771661830172780260273424774671669669534448129130732400610261443507441704074262583228691779432057383210") + PrecomputedLines[1][50] = emulated.ValueOf[emulated.BW6761Fp]("4045151325025661254472879018016993563055206037649637764035519770429754358392671351288941058414802815561896489955008560794303386044981416397960378880429579440775177944716333205061343212765352728742991869524081985800570044775720863") + // i = 49 j = 0 + PrecomputedLines[0][49] = emulated.ValueOf[emulated.BW6761Fp]("5226258037103858585709485815951884774065463282206055949572296863155702213812699262254794092005380545671610118552173661619682770665961766560277265965652287468044256177657874378353508735265637291703944234691599675417196965708036341") + PrecomputedLines[1][49] = emulated.ValueOf[emulated.BW6761Fp]("1341647445164990334095065980943134038946017316256003003135703740054153232828619291579732165647244341657832232629026147040555154939494114670981128721804157219231387954429790542158383077677618175725451315626705948038674690506434713") + // i = 48 j = 1 + PrecomputedLines[0][48] = emulated.ValueOf[emulated.BW6761Fp]("2669141213632483177349464677526063038092995516731288530055965072139720542416799558066265333235760093953926755294654977219704355890957195369092533432110164420763741078175284654109721413728820682856532679498998179023064591536008179") + PrecomputedLines[1][48] = emulated.ValueOf[emulated.BW6761Fp]("2134420113812914182827401634702786987961092298592150550562829030382818487856108796955415571408297807671429201199583453019724819973429332912629032447937549705209104608221786349214759677126963931958078901227230879866726885052445052") + PrecomputedLines[2][48] = emulated.ValueOf[emulated.BW6761Fp]("5301401015473682032740337209437935700074314236716542138678584038605557667425729743099901506999562233726460077935119644107638279830497233673245936694865259874079692208240393499480211839576495398325812230566957742585600576497712497") + PrecomputedLines[3][48] = emulated.ValueOf[emulated.BW6761Fp]("316237245987919229539675151585623375736684226237110958836699548130958545274822742432583398578011769251755976215515832642492191082965378422712648320438656571781374716032787691902701793949747039075058956435281006837601921806263940") + // i = 47 j = 0 + PrecomputedLines[0][47] = emulated.ValueOf[emulated.BW6761Fp]("2469284189539122802768267519758265300210938499804273343134547008175934793307225662932029768301730928461879508577001059172355557259081543667750994109716250014560670437893272354177453387546897128436862489793983234949990285254606452") + PrecomputedLines[1][47] = emulated.ValueOf[emulated.BW6761Fp]("1830146984962850968453585106583448314487150616750022651699991784484596999485896101930222322503775558473596214757083742949464480904175941842926026220500858275904724239903775931730004030363174733148460092172413335286081342076154919") + // i = 46 j = -1 + PrecomputedLines[0][46] = emulated.ValueOf[emulated.BW6761Fp]("4808750674906929458479780393808057785998730391052017477551570952987054533257389747936619777106961884229021401047614317646231862764036952175790162498761774530665237875686878919242872642150254465632443227768599747003266550581405388") + PrecomputedLines[1][46] = 
emulated.ValueOf[emulated.BW6761Fp]("1891884882567828239204882760611347865028360372463103495087043571568211658813632369204338450133243507396041732069576309883702980103212039678198834251217568056933974768947024453446363465598515701337006224040584611704892464155922646") + PrecomputedLines[2][46] = emulated.ValueOf[emulated.BW6761Fp]("1782677192263174653114679652738148432045753091242027102628660919836353128448423680787560496210687690962650265604122324792301295723979276692851810069753634590299780709582183458343210520766053802972379144341478689122268621722390302") + PrecomputedLines[3][46] = emulated.ValueOf[emulated.BW6761Fp]("4601255220474206175154039807739446217437347898090772634618295737347141908903052922685778628767090029489974230859891899402569132880114132773342251471175144576481058945676007839307009592378129694341000415552823241628198058676301864") + // i = 45 j = 0 + PrecomputedLines[0][45] = emulated.ValueOf[emulated.BW6761Fp]("3313817549411916259720752897067881926542086622610865958536519796066914607656859212778668206734368535950822735444718824275919603099081215658685318755509674356243561416633830397196231266219511716007749348762645681330551126790870376") + PrecomputedLines[1][45] = emulated.ValueOf[emulated.BW6761Fp]("3252042130416329114631484338445192030646251740098948126128650603635321744781873461724605669481911071863328041692968946348818588320589336163492834872989177013461385360402636605130251922052366702656283468969500573764322346005244605") + // i = 44 j = 0 + PrecomputedLines[0][44] = emulated.ValueOf[emulated.BW6761Fp]("6338761415503833574158993491917412731054381446353019200951359861863236217623802777827212821773685337418478392924282420674032240090520869982950877201025879182995853168001568974019311667681399378678658372257019594917864973673309193") + PrecomputedLines[1][44] = emulated.ValueOf[emulated.BW6761Fp]("5145208095186900983628565732407350308228178211161732689086638870941412642066547959806176703516458076133350439172796500965758264802653323628323907233788177515377158457742460353846552206469110836809410531151609398126545913664188630") + // i = 43 j = 0 + PrecomputedLines[0][43] = emulated.ValueOf[emulated.BW6761Fp]("5196926067227948572519208302811648552049645971471523826735711828398229766956255254268762415203531419391167114934194578362433366430246628887120782279447934571699353681118100448979965849493514365905012300253959326090661874306957037") + PrecomputedLines[1][43] = emulated.ValueOf[emulated.BW6761Fp]("3119923259263949969749529244369104226493012793832028159004381001301808535947984715041184266613240861351948168589053862674232986171291382714515365072351226441523796700113665533763234694043303056681109280325373132113235807147635810") + // i = 42 j = 0 + PrecomputedLines[0][42] = emulated.ValueOf[emulated.BW6761Fp]("3701751799991197886131283811604024072336985353686243907343722147267167812517345955394536600725894396109453469705947913105619652148410105985236906738662670825757973774071236977402882686728704766160168756235357103252696719757853080") + PrecomputedLines[1][42] = emulated.ValueOf[emulated.BW6761Fp]("4328206745347948324047675202212800610600103831121253704737929492249698600108545495006620418550929295799787934661360306902163955205189032671319126960886388468587745108551360815318758470615289870220975458457801506393380445587040331") + // i = 41 j = 0 + PrecomputedLines[0][41] = 
emulated.ValueOf[emulated.BW6761Fp]("5243931540637815073298817765538453903847853699668038453409399793026947683823322658059258449070462262268954906925773665150004113080811722220228212123648000373937683836475968100704584922681517218113636325461148739730351900244430486") + PrecomputedLines[1][41] = emulated.ValueOf[emulated.BW6761Fp]("696303549522548469135732799411259466465742009157193344130063238624867632912352618408234824957796454146834346585274133110077926048562040046657743412672416024185699629924036777170476680150185390702804937742220109575805994018565880") + // i = 40 j = 0 + PrecomputedLines[0][40] = emulated.ValueOf[emulated.BW6761Fp]("6556291914440995915077184566874121406081010955823879975783964894145083820107493360375502209467776811107722009865129010270289679418541453362001848171476122313995482719748410193307819143390117395993923732784399598145000236756770778") + PrecomputedLines[1][40] = emulated.ValueOf[emulated.BW6761Fp]("5030629984485491079543487382652657069214793759421195749161710920620485872796101353201771155469317379128888992818420203178859483332221009950916704606376754774957762349644308776768989209075348134409711677264115878561405610708211224") + // i = 39 j = 0 + PrecomputedLines[0][39] = emulated.ValueOf[emulated.BW6761Fp]("6091140106374548700858627680855718215758601972928087739044474141367987574926899958501533551620021166028801571176174643811954610349422547026888525945983551983557194042193263052574485407169478221268356892419346794783326123815016201") + PrecomputedLines[1][39] = emulated.ValueOf[emulated.BW6761Fp]("5948828392020302673989780887295980588705729037684336347591433010129397932006719799073817921549578471357750078662933637274249244592252477864992911501358565593172036494701721206192757798992471786865192758135034397631178011919037343") + // i = 38 j = 0 + PrecomputedLines[0][38] = emulated.ValueOf[emulated.BW6761Fp]("386805651598570530441388340523805281783272334115584252445736467104669895592428739901130798336649030556456963006714317965660902340316250431631905429626265279629592255272091423988229378924855038046417269484741767052733920691109051") + PrecomputedLines[1][38] = emulated.ValueOf[emulated.BW6761Fp]("4525766759081710684536631674589709318407224846654587977942741430894023706116118416593023639529015026174903223073533435099953484358304277428041608689829938708753912201343208781545656144848211632310624653753477849722613987324257405") + // i = 37 j = 0 + PrecomputedLines[0][37] = emulated.ValueOf[emulated.BW6761Fp]("5540817150529726185507644807907302646359758202921121879259874245826174565233088540473190697869957879229268506603897981980937573633705421083663690135557685806659327424996048555355538859184738301549091170119312258022116464715530897") + PrecomputedLines[1][37] = emulated.ValueOf[emulated.BW6761Fp]("6718515479962911505845755520433037626091003178541819639197260331297301140454264418644497190790920589684999232040565875697918943358405976806940756273856093062633628493384732067302799962517254636294396334020560227976261698493244667") + // i = 36 j = 0 + PrecomputedLines[0][36] = emulated.ValueOf[emulated.BW6761Fp]("3869131705462453083574569418603668066852580690436301014856841457006371403127016588221185386053660625244216382527839982867270233918401458075631441051875942486236219721422637941272410781283146192147117345683725174317594008155342822") + PrecomputedLines[1][36] = 
emulated.ValueOf[emulated.BW6761Fp]("4314980459231262879559055441934626715524184332706886138545230734708327298356917201957592166096564220843513786090287049232111040932514169912041600500621718076064675458551242521198970649693981760632428180630077255231913463976015069") + // i = 35 j = 0 + PrecomputedLines[0][35] = emulated.ValueOf[emulated.BW6761Fp]("2427320334264300402360662887694403717302806847738543014302091243118106695361301072589835989745366595724889306543713826268811885589885050074000488307245969105060415804657357182570924945821477542418707894862854377873456920736211574") + PrecomputedLines[1][35] = emulated.ValueOf[emulated.BW6761Fp]("6547475230634432672084652762025718565231530626989882119549953997097318533514829027216502342143054309787065484993195775491031212236423786271414014282819624182157087622098437337674637018686871012527469005617383148581769811191593996") + // i = 34 j = 0 + PrecomputedLines[0][34] = emulated.ValueOf[emulated.BW6761Fp]("5470420367198776800379064913602474642539418292471993924725551625772329613971394974002758597493934979843640341499153207410684010265570101454360050504825249909503161673795412448516621962569000123192182861955528503541040067830525054") + PrecomputedLines[1][34] = emulated.ValueOf[emulated.BW6761Fp]("4494982168085199701292806408185286692801752874423548008688924102595293266188792567032936624939523382062636324480917794944337731206576837066847574356728903291949351878926970103254919020595781292363245971532897673992090860126702369") + // i = 33 j = 0 + PrecomputedLines[0][33] = emulated.ValueOf[emulated.BW6761Fp]("4369399103458313145656091760726423066162809996158810942119411369347168469388517928848081249440078394350933082794925208252565075936062188001383128598970827204559006905350310028359496169149633766918948630158683088828171365354841158") + PrecomputedLines[1][33] = emulated.ValueOf[emulated.BW6761Fp]("2836043989963268489265367048075271632950100267112285454252249322774245764330267772073010634431547940389182825464390146593630054274866401588089959215419663648196282197240743898943526789692085504083282842684412950862097306893801599") + // i = 32 j = 0 + PrecomputedLines[0][32] = emulated.ValueOf[emulated.BW6761Fp]("3161593094746195465078500452987750093476067375325753560737558405536356806261984517162664827224212982791699115483519562494689316083005486820021595184605498108610292650087193437653864612092819356371768033919819822231437319340885818") + PrecomputedLines[1][32] = emulated.ValueOf[emulated.BW6761Fp]("3819644466956975667540265504039562915781357246138129664042550004125848703662511727184229838152701048608638539116167218942926958298935218078169210956945303241453470842592034811786186043813389041735902241531389493661689183192201314") + // i = 31 j = 0 + PrecomputedLines[0][31] = emulated.ValueOf[emulated.BW6761Fp]("2392062469462140923741025287719046671043506623791654187355906507999444953112425089053401530697483613645070888237426395063497315567081124795079610087638568678421297497052700149306919191659789394545701135340977814109862320695165568") + PrecomputedLines[1][31] = emulated.ValueOf[emulated.BW6761Fp]("4832734711312326287607356828166645428191493014408322512729086123056982141909670178885145013471528991848522869327843883017611143777932437309043998670383657756265882429166098447206115408339702334049715325324660913143518801609993303") + // i = 30 j = 0 + PrecomputedLines[0][30] = 
emulated.ValueOf[emulated.BW6761Fp]("1477192912994877076584299443047904339823768878392720469166065838890013031033549366840136627511260130996211450274999164360986943039558119431141104532916419870109971379707260906139507809969364754825798030720845059282506581319339014") + PrecomputedLines[1][30] = emulated.ValueOf[emulated.BW6761Fp]("4848580278471725966778152640625454583315272135939414014193260350786313591179868891886266751431003335469607271916394805253759620275236580688976609102137853719201345285969924928810598190847426584959246874917721341230807553992359714") + // i = 29 j = 0 + PrecomputedLines[0][29] = emulated.ValueOf[emulated.BW6761Fp]("4612548657730367186580082714976803926576026636075855117464342397130243372418284273429514705085325850879471479551060605745051440773485204605835344072407837553391635924303211107514927355596277812147946061939813370285122706400351567") + PrecomputedLines[1][29] = emulated.ValueOf[emulated.BW6761Fp]("3008760023968267493067473651579101210329508467951113202776757502253763787362685970341712629429191134564062267116595653720696704870511446188226366583198831759752337733919632784312043665011553157853074013972697508251271557897622116") + // i = 28 j = 0 + PrecomputedLines[0][28] = emulated.ValueOf[emulated.BW6761Fp]("389611075583305447184379239616257601802679758494373323193694192448587463258095924560882091875803061182347014161474858927897859287672456186458075155526027559450439890855840286219699926277529927908082352293235902086001952356050806") + PrecomputedLines[1][28] = emulated.ValueOf[emulated.BW6761Fp]("5233051879124279727841750931002413127344266604685137080535552109067249081614590411077745857610971661785764119168546299127950916764399413545456303778964651840043599767943011212201541649077025031848271498549479500822460404193902059") + // i = 27 j = 0 + PrecomputedLines[0][27] = emulated.ValueOf[emulated.BW6761Fp]("2051301240417715203541474847440511788275661712622367960004326252954683168118911315500500243454295796557207383513952291223465916233842646702105985061178036318276199181260966664406697541077931483773674540551675446385720134709377958") + PrecomputedLines[1][27] = emulated.ValueOf[emulated.BW6761Fp]("5992530410566193246175858444555670259253173691580655391949633577253373894334340401965049824695681254882694373651839548752052832871651221221812084861753932936474756435842548719584127897656636378524589912899548493768584550162322026") + // i = 26 j = 0 + PrecomputedLines[0][26] = emulated.ValueOf[emulated.BW6761Fp]("503203931329244869313993274825431763771286678085922857991598333658068092455874867591344068855550890716224607734741433955654810857307893638961108425713685811873615289262937183421557667697024010944533996098418759629130851637575061") + PrecomputedLines[1][26] = emulated.ValueOf[emulated.BW6761Fp]("536831931203209005383817884414882142673174308352835930285382972591114428623647610993209440256536563763832989938629068274445183810773409133208605690434866892161798632072112140226565160045865984238975881552054351099184102165059436") + // i = 25 j = 0 + PrecomputedLines[0][25] = emulated.ValueOf[emulated.BW6761Fp]("6463430776614704091043359647080337102302641202721165935467039486666890211843177302823757119636439288705089663618847623612548484578512254481774726375701260571814452623968715666812055458971750746812300447755820525065477082096459844") + PrecomputedLines[1][25] = 
emulated.ValueOf[emulated.BW6761Fp]("5902012351077562723668140155016090312343270371330172213810546725833050020604407548694284789195431375873831632478747356230690076169624436201213110122483265871607190241540211077771119739664258665822909426121943576686432308367091097") + // i = 24 j = 0 + PrecomputedLines[0][24] = emulated.ValueOf[emulated.BW6761Fp]("1209702480415440437280489808669256969507660822534277955404713353804298133175875997191147737012533706181789004010533635994123995990432083493750059042395030464593576198103569451570698867736371126667175609011467744130522758870853541") + PrecomputedLines[1][24] = emulated.ValueOf[emulated.BW6761Fp]("5845837029777750013459224043580536350325856602315686949865177782686518032836923907289639741918375475746894480686951269100539861615728782101044640290022570112468403517712710278153623346554615057772599871299926382057138771273059968") + // i = 23 j = 0 + PrecomputedLines[0][23] = emulated.ValueOf[emulated.BW6761Fp]("3669387921716642077111586784755843238178249266660604244928408338075745917801994751512648659830513119488679504346125606638021820335690130566721813481244030389132982533363253276690643464035331736992004545492718069275288785687228650") + PrecomputedLines[1][23] = emulated.ValueOf[emulated.BW6761Fp]("3369368532599196794572924381408197965372261616436988602166850446579339042019774376973479635755731937177394762130890209583682974918000358029127015486759788882988281657019114821541855861455914246754318200512618966432747665826090334") + // i = 22 j = 0 + PrecomputedLines[0][22] = emulated.ValueOf[emulated.BW6761Fp]("32137541075810451210446631093663209561764748864766127373571218650766496266539912695937374318328906120216942929280088227478321482057801830310739684316581870072030933254887610177835453963146235372951835561368946061826869054994273") + PrecomputedLines[1][22] = emulated.ValueOf[emulated.BW6761Fp]("2839372605182823507158689651507665403454475643571309601972684294775968522205529181272037381032573635287150715729752526401082948508959773316413692414263445724674361578902109659588751006031022891518347158904500276267507223131882814") + // i = 21 j = 0 + PrecomputedLines[0][21] = emulated.ValueOf[emulated.BW6761Fp]("879823418448820628913406122843694317479068503824026559023059280487219225830568787775707080393802920806947189116423374891425653917985305390655018604269987777967653564492499222855808235485800357626429555167604348700507317738364124") + PrecomputedLines[1][21] = emulated.ValueOf[emulated.BW6761Fp]("2019956656507533513044304804620987498029973043800218793681083360349414431549401110624447096623879229407017520219447735283040387709926060527484378025503775835133246120572603569300328786465231048952510222155243444173812217480165304") + // i = 20 j = 0 + PrecomputedLines[0][20] = emulated.ValueOf[emulated.BW6761Fp]("3281290454511032928255125679760136566984128957779591366529207528731731533134600793218349205877224552035737108314702343961052787202471730566503327144726244281919972166384619563047210740822691567734907997537667405058497230480289404") + PrecomputedLines[1][20] = emulated.ValueOf[emulated.BW6761Fp]("217080071594348728220072331086701386987927732572324377989762242979853186956477828898073406659897247062969966133672899633849635715234824558816960723953080058711086889272221299802755516710526712507948703791745209040706597214332330") + // i = 19 j = 0 + PrecomputedLines[0][19] = 
emulated.ValueOf[emulated.BW6761Fp]("1905514651993040879610862104071383816957968412400965216285472577079609325869467288166983369846807418882918776026807359896903271153547170142288644907690025278163828398651066616389966680857524053164685016151033284702480515801556854") + PrecomputedLines[1][19] = emulated.ValueOf[emulated.BW6761Fp]("2816143647967364054193437905594006222302401544874073334309295347924554450783714363828797748288245483156141622680173960026092101940922703363424723805341028843408807897111158221058652334715877973135223266203838528902175225909759962") + // i = 18 j = 0 + PrecomputedLines[0][18] = emulated.ValueOf[emulated.BW6761Fp]("3185808100707270488220861346228500651936696674923812981059355392187544269657634677839676505285448730714790787439659560131209743667640836808911335723448482317036276477849802480942477940635945348500165742930167046760547595579852815") + PrecomputedLines[1][18] = emulated.ValueOf[emulated.BW6761Fp]("4675202760094531043725176831180663495747953794115218641026912893113692505533433572730388712820529906426007256557488721698280794278442275319560802351096319033340475048755930586257216038466820741811485638189993825149184613794329637") + // i = 17 j = 0 + PrecomputedLines[0][17] = emulated.ValueOf[emulated.BW6761Fp]("5635658802467315943564921159563504596167530609164425115569295836814932817517998408089848297249529008411035640913039177309966228255844915487089460291854988530924191714935421333239626728042785267863664613844331879333468065828136587") + PrecomputedLines[1][17] = emulated.ValueOf[emulated.BW6761Fp]("720713426151036602883179170721367739928889749190344988870442595628309715631339336820381795599137000774024829673311066906838972976105051292835974314413053140708619118427273632229673686697031873977463153954458230306803727050851428") + // i = 16 j = 0 + PrecomputedLines[0][16] = emulated.ValueOf[emulated.BW6761Fp]("1864711373715322185727274146635291781641810573108074514648212957755487300184821907927510708219825618515375569210937185934033739458982263275450097382506867527239195625939073554812307989458145792940160837796399384109722154239294444") + PrecomputedLines[1][16] = emulated.ValueOf[emulated.BW6761Fp]("3988928910093052639189864720356237097930818779307457740646973707354026597452250119882899767842529798799120303306317581346856759878215117702725337077786120241471309210981341652385909293913974337040166447032309609692911278481656743") + // i = 15 j = 0 + PrecomputedLines[0][15] = emulated.ValueOf[emulated.BW6761Fp]("2111972138277766886255103394204659019008608985884205156966330625852224588149790528146858336924984139926163787488158760744677956554133213334043208795767747352197710008701920443139616096988941724903568076255790986324464909342967443") + PrecomputedLines[1][15] = emulated.ValueOf[emulated.BW6761Fp]("5406106584029549219347956561279481223128337834964176867650670161471450936720592598327285296544680137446958348252242981441799765208341620018997343168946611305335188418690624746362103485058997168051562245500340938652938314680722183") + // i = 14 j = 0 + PrecomputedLines[0][14] = emulated.ValueOf[emulated.BW6761Fp]("3629209394650575177871518222330833326877272533769509791125750744516401250131698579821377134314239098363156873823213748090243637996097460542124115773096078025667715694475433979863616799378688818851677492251164758634054259820876040") + PrecomputedLines[1][14] = 
emulated.ValueOf[emulated.BW6761Fp]("900520437221008886800548981520464166064858146226480487932023831792015057459315475857825023928512447856980411862732432661615834630191260295728391491807141110984333046029829426377271854520636332926473064932965950054970820564717103") + // i = 13 j = 0 + PrecomputedLines[0][13] = emulated.ValueOf[emulated.BW6761Fp]("4734810408896001811214566767480966766041181785201049779275187432208928522103518291617951325971697525128808099150421788098814049436302285080209136174985981442006550818503120059546064823364852550394073259460444805722962320216100180") + PrecomputedLines[1][13] = emulated.ValueOf[emulated.BW6761Fp]("5865422391872697062771425723030860241442900546731041779331290548814431425813297913622309171769973340126174074338614151460314530384011864184662712730246129508578512966671421774525444186666845500694876618359233199037734923319542130") + // i = 12 j = 0 + PrecomputedLines[0][12] = emulated.ValueOf[emulated.BW6761Fp]("3887760817750950677847894913143049797433989451093334387487644754302973690605338116344551865359585356915941906643556393078331726567201153915719711775804769226746078185240704547266041135408371775007735155408569785634346533933769753") + PrecomputedLines[1][12] = emulated.ValueOf[emulated.BW6761Fp]("441119206465350316882549165505604924459590172296413032937959602659532459930471493451092782512822884287867392074500585076321765177451793068061537323654024059980774359938586494921359509928655652059494641269369538241172898663035826") + // i = 11 j = 0 + PrecomputedLines[0][11] = emulated.ValueOf[emulated.BW6761Fp]("5404607077394994431251930587789042811217547570736433054557556586640813553286600053424846928225504205015146922567129918850573490092069170627610722116586131146412273453343423884209114457640062111469595924927482095035407809633089661") + PrecomputedLines[1][11] = emulated.ValueOf[emulated.BW6761Fp]("4992128273422493973515931677118934188850228139267165517174754416283071528379160023746908453872389870166264699098349225348378962525412348488383512774325517097683340882619777769544551984620309329567701220099486813789650124085796078") + // i = 10 j = 0 + PrecomputedLines[0][10] = emulated.ValueOf[emulated.BW6761Fp]("763087849666179695979461625611996769804051713411038929855288244851316550162353575442631464450493032671382048273857185992916619387491118179703485106865943075546997886658458038757728898639967205066970190473160703633384738620148070") + PrecomputedLines[1][10] = emulated.ValueOf[emulated.BW6761Fp]("5227577585574071525091789577898879814968696426931472551503701053187708164510382055306306677871415061750278454070694425088294917207001686901252368465737693589876043800563096468947400702841366495844730967898132123037353361003037153") + // i = 9 j = 0 + PrecomputedLines[0][9] = emulated.ValueOf[emulated.BW6761Fp]("1781035943207167749034352233955519642504059203171184362796879188386098053396193436351030321164897281134193269966421005829822879973054651990134383036098787832244877022488241951982491796164194311656925620981313650251332551878174568") + PrecomputedLines[1][9] = emulated.ValueOf[emulated.BW6761Fp]("5114307858760558505028218493878912344149957944171727828461223441276920205074473499369381914839980338882527506453259283811795570014423568561039994119506118381158174777340851075528982301957519935733692661412135735978386601659268788") + // i = 8 j = 0 + PrecomputedLines[0][8] = 
emulated.ValueOf[emulated.BW6761Fp]("2430911614625980011269596384381272763610223985279525352706093989400397822031992694866243135866708876193081539441025415164907568506854410818419160033732911927209720487744352610460298957595825863662700086358463670994353428472440400") + PrecomputedLines[1][8] = emulated.ValueOf[emulated.BW6761Fp]("4335308494558780318705382979642947854233414946868553910346431996037254174896634678952788540674877155437972939636950656367011448361678973639684527046756791677216648694827372273647715817383867045823075818246233445668898931395594767") + // i = 7 j = 0 + PrecomputedLines[0][7] = emulated.ValueOf[emulated.BW6761Fp]("2547848118024519988008929564919046377764734130919966703992247478777865133666285581651380910908199249341499527419093291036315824924421071456781798698355988988002895935285715260847170096816249495158282329971766659257600058565068551") + PrecomputedLines[1][7] = emulated.ValueOf[emulated.BW6761Fp]("3815466815949541446803214953758427869213605006338708824999472457517728301413792590192240627877768012849217868883227016924672094847656511519302987719666096177580954550515172474878462121442935388043933185441940709007646866467092779") + // i = 6 j = 0 + PrecomputedLines[0][6] = emulated.ValueOf[emulated.BW6761Fp]("1167077108768000995083265583371639051810160024459730164220078524025147365405410381409052481083355483954665138347762470353560327129943034303256612708531477178495683237179173891252090025088913287905696936171629029789627847133819510") + PrecomputedLines[1][6] = emulated.ValueOf[emulated.BW6761Fp]("406035093196089307103307474615059642877702648396642703148247458824638532132971266746290563977553124594299791531448357641367954689961054917535551770937131785613156413700799861618692902162324871023202201085410049092483775186775952") + // i = 5 j = 0 + PrecomputedLines[0][5] = emulated.ValueOf[emulated.BW6761Fp]("5399341071441425470861999462932780848935761306689901676544176243299434057444898651559397277961428534569943877757152043198433156075278927379555113704692063491709798852962308451848477392431046008507089543777328572800164773821369044") + PrecomputedLines[1][5] = emulated.ValueOf[emulated.BW6761Fp]("4380995413647501953014022884907895222277727851243307821429762028849533207304346950118872297176501204979430324923154815692850079796456195295072263401006293048513882137313361959316342663746522965665112134206267715375474513680925695") + // i = 4 j = 0 + PrecomputedLines[0][4] = emulated.ValueOf[emulated.BW6761Fp]("2355007528001232760618455850480455889878756159911741489782272804065790095521530856067754068562930027102143923048583996593049902939282091755328298564310219091115127704570377575491630759741851478137224714576608472109143163134241888") + PrecomputedLines[1][4] = emulated.ValueOf[emulated.BW6761Fp]("4665912899792899455625749301506444981281230867228863943483114163843120887049405431893135842230516235480754063251618399643766179241982200138031389011412159570814247143899438305766243708072447877797562498822515933133270397139433709") + // i = 3 j = 0 + PrecomputedLines[0][3] = emulated.ValueOf[emulated.BW6761Fp]("5207187280572824372922051863935983381154066518638866240941664431364801000621438981955438756540826259217505053518567779146110599177398084779430644956972778562746737866546243227275793645030333378961907301442124256757681561601565509") + PrecomputedLines[1][3] = 
emulated.ValueOf[emulated.BW6761Fp]("321949009317111361310696929365446618488074131566023032183338451206958310261692318478202604047493903733315880544029393577274070422541105562622923873179574100613877657615274865330644412569925819105384033887215110851435430212659905") + // i = 2 j = 0 + PrecomputedLines[0][2] = emulated.ValueOf[emulated.BW6761Fp]("171604085768836918994906821282906210004863151431034585830650164619711973937327792462246851932953079312423762492731678184930260011996638026853655426999132818363829420786676633528967828111050957233996306406638128544950132334484364") + PrecomputedLines[1][2] = emulated.ValueOf[emulated.BW6761Fp]("6792516736168200991229865079892385619624765294352391225236313718501707168858243912006527224359947264023889148380617685904914239210331300865987121974424549842263067180080608154025402396835373163617005222364670324911365935342379026") + // i = 1 j = 1 + PrecomputedLines[0][1] = emulated.ValueOf[emulated.BW6761Fp]("944621309521718726195869903203401589998750731769836683468553680511480125984458901894639375146584126764337783845625156474446875926698203520367014172302770224257630091870263675889741986266367203690209222830467843091715825765754640") + PrecomputedLines[1][1] = emulated.ValueOf[emulated.BW6761Fp]("151717716188583117580213009248663088871479025716844040691055056011917190000249071366607378973846869587747315850766873926578345476515479690657211936017801049784750197114270922646933630675513004231692030241645441850213408222685097") + PrecomputedLines[2][1] = emulated.ValueOf[emulated.BW6761Fp]("28240591223645435153733073361277903317564755913445487767068483539533044479138410276002907219108075484110392505549278277324143093596737443922825188690657065974320811475073328849015404093116839406804166651895001751574250775184455") + PrecomputedLines[3][1] = emulated.ValueOf[emulated.BW6761Fp]("4589649554746237140826435156384262430368105502118975572158357768261496043366512718050170319247735279131481525972354418839271370133844112357832425807335185239692216417703857721339378872509221452776525897587323689256311521904146265") + // i = 0 j = -3 + PrecomputedLines[0][0] = emulated.ValueOf[emulated.BW6761Fp]("640465197860141616651943019830167468662071649572960866185680631674097967785961934733973617225820045841013082657022103021262545174913240689397685222285103870309492801349613227748170760454313475424456095547502930115483054035772110") + PrecomputedLines[1][0] = emulated.ValueOf[emulated.BW6761Fp]("2388999626928869692387267530849694884621760071213154089905933112027488411260288999793599348532958544993763708471553506858810523705218284141900132752266650669434242141637113380204971907782122979495643127445776903903628231219718439") + return PrecomputedLines +} diff --git a/std/algebra/emulated/sw_emulated/doc_test.go b/std/algebra/emulated/sw_emulated/doc_test.go index 2d63969ecf..db44dbf209 100644 --- a/std/algebra/emulated/sw_emulated/doc_test.go +++ b/std/algebra/emulated/sw_emulated/doc_test.go @@ -18,7 +18,7 @@ type ExampleCurveCircuit[Base, Scalar emulated.FieldParams] struct { } func (c *ExampleCurveCircuit[B, S]) Define(api frontend.API) error { - curve, err := sw_emulated.New[B, S](api, sw_emulated.GetCurveParams[emulated.BN254Fp]()) + curve, err := sw_emulated.New[B, S](api, sw_emulated.GetCurveParams[emulated.Secp256k1Fp]()) if err != nil { panic("initalize new curve") } @@ -42,8 +42,8 @@ func ExampleCurve() { circuit := ExampleCurveCircuit[emulated.Secp256k1Fp, emulated.Secp256k1Fr]{} witness := ExampleCurveCircuit[emulated.Secp256k1Fp, emulated.Secp256k1Fr]{ Res: 
sw_emulated.AffinePoint[emulated.Secp256k1Fp]{ - X: emulated.ValueOf[emulated.Secp256k1Fp](g.X), - Y: emulated.ValueOf[emulated.Secp256k1Fp](g.Y), + X: emulated.ValueOf[emulated.Secp256k1Fp](Q.X), + Y: emulated.ValueOf[emulated.Secp256k1Fp](Q.Y), }, } ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) @@ -82,4 +82,11 @@ func ExampleCurve() { } else { fmt.Println("verify") } + // Output: + // result ([5101572491822586484 7988715582840633164 10154617462969804093 9788323565107423858], [12871521579461060004 12592355681102286208 17300415163085174132 96321138099943804])compiled + // setup done + // secret witness + // public witness + // proof + // verify } diff --git a/std/algebra/emulated/sw_emulated/params.go b/std/algebra/emulated/sw_emulated/params.go index efce7a1566..44a5c8cd61 100644 --- a/std/algebra/emulated/sw_emulated/params.go +++ b/std/algebra/emulated/sw_emulated/params.go @@ -6,6 +6,7 @@ import ( bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bn254" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/secp256k1" "github.com/consensys/gnark/std/math/emulated" ) @@ -96,6 +97,20 @@ func GetP384Params() CurveParams { } } +// GetBW6761Params returns the curve parameters for the curve BW6-761. +// When initialising new curve, use the base field [emulated.BW6761Fp] and scalar +// field [emulated.BW6761Fr]. +func GetBW6761Params() CurveParams { + _, _, g1aff, _ := bw6761.Generators() + return CurveParams{ + A: big.NewInt(0), + B: big.NewInt(-1), + Gx: g1aff.X.BigInt(new(big.Int)), + Gy: g1aff.Y.BigInt(new(big.Int)), + Gm: computeBW6761Table(), + } +} + // GetCurveParams returns suitable curve parameters given the parametric type // Base as base field. It caches the parameters and modifying the values in the // parameters struct leads to undefined behaviour. 
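A usage sketch may help here: the BW6-761 parameters above are consumed through the same generic constructor as the other curves, mirroring the doc_test example earlier in this patch. The circuit below is illustrative only; its name, package and witness layout are assumptions and not code added by this patch.

package example_test // hypothetical package, for illustration only

import (
	"github.com/consensys/gnark/frontend"
	"github.com/consensys/gnark/std/algebra/emulated/sw_emulated"
	"github.com/consensys/gnark/std/math/emulated"
)

// BW6761MulCircuit checks that S*P == Res over the emulated BW6-761 curve.
type BW6761MulCircuit struct {
	P   sw_emulated.AffinePoint[emulated.BW6761Fp]
	S   emulated.Element[emulated.BW6761Fr]
	Res sw_emulated.AffinePoint[emulated.BW6761Fp]
}

func (c *BW6761MulCircuit) Define(api frontend.API) error {
	// GetCurveParams[emulated.BW6761Fp] resolves to the cached bw6761Params set up in init() below.
	curve, err := sw_emulated.New[emulated.BW6761Fp, emulated.BW6761Fr](api, sw_emulated.GetCurveParams[emulated.BW6761Fp]())
	if err != nil {
		return err
	}
	res := curve.ScalarMul(&c.P, &c.S)
	curve.AssertIsEqual(res, &c.Res)
	return nil
}

Fixed-base operations such as ScalarMulBase then go through the 377-entry Gm table produced by computeBW6761Table in params_compute.go.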
@@ -112,6 +127,8 @@ func GetCurveParams[Base emulated.FieldParams]() CurveParams { return p256Params case emulated.P384Fp{}.Modulus().String(): return p384Params + case emulated.BW6761Fp{}.Modulus().String(): + return bw6761Params default: panic("no stored parameters") } @@ -123,6 +140,7 @@ var ( bls12381Params CurveParams p256Params CurveParams p384Params CurveParams + bw6761Params CurveParams ) func init() { @@ -131,4 +149,5 @@ func init() { bls12381Params = GetBLS12381Params() p256Params = GetP256Params() p384Params = GetP384Params() + bw6761Params = GetBW6761Params() } diff --git a/std/algebra/emulated/sw_emulated/params_compute.go b/std/algebra/emulated/sw_emulated/params_compute.go index 5eaf21e87b..88a514c7bc 100644 --- a/std/algebra/emulated/sw_emulated/params_compute.go +++ b/std/algebra/emulated/sw_emulated/params_compute.go @@ -6,6 +6,7 @@ import ( bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bn254" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/secp256k1" ) @@ -130,3 +131,29 @@ func computeP384Table() [][2]*big.Int { } return table } + +func computeBW6761Table() [][2]*big.Int { + Gjac, _, _, _ := bw6761.Generators() + table := make([][2]*big.Int, 377) + tmp := new(bw6761.G1Jac).Set(&Gjac) + aff := new(bw6761.G1Affine) + jac := new(bw6761.G1Jac) + for i := 1; i < 377; i++ { + tmp = tmp.Double(tmp) + switch i { + case 1, 2: + jac.Set(tmp).AddAssign(&Gjac) + aff.FromJacobian(jac) + table[i-1] = [2]*big.Int{aff.X.BigInt(new(big.Int)), aff.Y.BigInt(new(big.Int))} + case 3: + jac.Set(tmp).SubAssign(&Gjac) + aff.FromJacobian(jac) + table[i-1] = [2]*big.Int{aff.X.BigInt(new(big.Int)), aff.Y.BigInt(new(big.Int))} + fallthrough + default: + aff.FromJacobian(tmp) + table[i] = [2]*big.Int{aff.X.BigInt(new(big.Int)), aff.Y.BigInt(new(big.Int))} + } + } + return table +} diff --git a/std/algebra/emulated/sw_emulated/point.go b/std/algebra/emulated/sw_emulated/point.go index 0dbf653b6a..2b396488ad 100644 --- a/std/algebra/emulated/sw_emulated/point.go +++ b/std/algebra/emulated/sw_emulated/point.go @@ -6,6 +6,7 @@ import ( "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/std/math/emulated" + "golang.org/x/exp/slices" ) // New returns a new [Curve] instance over the base field Base and scalar field @@ -86,6 +87,38 @@ type AffinePoint[Base emulated.FieldParams] struct { X, Y emulated.Element[Base] } +// MarshalScalar marshals the scalar into bits. Compatible with scalar +// marshalling in gnark-crypto. +func (c *Curve[B, S]) MarshalScalar(s emulated.Element[S]) []frontend.Variable { + var fr S + nbBits := 8 * ((fr.Modulus().BitLen() + 7) / 8) + sReduced := c.scalarApi.Reduce(&s) + res := c.scalarApi.ToBits(sReduced)[:nbBits] + for i, j := 0, nbBits-1; i < j; { + res[i], res[j] = res[j], res[i] + i++ + j-- + } + return res +} + +// MarshalG1 marshals the affine point into bits. The output is compatible with +// the point marshalling in gnark-crypto. +func (c *Curve[B, S]) MarshalG1(p AffinePoint[B]) []frontend.Variable { + var fp B + nbBits := 8 * ((fp.Modulus().BitLen() + 7) / 8) + x := c.baseApi.Reduce(&p.X) + y := c.baseApi.Reduce(&p.Y) + bx := c.baseApi.ToBits(x)[:nbBits] + by := c.baseApi.ToBits(y)[:nbBits] + slices.Reverse(bx) + slices.Reverse(by) + res := make([]frontend.Variable, 2*nbBits) + copy(res, bx) + copy(res[len(bx):], by) + return res +} + // Neg returns an inverse of p. It doesn't modify p. 
func (c *Curve[B, S]) Neg(p *AffinePoint[B]) *AffinePoint[B] { return &AffinePoint[B]{ @@ -201,6 +234,12 @@ func (c *Curve[B, S]) AddUnified(p, q *AffinePoint[B]) *AffinePoint[B] { return &result } +// Add calls [Curve.AddUnified]. It is defined for implementing the generic +// curve interface. +func (c *Curve[B, S]) Add(p, q *AffinePoint[B]) *AffinePoint[B] { + return c.AddUnified(p, q) +} + // double doubles p and return it. It doesn't modify p. // // ⚠️ p.Y must be nonzero. @@ -585,3 +624,27 @@ func (c *Curve[B, S]) JointScalarMulBase(p *AffinePoint[B], s2, s1 *emulated.Ele return c.add(res1, R0) } + +// MultiScalarMul computes the multi scalar multiplication of the points P and +// scalars s. It returns an error if the lengths of the slices mismatch. If the +// input slices are empty, then it returns the point at infinity. +// +// For the points and scalars the same considerations apply as for +// [Curve.AddUnified] and [Curve.ScalarMul]. +func (c *Curve[B, S]) MultiScalarMul(p []*AffinePoint[B], s []*emulated.Element[S]) (*AffinePoint[B], error) { + if len(p) != len(s) { + return nil, fmt.Errorf("mismatching points and scalars slice lengths") + } + if len(p) == 0 { + return &AffinePoint[B]{ + X: *c.baseApi.Zero(), + Y: *c.baseApi.Zero(), + }, nil + } + res := c.ScalarMul(p[0], s[0]) + for i := 1; i < len(p); i++ { + q := c.ScalarMul(p[i], s[i]) + res = c.AddUnified(res, q) + } + return res, nil +} diff --git a/std/algebra/emulated/sw_emulated/point_test.go b/std/algebra/emulated/sw_emulated/point_test.go index 0f16765816..249adbfae7 100644 --- a/std/algebra/emulated/sw_emulated/point_test.go +++ b/std/algebra/emulated/sw_emulated/point_test.go @@ -11,6 +11,9 @@ import ( fr_bls381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bn254" fr_bn "github.com/consensys/gnark-crypto/ecc/bn254/fr" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + fp_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fp" + fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/secp256k1" fp_secp "github.com/consensys/gnark-crypto/ecc/secp256k1/fp" fr_secp "github.com/consensys/gnark-crypto/ecc/secp256k1/fr" @@ -21,6 +24,88 @@ import ( var testCurve = ecc.BN254 +type MarshalScalarTest[T, S emulated.FieldParams] struct { + X emulated.Element[S] + R []frontend.Variable +} + +func (c *MarshalScalarTest[T, S]) Define(api frontend.API) error { + cr, err := New[T, S](api, GetCurveParams[T]()) + if err != nil { + return err + } + br := cr.MarshalScalar(c.X) + for i := 0; i < len(c.R); i++ { + api.AssertIsEqual(c.R[i], br[i]) + } + return nil +} + +func TestMarshalScalar(t *testing.T) { + assert := test.NewAssert(t) + var r fr_bw6761.Element + r.SetRandom() + rBytes := r.Marshal() + nbBytes := fr_bw6761.Bytes + nbBits := nbBytes * 8 + circuit := &MarshalScalarTest[emulated.BW6761Fp, emulated.BW6761Fr]{ + R: make([]frontend.Variable, nbBits), + } + witness := &MarshalScalarTest[emulated.BW6761Fp, emulated.BW6761Fr]{ + X: emulated.ValueOf[emulated.BW6761Fr](r), + R: make([]frontend.Variable, nbBits), + } + for i := 0; i < nbBytes; i++ { + for j := 0; j < 8; j++ { + witness.R[i*8+j] = (rBytes[i] >> (7 - j)) & 1 + } + } + err := test.IsSolved(circuit, witness, testCurve.ScalarField()) + assert.NoError(err) +} + +type MarshalG1Test[T, S emulated.FieldParams] struct { + G AffinePoint[T] + R []frontend.Variable +} + +func (c *MarshalG1Test[T, S]) Define(api frontend.API) error { + cr, err := New[T, S](api, GetCurveParams[T]()) + if err != nil 
{ + return err + } + br := cr.MarshalG1(c.G) + for i := 0; i < len(c.R); i++ { + api.AssertIsEqual(c.R[i], br[i]) + } + return nil +} + +func TestMarshalG1(t *testing.T) { + assert := test.NewAssert(t) + _, _, g, _ := bw6761.Generators() + gBytes := g.Marshal() + nbBytes := 2 * fp_bw6761.Bytes + nbBits := nbBytes * 8 + circuit := &MarshalG1Test[emulated.BW6761Fp, emulated.BW6761Fr]{ + R: make([]frontend.Variable, nbBits), + } + witness := &MarshalG1Test[emulated.BW6761Fp, emulated.BW6761Fr]{ + G: AffinePoint[emulated.BW6761Fp]{ + X: emulated.ValueOf[emulated.BW6761Fp](g.X), + Y: emulated.ValueOf[emulated.BW6761Fp](g.Y), + }, + R: make([]frontend.Variable, nbBits), + } + for i := 0; i < nbBytes; i++ { + for j := 0; j < 8; j++ { + witness.R[i*8+j] = (gBytes[i] >> (7 - j)) & 1 + } + } + err := test.IsSolved(circuit, witness, testCurve.ScalarField()) + assert.NoError(err) +} + type NegTest[T, S emulated.FieldParams] struct { P, Q AffinePoint[T] } @@ -400,7 +485,7 @@ func TestScalarMulBase2(t *testing.T) { func TestScalarMulBase3(t *testing.T) { assert := test.NewAssert(t) _, _, g, _ := bls12381.Generators() - var r fr_bn.Element + var r fr_bls381.Element _, _ = r.SetRandom() s := new(big.Int) r.BigInt(s) @@ -419,6 +504,28 @@ func TestScalarMulBase3(t *testing.T) { assert.NoError(err) } +func TestScalarMulBase4(t *testing.T) { + assert := test.NewAssert(t) + _, _, g, _ := bw6761.Generators() + var r fr_bw6761.Element + _, _ = r.SetRandom() + s := new(big.Int) + r.BigInt(s) + var S bw6761.G1Affine + S.ScalarMultiplication(&g, s) + + circuit := ScalarMulBaseTest[emulated.BW6761Fp, emulated.BW6761Fr]{} + witness := ScalarMulBaseTest[emulated.BW6761Fp, emulated.BW6761Fr]{ + S: emulated.ValueOf[emulated.BW6761Fr](s), + Q: AffinePoint[emulated.BW6761Fp]{ + X: emulated.ValueOf[emulated.BW6761Fp](S.X), + Y: emulated.ValueOf[emulated.BW6761Fp](S.Y), + }, + } + err := test.IsSolved(&circuit, &witness, testCurve.ScalarField()) + assert.NoError(err) +} + type ScalarMulTest[T, S emulated.FieldParams] struct { P, Q AffinePoint[T] S emulated.Element[S] @@ -558,6 +665,32 @@ func TestScalarMul5(t *testing.T) { assert.NoError(err) } +func TestScalarMul6(t *testing.T) { + assert := test.NewAssert(t) + var r fr_bw6761.Element + _, _ = r.SetRandom() + s := new(big.Int) + r.BigInt(s) + var res bw6761.G1Affine + _, _, gen, _ := bw6761.Generators() + res.ScalarMultiplication(&gen, s) + + circuit := ScalarMulTest[emulated.BW6761Fp, emulated.BW6761Fr]{} + witness := ScalarMulTest[emulated.BW6761Fp, emulated.BW6761Fr]{ + S: emulated.ValueOf[emulated.BW6761Fr](s), + P: AffinePoint[emulated.BW6761Fp]{ + X: emulated.ValueOf[emulated.BW6761Fp](gen.X), + Y: emulated.ValueOf[emulated.BW6761Fp](gen.Y), + }, + Q: AffinePoint[emulated.BW6761Fp]{ + X: emulated.ValueOf[emulated.BW6761Fp](res.X), + Y: emulated.ValueOf[emulated.BW6761Fp](res.Y), + }, + } + err := test.IsSolved(&circuit, &witness, testCurve.ScalarField()) + assert.NoError(err) +} + type ScalarMulEdgeCasesTest[T, S emulated.FieldParams] struct { P, R AffinePoint[T] S emulated.Element[S] diff --git a/std/algebra/interfaces.go b/std/algebra/interfaces.go new file mode 100644 index 0000000000..2cfbf60a6a --- /dev/null +++ b/std/algebra/interfaces.go @@ -0,0 +1,67 @@ +package algebra + +import "github.com/consensys/gnark/frontend" + +type ScalarT any +type GroupElementT any +type G1ElementT GroupElementT +type G2ElementT GroupElementT +type GtElementT GroupElementT + +// Curve defines group operations on an elliptic curve. 
+type Curve[S ScalarT, G1El G1ElementT] interface { + // Add adds two points and returns the sum. It does not modify the input + // points. + Add(*G1El, *G1El) *G1El + + // AssertIsEqual asserts that two points are equal. + AssertIsEqual(*G1El, *G1El) + + // Neg negates the points and returns a negated point. It does not modify + // the input. + Neg(*G1El) *G1El + + // ScalarMul returns the scalar multiplication of the point by a scalar. It + // does not modify the inputs. + ScalarMul(*G1El, *S) *G1El + + // ScalarMulBase returns the scalar multiplication of the curve base point + // by a scalar. It does not modify the scalar. + ScalarMulBase(*S) *G1El + + // MultiScalarMul computes the sum ∑ s_i P_i for the input + // scalars s_i and points P_i. It returns an error if the input lengths + // mismatch. + MultiScalarMul([]*G1El, []*S) (*G1El, error) + + // MarshalG1 returns the binary decomposition G1.X || G1.Y. It matches the + // output of gnark-crypto's Marshal method on G1 points. + MarshalG1(G1El) []frontend.Variable + + // MarshalScalar returns the binary decomposition of the argument. + MarshalScalar(S) []frontend.Variable +} + +// Pairing allows to compute the bi-linear pairing of G1 and G2 elements. +// Additionally, the interface provides steps used in pairing computation and a +// dedicated optimised pairing check. +type Pairing[G1El G1ElementT, G2El G2ElementT, GtEl GtElementT] interface { + // MillerLoop computes the Miller loop of the input pairs. It returns error + // when the inputs are of mismatching length. It does not modify the inputs. + MillerLoop([]*G1El, []*G2El) (*GtEl, error) + + // FinalExponentiation computes the final step in the pairing. It does not + // modify the inputs. + FinalExponentiation(*GtEl) *GtEl + + // Pair computes the full pairing of the input pairs. It returns error when + // the inputs are of mismatching length. It does not modify the inputs. + Pair([]*G1El, []*G2El) (*GtEl, error) + + // PairingCheck asserts that the pairing result is 1. It returns an error + // when the inputs are of mismatching length. It does not modify the inputs. + PairingCheck([]*G1El, []*G2El) error + + // AssertIsEqual asserts the equality of the inputs. + AssertIsEqual(*GtEl, *GtEl) +} diff --git a/std/algebra/native/fields_bls12377/e12.go b/std/algebra/native/fields_bls12377/e12.go index ed787978d5..3dd9675bd7 100644 --- a/std/algebra/native/fields_bls12377/e12.go +++ b/std/algebra/native/fields_bls12377/e12.go @@ -263,28 +263,42 @@ func (e *E12) CyclotomicSquareCompressed(api frontend.API, x E12) *E12 { // Decompress Karabina's cyclotomic square result func (e *E12) Decompress(api frontend.API, x E12) *E12 { - // TODO: hadle the g3==0 case with MUX - var t [3]E2 + var _t [2]E2 var one E2 one.SetOne() - // t0 = g1² + // if g3 == 0 + // t0 = 2 * g1 * g5 + // t1 = g2 + selector1 := x.C1.B0.IsZero(api) + _t[0].Square(api, x.C0.B1) + _t[0].Double(api, _t[0]) + _t[1] = x.C0.B2 + + // if g3 != 0 + // t0 = E * g5^2 + 3 * g1^2 - 2 * g2 + // t1 = 4 * g3 t[0].Square(api, x.C0.B1) - // t1 = 3 * g1² - 2 * g2 t[1].Sub(api, t[0], x.C0.B2). Double(api, t[1]). Add(api, t[1], t[0]) - // t0 = E * g5² + t1 t[2].Square(api, x.C1.B2) t[0].MulByNonResidue(api, t[2]). Add(api, t[0], t[1]) - // t1 = 4 * g3 t[1].Double(api, x.C1.B0). 
Double(api, t[1]) - // z4 = g4 / t1 + + // g4 = (E * g5^2 + 3 * g1^2 - 2 * g2)/4g3 or (2 * g1 * g5)/g2 + t[0].Select(api, selector1, _t[0], t[0]) + t[1].Select(api, selector1, _t[1], t[1]) + // if g2 == g3 == 0 we do nothing as DivUnchecked sets g4 to 0 + // and all gi to 0 returning, correctly in this case, at the end: + // e = E * (2 * g4² + g3 * g5 - 3 * g2 * g1) + 1 = 1 + e.C1.B1.DivUnchecked(api, t[0], t[1]) + // Rest of the computation for all cases // t1 = g2 * g1 t[1].Mul(api, x.C0.B2, x.C0.B1) // t2 = 2 * g4² - 3 * g2 * g1 @@ -522,18 +536,8 @@ func (e *E12) DivUnchecked(api frontend.API, e1, e2 E12) *E12 { // Select sets e to r1 if b=1, r2 otherwise func (e *E12) Select(api frontend.API, b frontend.Variable, r1, r2 E12) *E12 { - e.C0.B0.A0 = api.Select(b, r1.C0.B0.A0, r2.C0.B0.A0) - e.C0.B0.A1 = api.Select(b, r1.C0.B0.A1, r2.C0.B0.A1) - e.C0.B1.A0 = api.Select(b, r1.C0.B1.A0, r2.C0.B1.A0) - e.C0.B1.A1 = api.Select(b, r1.C0.B1.A1, r2.C0.B1.A1) - e.C0.B2.A0 = api.Select(b, r1.C0.B2.A0, r2.C0.B2.A0) - e.C0.B2.A1 = api.Select(b, r1.C0.B2.A1, r2.C0.B2.A1) - e.C1.B0.A0 = api.Select(b, r1.C1.B0.A0, r2.C1.B0.A0) - e.C1.B0.A1 = api.Select(b, r1.C1.B0.A1, r2.C1.B0.A1) - e.C1.B1.A0 = api.Select(b, r1.C1.B1.A0, r2.C1.B1.A0) - e.C1.B1.A1 = api.Select(b, r1.C1.B1.A1, r2.C1.B1.A1) - e.C1.B2.A0 = api.Select(b, r1.C1.B2.A0, r2.C1.B2.A0) - e.C1.B2.A1 = api.Select(b, r1.C1.B2.A1, r2.C1.B2.A1) + e.C0.Select(api, b, r1.C0, r2.C0) + e.C1.Select(api, b, r1.C1, r2.C1) return e } diff --git a/std/algebra/native/fields_bls12377/e2.go b/std/algebra/native/fields_bls12377/e2.go index fbb282fae8..a536291e9a 100644 --- a/std/algebra/native/fields_bls12377/e2.go +++ b/std/algebra/native/fields_bls12377/e2.go @@ -45,6 +45,11 @@ func (e *E2) SetOne() *E2 { return e } +// IsZero returns 1 if the element is equal to 0 and 0 otherwise +func (e *E2) IsZero(api frontend.API) frontend.Variable { + return api.And(api.IsZero(e.A0), api.IsZero(e.A1)) +} + func (e *E2) assign(e1 []frontend.Variable) { e.A0 = e1[0] e.A1 = e1[1] diff --git a/std/algebra/native/fields_bls12377/e6.go b/std/algebra/native/fields_bls12377/e6.go index f11dbbd9ee..d27038002a 100644 --- a/std/algebra/native/fields_bls12377/e6.go +++ b/std/algebra/native/fields_bls12377/e6.go @@ -365,3 +365,13 @@ func Mul01By01(api frontend.API, c0, c1, d0, d1 E2) *E6 { B2: t2, } } + +// Select sets e to r1 if b=1, r2 otherwise +func (e *E6) Select(api frontend.API, b frontend.Variable, r1, r2 E6) *E6 { + + e.B0.Select(api, b, r1.B0, r2.B0) + e.B1.Select(api, b, r1.B1, r2.B1) + e.B2.Select(api, b, r1.B2, r2.B2) + + return e +} diff --git a/std/algebra/native/fields_bls24315/e2.go b/std/algebra/native/fields_bls24315/e2.go index 441f575948..eb548b5f8f 100644 --- a/std/algebra/native/fields_bls24315/e2.go +++ b/std/algebra/native/fields_bls24315/e2.go @@ -45,6 +45,11 @@ func (e *E2) SetOne() *E2 { return e } +// IsZero returns 1 if the element is equal to 0 and 0 otherwise +func (e *E2) IsZero(api frontend.API) frontend.Variable { + return api.And(api.IsZero(e.A0), api.IsZero(e.A1)) +} + func (e *E2) assign(e1 []frontend.Variable) { e.A0 = e1[0] e.A1 = e1[1] diff --git a/std/algebra/native/fields_bls24315/e24.go b/std/algebra/native/fields_bls24315/e24.go index ea21cf09c5..171ae164c7 100644 --- a/std/algebra/native/fields_bls24315/e24.go +++ b/std/algebra/native/fields_bls24315/e24.go @@ -261,23 +261,38 @@ func (e *E24) CyclotomicSquareCompressed(api frontend.API, x E24) *E24 { func (e *E24) Decompress(api frontend.API, x E24) *E24 { var t [3]E4 + var _t [2]E4 var one 
E4 one.SetOne() - // t0 = g1² + // if g3 == 0 + // t0 = 2 * g1 * g5 + // t1 = g2 + selector1 := x.D1.C0.IsZero(api) + _t[0].Square(api, x.D0.C1) + _t[0].Double(api, _t[0]) + _t[1] = x.D0.C2 + + // if g3 != 0 + // t0 = E * g5^2 + 3 * g1^2 - 2 * g2 + // t1 = 4 * g3 t[0].Square(api, x.D0.C1) - // t1 = 3 * g1² - 2 * g2 t[1].Sub(api, t[0], x.D0.C2). Double(api, t[1]). Add(api, t[1], t[0]) - // t0 = E * g5² + t1 t[2].Square(api, x.D1.C2) t[0].MulByNonResidue(api, t[2]). Add(api, t[0], t[1]) - // t1 = 4 * g3 t[1].Double(api, x.D1.C0). Double(api, t[1]) - // z4 = g4 / t1 + + // g4 = (E * g5^2 + 3 * g1^2 - 2 * g2)/4g3 or (2 * g1 * g5)/g2 + t[0].Select(api, selector1, _t[0], t[0]) + t[1].Select(api, selector1, _t[1], t[1]) + // if g2 == g3 == 0 we do nothing as DivUnchecked sets g4 to 0 + // and all gi to 0 returning, correctly in this case, at the end: + // e = E * (2 * g4² + g3 * g5 - 3 * g2 * g1) + 1 = 1 + e.D1.C1.DivUnchecked(api, t[0], t[1]) // t1 = g2 * g1 diff --git a/std/algebra/native/fields_bls24315/e4.go b/std/algebra/native/fields_bls24315/e4.go index f6fc78c405..a01c6b24dd 100644 --- a/std/algebra/native/fields_bls24315/e4.go +++ b/std/algebra/native/fields_bls24315/e4.go @@ -43,6 +43,11 @@ func (e *E4) SetOne() *E4 { return e } +// IsZero returns 1 if the element is equal to 0 and 0 otherwise +func (e *E4) IsZero(api frontend.API) frontend.Variable { + return api.And(e.B0.IsZero(api), e.B1.IsZero(api)) +} + func (e *E4) assign(e1 []frontend.Variable) { e.B0.A0 = e1[0] e.B0.A1 = e1[1] diff --git a/std/algebra/native/sw_bls12377/g1_test.go b/std/algebra/native/sw_bls12377/g1_test.go index 7583904b9c..a1056cbe25 100644 --- a/std/algebra/native/sw_bls12377/g1_test.go +++ b/std/algebra/native/sw_bls12377/g1_test.go @@ -21,6 +21,7 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fp" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/frontend" @@ -31,6 +32,76 @@ import ( bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" ) +// ------------------------------------------------------------------------------------------------- +// Marshalling + +type MarshalScalarTest struct { + X frontend.Variable + R [fr.Bytes * 8]frontend.Variable +} + +func (c *MarshalScalarTest) Define(api frontend.API) error { + ec := NewCurve(api) + r := ec.MarshalScalar(c.X) + for i := range c.R { + api.AssertIsEqual(r[i], c.R[i]) + } + return nil +} + +func TestMarshalScalar(t *testing.T) { + assert := test.NewAssert(t) + var r fr.Element + r.SetRandom() + rBytes := r.Marshal() + var witness MarshalScalarTest + witness.X = r.String() + for i := 0; i < fr.Bytes; i++ { + for j := 0; j < 8; j++ { + witness.R[i*8+j] = (rBytes[i] >> (7 - j)) & 1 + } + } + var circuit MarshalScalarTest + assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_761)) +} + +type MarshalG1Test struct { + P G1Affine + R [2 * 8 * fp.Bytes]frontend.Variable +} + +func (c *MarshalG1Test) Define(api frontend.API) error { + ec := NewCurve(api) + // the bits are layed out exactly as in gnark-crypto + r := ec.MarshalG1(c.P) + for i := range c.R { + api.AssertIsEqual(r[i], c.R[i]) + } + return nil +} + +func TestMarshalG1(t *testing.T) { + assert := test.NewAssert(t) + + // sample a random point + var r fr.Element + r.SetRandom() + var br big.Int + r.BigInt(&br) + _, _, g, _ := bls12377.Generators() + g.ScalarMultiplication(&g, &br) + gBytes := g.Marshal() + var witness MarshalG1Test 
+ witness.P.Assign(&g) + for i := 0; i < 96; i++ { + for j := 0; j < 8; j++ { + witness.R[i*8+j] = (gBytes[i] >> (7 - j)) & 1 + } + } + var circuit MarshalG1Test + assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_761)) +} + // ------------------------------------------------------------------------------------------------- // Add jacobian diff --git a/std/algebra/native/sw_bls12377/pairing.go b/std/algebra/native/sw_bls12377/pairing.go index d22fe3be26..271280aee7 100644 --- a/std/algebra/native/sw_bls12377/pairing.go +++ b/std/algebra/native/sw_bls12377/pairing.go @@ -273,6 +273,22 @@ func Pair(api frontend.API, P []G1Affine, Q []G2Affine) (GT, error) { return FinalExponentiation(api, f), nil } +// PairingCheck calculates the reduced pairing for a set of points and asserts that the result is One +// ∏ᵢ e(Pᵢ, Qᵢ) =? 1 +// +// This function doesn't check that the inputs are in the correct subgroups. See AssertIsOnG1 and AssertIsOnG2. +func PairingCheck(api frontend.API, P []G1Affine, Q []G2Affine) error { + f, err := Pair(api, P, Q) + if err != nil { + return err + } + var one GT + one.SetOne() + f.AssertIsEqual(api, one) + + return nil +} + // doubleAndAddStep doubles p1 and adds p2 to the result in affine coordinates, and evaluates the line in Miller loop // https://eprint.iacr.org/2022/1162 (Section 6.1) func doubleAndAddStep(api frontend.API, p1, p2 *G2Affine) (G2Affine, lineEvaluation, lineEvaluation) { diff --git a/std/algebra/native/sw_bls12377/pairing2.go b/std/algebra/native/sw_bls12377/pairing2.go new file mode 100644 index 0000000000..f2916e5e79 --- /dev/null +++ b/std/algebra/native/sw_bls12377/pairing2.go @@ -0,0 +1,252 @@ +package sw_bls12377 + +import ( + "fmt" + + "github.com/consensys/gnark-crypto/ecc" + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/algebra/native/fields_bls12377" + "github.com/consensys/gnark/std/math/bits" +) + +// Curve allows G1 operations in BLS12-377. +type Curve struct { + api frontend.API +} + +// NewCurve initializes a new [Curve] instance. +func NewCurve(api frontend.API) *Curve { + return &Curve{ + api: api, + } +} + +// MarshalScalar returns the binary decomposition of the scalar in big-endian bit +// order. It is compatible with the scalar marshalling in gnark-crypto. +func (c *Curve) MarshalScalar(s Scalar) []frontend.Variable { + nbBits := 8 * ((ecc.BLS12_377.ScalarField().BitLen() + 7) / 8) + x := bits.ToBinary(c.api, s, bits.WithNbDigits(nbBits)) + for i, j := 0, nbBits-1; i < j; { + x[i], x[j] = x[j], x[i] + i++ + j-- + } + return x +} + +// MarshalG1 returns [P.X || P.Y] in binary. Both P.X and P.Y are +// in big-endian bit order, compatible with the point marshalling in gnark-crypto. +func (c *Curve) MarshalG1(P G1Affine) []frontend.Variable { + nbBits := 8 * ((ecc.BLS12_377.BaseField().BitLen() + 7) / 8) + res := make([]frontend.Variable, 2*nbBits) + x := bits.ToBinary(c.api, P.X, bits.WithNbDigits(nbBits)) + y := bits.ToBinary(c.api, P.Y, bits.WithNbDigits(nbBits)) + for i := 0; i < nbBits; i++ { + res[i] = x[nbBits-1-i] + res[i+nbBits] = y[nbBits-1-i] + } + return res +} + +// Add points P and Q and return the result. Does not modify the inputs. +func (c *Curve) Add(P, Q *G1Affine) *G1Affine { + res := &G1Affine{ + X: P.X, + Y: P.Y, + } + res.AddAssign(c.api, *Q) + return res +} + +// AssertIsEqual asserts the equality of P and Q. +func (c *Curve) AssertIsEqual(P, Q *G1Affine) { + P.AssertIsEqual(c.api, *Q) +} + +// Neg negates P and returns the result. Does not modify P. 
+func (c *Curve) Neg(P *G1Affine) *G1Affine { + res := &G1Affine{ + X: P.X, + Y: P.Y, + } + res.Neg(c.api, *P) + return res +} + +// ScalarMul computes scalar*P and returns the result. It doesn't modify the +// inputs. +func (c *Curve) ScalarMul(P *G1Affine, scalar *Scalar) *G1Affine { + res := &G1Affine{ + X: P.X, + Y: P.Y, + } + res.ScalarMul(c.api, *P, *scalar) + return res +} + +// ScalarMulBase computes scalar*G where G is the standard base point of the +// curve. It doesn't modify the scalar. +func (c *Curve) ScalarMulBase(scalar *Scalar) *G1Affine { + res := new(G1Affine) + res.ScalarMulBase(c.api, *scalar) + return res +} + +// MultiScalarMul computes ∑scalars_i * P_i and returns it. It doesn't modify +// the inputs. It returns an error if there is a mismatch in the lengths of the +// inputs. +func (c *Curve) MultiScalarMul(P []*G1Affine, scalars []*Scalar) (*G1Affine, error) { + if len(P) != len(scalars) { + return nil, fmt.Errorf("mismatching points and scalars slice lengths") + } + if len(P) == 0 { + return &G1Affine{ + X: 0, + Y: 0, + }, nil + } + res := c.ScalarMul(P[0], scalars[0]) + for i := 1; i < len(P); i++ { + q := c.ScalarMul(P[i], scalars[i]) + c.Add(res, q) + } + return res, nil +} + +// Pairing allows computing pairing-related operations in BLS12-377. +type Pairing struct { + api frontend.API +} + +// NewPairing initializes a [Pairing] instance. +func NewPairing(api frontend.API) *Pairing { + return &Pairing{ + api: api, + } +} + +// MillerLoop computes the Miller loop between the pairs of inputs. It doesn't +// modify the inputs. It returns an error if there is a mismatch betwen the +// lengths of the inputs. +func (p *Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GT, error) { + inP := make([]G1Affine, len(P)) + for i := range P { + inP[i] = *P[i] + } + inQ := make([]G2Affine, len(Q)) + for i := range Q { + inQ[i] = *Q[i] + } + res, err := MillerLoop(p.api, inP, inQ) + return &res, err +} + +// FinalExponentiation performs the final exponentiation on the target group +// element. It doesn't modify the input. +func (p *Pairing) FinalExponentiation(e *GT) *GT { + res := FinalExponentiation(p.api, *e) + return &res +} + +// Pair computes a full multi-pairing on the input pairs. +func (p *Pairing) Pair(P []*G1Affine, Q []*G2Affine) (*GT, error) { + inP := make([]G1Affine, len(P)) + for i := range P { + inP[i] = *P[i] + } + inQ := make([]G2Affine, len(Q)) + for i := range Q { + inQ[i] = *Q[i] + } + res, err := Pair(p.api, inP, inQ) + return &res, err +} + +// PairingCheck computes the multi-pairing of the input pairs and asserts that +// the result is an identity element in the target group. It returns an error if +// there is a mismatch between the lengths of the inputs. +func (p *Pairing) PairingCheck(P []*G1Affine, Q []*G2Affine) error { + inP := make([]G1Affine, len(P)) + for i := range P { + inP[i] = *P[i] + } + inQ := make([]G2Affine, len(Q)) + for i := range Q { + inQ[i] = *Q[i] + } + res, err := Pair(p.api, inP, inQ) + if err != nil { + return err + } + var one fields_bls12377.E12 + one.SetOne() + res.AssertIsEqual(p.api, one) + return nil +} + +// AssertIsEqual asserts the equality of the target group elements. +func (p *Pairing) AssertIsEqual(e1, e2 *GT) { + e1.AssertIsEqual(p.api, *e2) +} + +// NewG1Affine allocates a witness from the native G1 element and returns it. 
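+// The coordinates are only re-typed: BLS12-377 is the inner curve of a 2-chain
+// whose base field equals the scalar field of BW6-761, so a native
+// bls12377.G1Affine coordinate is directly a field element of the outer circuit.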
+func NewG1Affine(v bls12377.G1Affine) G1Affine { + return G1Affine{ + X: (fr_bw6761.Element)(v.X), + Y: (fr_bw6761.Element)(v.Y), + } +} + +// NewG2Affine allocates a witness from the native G2 element and returns it. +func NewG2Affine(v bls12377.G2Affine) G2Affine { + return G2Affine{ + X: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.X.A0), + A1: (fr_bw6761.Element)(v.X.A1), + }, + Y: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.Y.A0), + A1: (fr_bw6761.Element)(v.Y.A1), + }, + } +} + +// NewGTEl allocates a witness from the native target group element and returns it. +func NewGTEl(v bls12377.GT) GT { + return GT{ + C0: fields_bls12377.E6{ + B0: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.C0.B0.A0), + A1: (fr_bw6761.Element)(v.C0.B0.A1), + }, + B1: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.C0.B1.A0), + A1: (fr_bw6761.Element)(v.C0.B1.A1), + }, + B2: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.C0.B2.A0), + A1: (fr_bw6761.Element)(v.C0.B2.A1), + }, + }, + C1: fields_bls12377.E6{ + B0: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.C1.B0.A0), + A1: (fr_bw6761.Element)(v.C1.B0.A1), + }, + B1: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.C1.B1.A0), + A1: (fr_bw6761.Element)(v.C1.B1.A1), + }, + B2: fields_bls12377.E2{ + A0: (fr_bw6761.Element)(v.C1.B2.A0), + A1: (fr_bw6761.Element)(v.C1.B2.A1), + }, + }, + } +} + +// Scalar is a scalar in the groups. As the implementation is defined on a +// 2-chain, then this type is an alias to [frontend.Variable]. +type Scalar = frontend.Variable diff --git a/std/algebra/native/sw_bls12377/pairing_test.go b/std/algebra/native/sw_bls12377/pairing_test.go index 9f14f67dd6..2c7b6ef0bf 100644 --- a/std/algebra/native/sw_bls12377/pairing_test.go +++ b/std/algebra/native/sw_bls12377/pairing_test.go @@ -17,6 +17,7 @@ limitations under the License. 
package sw_bls12377 import ( + "fmt" "math/big" "testing" @@ -156,6 +157,41 @@ func TestPairingFixedBLS377(t *testing.T) { } +type pairingCheckBLS377 struct { + P1, P2 G1Affine `gnark:",public"` + Q1, Q2 G2Affine +} + +func (circuit *pairingCheckBLS377) Define(api frontend.API) error { + + err := PairingCheck(api, []G1Affine{circuit.P1, circuit.P2}, []G2Affine{circuit.Q1, circuit.Q2}) + + if err != nil { + return fmt.Errorf("pair: %w", err) + } + + return nil +} + +func TestPairingCheckBLS377(t *testing.T) { + + // pairing test data + P, Q := pairingCheckData() + + // create cs + var circuit, witness pairingCheckBLS377 + + // assign values to witness + witness.P1.Assign(&P[0]) + witness.P2.Assign(&P[1]) + witness.Q1.Assign(&Q[0]) + witness.Q2.Assign(&Q[1]) + + assert := test.NewAssert(t) + assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_761)) + +} + // utils func pairingData() (P bls12377.G1Affine, Q bls12377.G2Affine, milRes, pairingRes bls12377.GT) { _, _, P, Q = bls12377.Generators() @@ -164,6 +200,14 @@ func pairingData() (P bls12377.G1Affine, Q bls12377.G2Affine, milRes, pairingRes return } +func pairingCheckData() (P [2]bls12377.G1Affine, Q [2]bls12377.G2Affine) { + _, _, P[0], Q[0] = bls12377.Generators() + P[1].Neg(&P[0]) + Q[1].Set(&Q[0]) + + return +} + func triplePairingData() (P [3]bls12377.G1Affine, Q [3]bls12377.G2Affine, pairingRes bls12377.GT) { _, _, P[0], Q[0] = bls12377.Generators() var u, v fr.Element diff --git a/std/algebra/native/sw_bls24315/g1_test.go b/std/algebra/native/sw_bls24315/g1_test.go index fb94ad8a16..3653f66f06 100644 --- a/std/algebra/native/sw_bls24315/g1_test.go +++ b/std/algebra/native/sw_bls24315/g1_test.go @@ -21,6 +21,7 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fp" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/frontend" @@ -31,6 +32,78 @@ import ( bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" ) +// ------------------------------------------------------------------------------------------------- +// Marshalling + +type MarshalScalarTest struct { + X frontend.Variable + R [fr.Bytes * 8]frontend.Variable +} + +func (c *MarshalScalarTest) Define(api frontend.API) error { + ec := NewCurve(api) + r := ec.MarshalScalar(c.X) + for i := range c.R { + api.AssertIsEqual(r[i], c.R[i]) + } + return nil +} + +func TestMarshalScalar(t *testing.T) { + assert := test.NewAssert(t) + var r fr.Element + r.SetRandom() + rBytes := r.Marshal() + var witness MarshalScalarTest + witness.X = r.String() + for i := 0; i < fr.Bytes; i++ { + for j := 0; j < 8; j++ { + witness.R[i*8+j] = (rBytes[i] >> (7 - j)) & 1 + } + } + var circuit MarshalScalarTest + assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_633)) +} + +type MarshalG1Test struct { + P G1Affine + R [2 * 8 * fp.Bytes]frontend.Variable +} + +func (c *MarshalG1Test) Define(api frontend.API) error { + ec := NewCurve(api) + // we want to get the same output as gnark-crypto's marshal. + // It's a point on bls12-377 so the number of bytes is 96, as the + // field of definition of bls12-377 is 48 bytes long. 
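+	// (Note: this file targets BLS24-315, whose field of definition is 40 bytes,
+	// so the marshalled point is 2*40 = 80 bytes, matching the loop bound in
+	// TestMarshalG1 below.)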
+ r := ec.MarshalG1(c.P) + for i := range c.R { + api.AssertIsEqual(r[i], c.R[i]) + } + return nil +} + +func TestMarshalG1(t *testing.T) { + assert := test.NewAssert(t) + + // sample a random point + var r fr.Element + r.SetRandom() + var br big.Int + r.BigInt(&br) + _, _, g, _ := bls24315.Generators() + g.ScalarMultiplication(&g, &br) + gBytes := g.Marshal() + var witness MarshalG1Test + witness.P.Assign(&g) + for i := 0; i < 80; i++ { + for j := 0; j < 8; j++ { + witness.R[i*8+j] = (gBytes[i] >> (7 - j)) & 1 + } + } + var circuit MarshalG1Test + assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_633)) +} + // ------------------------------------------------------------------------------------------------- // Add jacobian diff --git a/std/algebra/native/sw_bls24315/pairing.go b/std/algebra/native/sw_bls24315/pairing.go index 4aa381e4ba..4f9f893e66 100644 --- a/std/algebra/native/sw_bls24315/pairing.go +++ b/std/algebra/native/sw_bls24315/pairing.go @@ -297,6 +297,22 @@ func Pair(api frontend.API, P []G1Affine, Q []G2Affine) (GT, error) { return FinalExponentiation(api, f), nil } +// PairingCheck calculates the reduced pairing for a set of points and asserts if the result is One +// ∏ᵢ e(Pᵢ, Qᵢ) =? 1 +// +// This function doesn't check that the inputs are in the correct subgroups. See AssertIsOnG1 and AssertIsOnG2. +func PairingCheck(api frontend.API, P []G1Affine, Q []G2Affine) error { + f, err := Pair(api, P, Q) + if err != nil { + return err + } + var one GT + one.SetOne() + f.AssertIsEqual(api, one) + + return nil +} + // doubleAndAddStep doubles p1 and adds p2 to the result in affine coordinates, and evaluates the line in Miller loop // https://eprint.iacr.org/2022/1162 (Section 6.1) func doubleAndAddStep(api frontend.API, p1, p2 *G2Affine) (G2Affine, lineEvaluation, lineEvaluation) { diff --git a/std/algebra/native/sw_bls24315/pairing2.go b/std/algebra/native/sw_bls24315/pairing2.go new file mode 100644 index 0000000000..0e0ec27674 --- /dev/null +++ b/std/algebra/native/sw_bls24315/pairing2.go @@ -0,0 +1,300 @@ +package sw_bls24315 + +import ( + "fmt" + + "github.com/consensys/gnark-crypto/ecc" + bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" + fr_bw6633 "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/algebra/native/fields_bls24315" + "github.com/consensys/gnark/std/math/bits" +) + +// Curve allows G1 operations in BLS24-315. +type Curve struct { + api frontend.API +} + +// NewCurve initializes a new [Curve] instance. +func NewCurve(api frontend.API) *Curve { + return &Curve{ + api: api, + } +} + +// MarshalScalar returns +func (c *Curve) MarshalScalar(s Scalar) []frontend.Variable { + nbBits := 8 * ((ecc.BLS24_315.ScalarField().BitLen() + 7) / 8) + x := bits.ToBinary(c.api, s, bits.WithNbDigits(nbBits)) + for i, j := 0, nbBits-1; i < j; { + x[i], x[j] = x[j], x[i] + i++ + j-- + } + return x +} + +// MarshalG1 returns [P.X || P.Y] in binary. Both P.X and P.Y are +// in little endian. +func (c *Curve) MarshalG1(P G1Affine) []frontend.Variable { + nbBits := 8 * ((ecc.BLS24_315.BaseField().BitLen() + 7) / 8) + res := make([]frontend.Variable, 2*nbBits) + x := bits.ToBinary(c.api, P.X, bits.WithNbDigits(nbBits)) + y := bits.ToBinary(c.api, P.Y, bits.WithNbDigits(nbBits)) + for i := 0; i < nbBits; i++ { + res[i] = x[nbBits-1-i] + res[i+nbBits] = y[nbBits-1-i] + } + return res +} + +// Add points P and Q and return the result. Does not modify the inputs. 
+func (c *Curve) Add(P, Q *G1Affine) *G1Affine { + res := &G1Affine{ + X: P.X, + Y: P.Y, + } + res.AddAssign(c.api, *Q) + return res +} + +// AssertIsEqual asserts the equality of P and Q. +func (c *Curve) AssertIsEqual(P, Q *G1Affine) { + P.AssertIsEqual(c.api, *Q) + panic("todo") +} + +// Neg negates P and returns the result. Does not modify P. +func (c *Curve) Neg(P *G1Affine) *G1Affine { + res := &G1Affine{ + X: P.X, + Y: P.Y, + } + res.Neg(c.api, *P) + return res +} + +// ScalarMul computes scalar*P and returns the result. It doesn't modify the +// inputs. +func (c *Curve) ScalarMul(P *G1Affine, scalar *Scalar) *G1Affine { + res := &G1Affine{ + X: P.X, + Y: P.Y, + } + res.ScalarMul(c.api, *P, *scalar) + return res +} + +// ScalarMulBase computes scalar*G where G is the standard base point of the +// curve. It doesn't modify the scalar. +func (c *Curve) ScalarMulBase(scalar *Scalar) *G1Affine { + res := new(G1Affine) + res.ScalarMulBase(c.api, *scalar) + return res +} + +// MultiScalarMul computes ∑scalars_i * P_i and returns it. It doesn't modify +// the inputs. It returns an error if there is a mismatch in the lengths of the +// inputs. +func (c *Curve) MultiScalarMul(P []*G1Affine, scalars []*Scalar) (*G1Affine, error) { + if len(P) != len(scalars) { + return nil, fmt.Errorf("mismatching points and scalars slice lengths") + } + if len(P) == 0 { + return &G1Affine{ + X: 0, + Y: 0, + }, nil + } + res := c.ScalarMul(P[0], scalars[0]) + for i := 1; i < len(P); i++ { + q := c.ScalarMul(P[i], scalars[i]) + c.Add(res, q) + } + return res, nil +} + +// Pairing allows computing pairing-related operations in BLS24-315. +type Pairing struct { + api frontend.API +} + +// NewPairing initializes a [Pairing] instance. +func NewPairing(api frontend.API) *Pairing { + return &Pairing{ + api: api, + } +} + +// MillerLoop computes the Miller loop between the pairs of inputs. It doesn't +// modify the inputs. It returns an error if there is a mismatch betwen the +// lengths of the inputs. +func (p *Pairing) MillerLoop(P []*G1Affine, Q []*G2Affine) (*GT, error) { + inP := make([]G1Affine, len(P)) + for i := range P { + inP[i] = *P[i] + } + inQ := make([]G2Affine, len(Q)) + for i := range Q { + inQ[i] = *Q[i] + } + res, err := MillerLoop(p.api, inP, inQ) + return &res, err +} + +// FinalExponentiation performs the final exponentiation on the target group +// element. It doesn't modify the input. +func (p *Pairing) FinalExponentiation(e *GT) *GT { + res := FinalExponentiation(p.api, *e) + return &res +} + +// Pair computes a full multi-pairing on the input pairs. +func (p *Pairing) Pair(P []*G1Affine, Q []*G2Affine) (*GT, error) { + inP := make([]G1Affine, len(P)) + for i := range P { + inP[i] = *P[i] + } + inQ := make([]G2Affine, len(Q)) + for i := range Q { + inQ[i] = *Q[i] + } + res, err := Pair(p.api, inP, inQ) + return &res, err +} + +// PairingCheck computes the multi-pairing of the input pairs and asserts that +// the result is an identity element in the target group. It returns an error if +// there is a mismatch between the lengths of the inputs. +func (p *Pairing) PairingCheck(P []*G1Affine, Q []*G2Affine) error { + inP := make([]G1Affine, len(P)) + for i := range P { + inP[i] = *P[i] + } + inQ := make([]G2Affine, len(Q)) + for i := range Q { + inQ[i] = *Q[i] + } + res, err := Pair(p.api, inP, inQ) + if err != nil { + return err + } + var one fields_bls24315.E24 + one.SetOne() + res.AssertIsEqual(p.api, one) + return nil +} + +// AssertIsEqual asserts the equality of the target group elements. 
+func (p *Pairing) AssertIsEqual(e1, e2 *GT) { + e1.AssertIsEqual(p.api, *e2) +} + +// NewG1Affine allocates a witness from the native G1 element and returns it. +func NewG1Affine(v bls24315.G1Affine) G1Affine { + return G1Affine{ + X: (fr_bw6633.Element)(v.X), + Y: (fr_bw6633.Element)(v.Y), + } +} + +// NewG2Affine allocates a witness from the native G2 element and returns it. +func NewG2Affine(v bls24315.G2Affine) G2Affine { + return G2Affine{ + X: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.X.B0.A0), + A1: (fr_bw6633.Element)(v.X.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.X.B1.A0), + A1: (fr_bw6633.Element)(v.X.B1.A1), + }, + }, + Y: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.Y.B0.A0), + A1: (fr_bw6633.Element)(v.Y.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.Y.B1.A0), + A1: (fr_bw6633.Element)(v.Y.B1.A1), + }, + }, + } +} + +// NewGTEl allocates a witness from the native target group element and returns it. +func NewGTEl(v bls24315.GT) GT { + return GT{ + D0: fields_bls24315.E12{ + C0: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D0.C0.B0.A0), + A1: (fr_bw6633.Element)(v.D0.C0.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D0.C0.B1.A0), + A1: (fr_bw6633.Element)(v.D0.C0.B1.A1), + }, + }, + C1: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D0.C1.B0.A0), + A1: (fr_bw6633.Element)(v.D0.C1.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D0.C1.B1.A0), + A1: (fr_bw6633.Element)(v.D0.C1.B1.A1), + }, + }, + C2: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D0.C2.B0.A0), + A1: (fr_bw6633.Element)(v.D0.C2.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D0.C2.B1.A0), + A1: (fr_bw6633.Element)(v.D0.C2.B1.A1), + }, + }, + }, + D1: fields_bls24315.E12{ + C0: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D1.C0.B0.A0), + A1: (fr_bw6633.Element)(v.D1.C0.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D1.C0.B1.A0), + A1: (fr_bw6633.Element)(v.D1.C0.B1.A1), + }, + }, + C1: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D1.C1.B0.A0), + A1: (fr_bw6633.Element)(v.D1.C1.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D1.C1.B1.A0), + A1: (fr_bw6633.Element)(v.D1.C1.B1.A1), + }, + }, + C2: fields_bls24315.E4{ + B0: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D1.C2.B0.A0), + A1: (fr_bw6633.Element)(v.D1.C2.B0.A1), + }, + B1: fields_bls24315.E2{ + A0: (fr_bw6633.Element)(v.D1.C2.B1.A0), + A1: (fr_bw6633.Element)(v.D1.C2.B1.A1), + }, + }, + }, + } +} + +// Scalar is a scalar in the groups. As the implementation is defined on a +// 2-chain, then this type is an alias to [frontend.Variable]. +type Scalar = frontend.Variable diff --git a/std/algebra/native/sw_bls24315/pairing_test.go b/std/algebra/native/sw_bls24315/pairing_test.go index 91362c1d97..a8b11e4bd0 100644 --- a/std/algebra/native/sw_bls24315/pairing_test.go +++ b/std/algebra/native/sw_bls24315/pairing_test.go @@ -17,6 +17,7 @@ limitations under the License. 
package sw_bls24315 import ( + "fmt" "math/big" "testing" @@ -157,6 +158,41 @@ func TestPairingFixedBLS315(t *testing.T) { } +type pairingCheckBLS315 struct { + P1, P2 G1Affine `gnark:",public"` + Q1, Q2 G2Affine +} + +func (circuit *pairingCheckBLS315) Define(api frontend.API) error { + + err := PairingCheck(api, []G1Affine{circuit.P1, circuit.P2}, []G2Affine{circuit.Q1, circuit.Q2}) + + if err != nil { + return fmt.Errorf("pair: %w", err) + } + + return nil +} + +func TestPairingCheckBLS315(t *testing.T) { + + // pairing test data + P, Q := pairingCheckData() + + // create cs + var circuit, witness pairingCheckBLS315 + + // assign values to witness + witness.P1.Assign(&P[0]) + witness.P2.Assign(&P[1]) + witness.Q1.Assign(&Q[0]) + witness.Q2.Assign(&Q[1]) + + assert := test.NewAssert(t) + assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_633)) + +} + // utils func pairingData() (P bls24315.G1Affine, Q bls24315.G2Affine, milRes bls24315.E24, pairingRes bls24315.GT) { _, _, P, Q = bls24315.Generators() @@ -165,6 +201,14 @@ func pairingData() (P bls24315.G1Affine, Q bls24315.G2Affine, milRes bls24315.E2 return } +func pairingCheckData() (P [2]bls24315.G1Affine, Q [2]bls24315.G2Affine) { + _, _, P[0], Q[0] = bls24315.Generators() + P[1].Neg(&P[0]) + Q[1].Set(&Q[0]) + + return +} + func triplePairingData() (P [3]bls24315.G1Affine, Q [3]bls24315.G2Affine, pairingRes bls24315.GT) { _, _, P[0], Q[0] = bls24315.Generators() var u, v fr.Element diff --git a/std/commitments/kzg/native_doc_test.go b/std/commitments/kzg/native_doc_test.go new file mode 100644 index 0000000000..8a183e8a66 --- /dev/null +++ b/std/commitments/kzg/native_doc_test.go @@ -0,0 +1,128 @@ +package kzg_test + +import ( + "crypto/rand" + + "github.com/consensys/gnark-crypto/ecc" + fr_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + kzg_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/commitments/kzg" +) + +// Example of using KZG verifier using 2-chains of curves. It is significantly +// more efficient than using field emulation, but requires a specific chain of +// inner and outer curves. +func Example_native() { + // !!! UNSAFE SRS. FOR EXAMPLE PURPOSES ONLY. We create a trusted SRS for + // KZG polynomial commitment scheme. In practice this must be prepared using + // MPC or by reusing existing SRS. !!! + + const ( + // size of the SRS. Defines the maximum degree of the polynomial which can be committed to + kzgSize = 128 + // degree of the random polynomial in the example + polynomialSize = 100 + ) + + // create new SRS for example purposes (NB! UNSAFE!) + alpha, err := rand.Int(rand.Reader, ecc.BLS12_377.ScalarField()) + if err != nil { + panic("sampling alpha failed: " + err.Error()) + } + srs, err := kzg_bls12377.NewSRS(kzgSize, alpha) // UNSAFE! + if err != nil { + panic("new SRS failed: " + err.Error()) + } + + // sample the random polynomial by sampling the coefficients. 
+ f := make([]fr_bls12377.Element, polynomialSize) + for i := range f { + f[i].SetRandom() + } + + // natively commit to the polynomial using SRS + com, err := kzg_bls12377.Commit(f, srs.Pk) + if err != nil { + panic("commitment failed: " + err.Error()) + } + + // sample random evaluation point + var point fr_bls12377.Element + point.SetRandom() + + // construct a proof of correct opening. The evaluation value is proof.ClaimedValue + proof, err := kzg_bls12377.Open(f, point, srs.Pk) + if err != nil { + panic("test opening failed: " + err.Error()) + } + + // test opening proof natively + if err = kzg_bls12377.Verify(&com, &proof, point, srs.Vk); err != nil { + panic("test verify failed: " + err.Error()) + } + + // create a witness element of the commitment + wCmt, err := kzg.ValueOfCommitment[sw_bls12377.G1Affine](com) + if err != nil { + panic("commitment witness failed: " + err.Error()) + } + + // create a witness element of the opening proof and the evaluation point + wProof, err := kzg.ValueOfOpeningProof[sw_bls12377.Scalar, sw_bls12377.G1Affine](point, proof) + if err != nil { + panic("opening proof witness failed: " + err.Error()) + } + + // create a witness element of the SRS + wVk, err := kzg.ValueOfVerifyingKey[sw_bls12377.G2Affine](srs.Vk) + if err != nil { + panic("verifying key witness failed: " + err.Error()) + } + assignment := KZGVerificationCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + circuit := KZGVerificationCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{} + + // because we are using 2-chains then the outer curve must correspond to the + // inner curve. For inner BLS12-377 the outer curve is BW6-761. + ccs, err := frontend.Compile(ecc.BW6_761.ScalarField(), r1cs.NewBuilder, &circuit) + if err != nil { + panic("compile failed: " + err.Error()) + } + + // create Groth16 setup. NB! UNSAFE + pk, vk, err := groth16.Setup(ccs) // UNSAFE! 
Use MPC + if err != nil { + panic("setup failed: " + err.Error()) + } + + // create prover witness from the assignment + secretWitness, err := frontend.NewWitness(&assignment, ecc.BW6_761.ScalarField()) + if err != nil { + panic("secret witness failed: " + err.Error()) + } + + // create public witness from the assignment + publicWitness, err := secretWitness.Public() + if err != nil { + panic("public witness failed: " + err.Error()) + } + + // construct the groth16 proof of verifying KZG commitment opening in-circuit + circuitProof, err := groth16.Prove(ccs, pk, secretWitness) + if err != nil { + panic("proving failed: " + err.Error()) + } + + // verify the Groth16 proof + err = groth16.Verify(circuitProof, vk, publicWitness) + if err != nil { + panic("circuit verification failed: " + err.Error()) + } +} diff --git a/std/commitments/kzg/nonnative_doc_test.go b/std/commitments/kzg/nonnative_doc_test.go new file mode 100644 index 0000000000..8fd0243a81 --- /dev/null +++ b/std/commitments/kzg/nonnative_doc_test.go @@ -0,0 +1,151 @@ +package kzg_test + +import ( + "crypto/rand" + "fmt" + + "github.com/consensys/gnark-crypto/ecc" + fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" + kzg_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/commitments/kzg" +) + +type KZGVerificationCircuit[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GTEl algebra.GtElementT] struct { + kzg.VerifyingKey[G2El] + kzg.Commitment[G1El] + kzg.OpeningProof[S, G1El] +} + +func (c *KZGVerificationCircuit[S, G1El, G2El, GTEl]) Define(api frontend.API) error { + curve, err := algebra.GetCurve[S, G1El](api) + if err != nil { + return fmt.Errorf("get curve: %w", err) + } + pairing, err := algebra.GetPairing[G1El, G2El, GTEl](api) + if err != nil { + return fmt.Errorf("get pairing: %w", err) + } + verifier := kzg.NewVerifier(c.VerifyingKey, curve, pairing) + if err := verifier.AssertProof(c.Commitment, c.OpeningProof); err != nil { + return fmt.Errorf("assert proof: %w", err) + } + return nil +} + +// Example of using KZG verifier using emulated pairing implementation. +func Example_emulated() { + // !!! UNSAFE SRS. FOR EXAMPLE PURPOSES ONLY. We create a trusted SRS for + // KZG polynomial commitment scheme. In practice this must be prepared using + // MPC or by reusing existing SRS. !!! + + const ( + // size of the SRS. Defines the maximum degree of the polynomial which can be committed to + kzgSize = 128 + // degree of the random polynomial in the example + polynomialSize = 100 + ) + + // create new SRS for example purposes (NB! UNSAFE!) + alpha, err := rand.Int(rand.Reader, ecc.BN254.ScalarField()) + if err != nil { + panic("sampling alpha failed: " + err.Error()) + } + srs, err := kzg_bn254.NewSRS(kzgSize, alpha) // UNSAFE! + if err != nil { + panic("new SRS failed: " + err.Error()) + } + + // sample the random polynomial by sampling the coefficients. 
+ f := make([]fr_bn254.Element, polynomialSize) + for i := range f { + f[i].SetRandom() + } + + // natively commit to the polynomial using SRS + com, err := kzg_bn254.Commit(f, srs.Pk) + if err != nil { + panic("commitment failed: " + err.Error()) + } + + // sample random evaluation point + var point fr_bn254.Element + point.SetRandom() + + // construct a proof of correct opening. The evaluation value is proof.ClaimedValue + proof, err := kzg_bn254.Open(f, point, srs.Pk) + if err != nil { + panic("test opening failed: " + err.Error()) + } + + // test opening proof natively + if err = kzg_bn254.Verify(&com, &proof, point, srs.Vk); err != nil { + panic("test verify failed: " + err.Error()) + } + + // create a witness element of the commitment + wCmt, err := kzg.ValueOfCommitment[sw_bn254.G1Affine](com) + if err != nil { + panic("commitment witness failed: " + err.Error()) + } + + // create a witness element of the opening proof and the evaluation point + wProof, err := kzg.ValueOfOpeningProof[sw_bn254.Scalar, sw_bn254.G1Affine](point, proof) + if err != nil { + panic("opening proof witness failed: " + err.Error()) + } + + // create a witness element of the SRS + wVk, err := kzg.ValueOfVerifyingKey[sw_bn254.G2Affine](srs.Vk) + if err != nil { + panic("verifying key witness failed: " + err.Error()) + } + assignment := KZGVerificationCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + circuit := KZGVerificationCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{} + + // as we are currently using the emulated implementation of BN254 + // in-circuit, then we can compile to any curve. For example purposes, here + // we use BN254. + ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) + if err != nil { + panic("compile failed: " + err.Error()) + } + + // create Groth16 setup. NB! UNSAFE + pk, vk, err := groth16.Setup(ccs) // UNSAFE! Use MPC + if err != nil { + panic("setup failed: " + err.Error()) + } + + // create prover witness from the assignment + secretWitness, err := frontend.NewWitness(&assignment, ecc.BN254.ScalarField()) + if err != nil { + panic("secret witness failed: " + err.Error()) + } + + // create public witness from the assignment + publicWitness, err := secretWitness.Public() + if err != nil { + panic("public witness failed: " + err.Error()) + } + + // construct the groth16 proof of verifying KZG commitment opening in-circuit + circuitProof, err := groth16.Prove(ccs, pk, secretWitness) + if err != nil { + panic("proving failed: " + err.Error()) + } + + // verify the Groth16 proof + err = groth16.Verify(circuitProof, vk, publicWitness) + if err != nil { + panic("circuit verification failed: " + err.Error()) + } +} diff --git a/std/commitments/kzg/verifier.go b/std/commitments/kzg/verifier.go new file mode 100644 index 0000000000..7d71ee5115 --- /dev/null +++ b/std/commitments/kzg/verifier.go @@ -0,0 +1,264 @@ +// Package kzg implements KZG polynomial commitment verification. +// +// KZG polynomial commitment allows for the prover to commit to a polynomial and +// then selectively prove evaluations of the said polynomial. The size of the +// commitment is a single G1 element and the size of the evaluation proof is +// also a single G1 element. However, KZG polynomial commitment scheme requires +// a trusted SRS. 
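+//
+// The verifier needs only the two G₂ elements [G₂, [α]G₂] of the SRS; an
+// opening proof is checked with a single product of two pairings.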
+// +// This package supersedes previous type-specific implementations and allows to +// use any implemented pairing-friendly curve implementation, being defined over +// a 2-chain (native implementation) or using field emulation. +package kzg + +import ( + "fmt" + + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + fr_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + kzg_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" + fr_bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + kzg_bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" + fr_bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + kzg_bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/ecc/bn254" + fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" + kzg_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + kzg_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bls12381" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/algebra/emulated/sw_bw6761" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/algebra/native/sw_bls24315" +) + +// Commitment is an KZG commitment to a polynomial. Use [ValueOfCommitment] to +// initialize a witness from the native commitment. +type Commitment[G1El algebra.G1ElementT] struct { + G1El G1El +} + +// ValueOfCommitment initializes a KZG commitment witness from a native +// commitment. It returns an error if there is a conflict between the type +// parameters and provided native commitment type. +func ValueOfCommitment[G1El algebra.G1ElementT](cmt any) (Commitment[G1El], error) { + var ret Commitment[G1El] + switch s := any(&ret).(type) { + case *Commitment[sw_bn254.G1Affine]: + tCmt, ok := cmt.(bn254.G1Affine) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, cmt) + } + s.G1El = sw_bn254.NewG1Affine(tCmt) + case *Commitment[sw_bls12377.G1Affine]: + tCmt, ok := cmt.(bls12377.G1Affine) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, cmt) + } + s.G1El = sw_bls12377.NewG1Affine(tCmt) + case *Commitment[sw_bls12381.G1Affine]: + tCmt, ok := cmt.(bls12381.G1Affine) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, cmt) + } + s.G1El = sw_bls12381.NewG1Affine(tCmt) + case *Commitment[sw_bw6761.G1Affine]: + tCmt, ok := cmt.(bw6761.G1Affine) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, cmt) + } + s.G1El = sw_bw6761.NewG1Affine(tCmt) + case *Commitment[sw_bls24315.G1Affine]: + tCmt, ok := cmt.(bls24315.G1Affine) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, cmt) + } + s.G1El = sw_bls24315.NewG1Affine(tCmt) + default: + return ret, fmt.Errorf("unknown type parametrization") + } + return ret, nil +} + +// OpeningProof embeds the opening proof that polynomial evaluated at Point is +// equal to ClaimedValue. Use [ValueOfOpeningProof] to initialize a witness from +// a native opening proof. 
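+// Unlike the native gnark-crypto proof, this witness also carries the
+// evaluation Point, so the in-circuit verifier receives the commitment, the
+// proof and the opening point together.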
+type OpeningProof[S algebra.ScalarT, G1El algebra.G1ElementT] struct { + QuotientPoly G1El + ClaimedValue S + Point S +} + +// ValueOfOpeningProof initializes an opening proof from the given proof and +// point. It returns an error if there is a mismatch between the type parameters +// and types of the provided point and proof. +func ValueOfOpeningProof[S algebra.ScalarT, G1El algebra.G1ElementT](point any, proof any) (OpeningProof[S, G1El], error) { + var ret OpeningProof[S, G1El] + switch s := any(&ret).(type) { + case *OpeningProof[sw_bn254.Scalar, sw_bn254.G1Affine]: + tProof, ok := proof.(kzg_bn254.OpeningProof) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, proof) + } + tPoint, ok := point.(fr_bn254.Element) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, point) + } + s.QuotientPoly = sw_bn254.NewG1Affine(tProof.H) + s.ClaimedValue = sw_bn254.NewScalar(tProof.ClaimedValue) + s.Point = sw_bn254.NewScalar(tPoint) + case *OpeningProof[sw_bls12377.Scalar, sw_bls12377.G1Affine]: + tProof, ok := proof.(kzg_bls12377.OpeningProof) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, proof) + } + tPoint, ok := point.(fr_bls12377.Element) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, point) + } + s.QuotientPoly = sw_bls12377.NewG1Affine(tProof.H) + s.ClaimedValue = tProof.ClaimedValue.String() + s.Point = tPoint.String() + case *OpeningProof[sw_bls12381.Scalar, sw_bls12381.G1Affine]: + tProof, ok := proof.(kzg_bls12381.OpeningProof) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, proof) + } + tPoint, ok := point.(fr_bls12381.Element) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, point) + } + s.QuotientPoly = sw_bls12381.NewG1Affine(tProof.H) + s.ClaimedValue = sw_bls12381.NewScalar(tProof.ClaimedValue) + s.Point = sw_bls12381.NewScalar(tPoint) + case *OpeningProof[sw_bw6761.Scalar, sw_bw6761.G1Affine]: + tProof, ok := proof.(kzg_bw6761.OpeningProof) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, proof) + } + tPoint, ok := point.(fr_bw6761.Element) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, point) + } + s.QuotientPoly = sw_bw6761.NewG1Affine(tProof.H) + s.ClaimedValue = sw_bw6761.NewScalar(tProof.ClaimedValue) + s.Point = sw_bw6761.NewScalar(tPoint) + case *OpeningProof[sw_bls24315.Scalar, sw_bls24315.G1Affine]: + tProof, ok := proof.(kzg_bls24315.OpeningProof) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, proof) + } + tPoint, ok := point.(fr_bls24315.Element) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, point) + } + s.QuotientPoly = sw_bls24315.NewG1Affine(tProof.H) + s.ClaimedValue = tProof.ClaimedValue.String() + s.Point = tPoint.String() + default: + return ret, fmt.Errorf("unknown type parametrization") + } + return ret, nil +} + +// VerifyingKey is the trusted setup for KZG polynomial commitment scheme. Use +// [ValueOfVerifyingKey] to initialize a witness from the native VerifyingKey. +type VerifyingKey[G2El algebra.G2ElementT] struct { + SRS [2]G2El +} + +// ValueOfVerifyingKey initializes verifying key witness from the native +// verifying key. It returns an error if there is a mismatch between the type +// parameters and the provided verifying key type. 
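+// For example, assuming srs is a *kzg_bn254.SRS:
+//
+//	vk, err := ValueOfVerifyingKey[sw_bn254.G2Affine](srs.Vk)
+//	if err != nil { ... }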
+func ValueOfVerifyingKey[G2El algebra.G2ElementT](vk any) (VerifyingKey[G2El], error) { + var ret VerifyingKey[G2El] + switch s := any(&ret).(type) { + case *VerifyingKey[sw_bn254.G2Affine]: + tVk, ok := vk.(kzg_bn254.VerifyingKey) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, vk) + } + s.SRS[0] = sw_bn254.NewG2Affine(tVk.G2[0]) + s.SRS[1] = sw_bn254.NewG2Affine(tVk.G2[1]) + case *VerifyingKey[sw_bls12377.G2Affine]: + tVk, ok := vk.(kzg_bls12377.VerifyingKey) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, vk) + } + s.SRS[0] = sw_bls12377.NewG2Affine(tVk.G2[0]) + s.SRS[1] = sw_bls12377.NewG2Affine(tVk.G2[1]) + case *VerifyingKey[sw_bls12381.G2Affine]: + tVk, ok := vk.(kzg_bls12381.VerifyingKey) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, vk) + } + s.SRS[0] = sw_bls12381.NewG2Affine(tVk.G2[0]) + s.SRS[1] = sw_bls12381.NewG2Affine(tVk.G2[1]) + case *VerifyingKey[sw_bw6761.G2Affine]: + tVk, ok := vk.(kzg_bw6761.VerifyingKey) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, vk) + } + s.SRS[0] = sw_bw6761.NewG2Affine(tVk.G2[0]) + s.SRS[1] = sw_bw6761.NewG2Affine(tVk.G2[1]) + case *VerifyingKey[sw_bls24315.G2Affine]: + tVk, ok := vk.(kzg_bls24315.VerifyingKey) + if !ok { + return ret, fmt.Errorf("mismatching types %T %T", ret, vk) + } + s.SRS[0] = sw_bls24315.NewG2Affine(tVk.G2[0]) + s.SRS[1] = sw_bls24315.NewG2Affine(tVk.G2[1]) + default: + return ret, fmt.Errorf("unknown type parametrization") + } + return ret, nil +} + +// Verifier allows verifying KZG opening proofs. +type Verifier[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.G2ElementT] struct { + VerifyingKey[G2El] + + curve algebra.Curve[S, G1El] + pairing algebra.Pairing[G1El, G2El, GtEl] +} + +// NewVerifier initializes a new Verifier instance. +func NewVerifier[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.G2ElementT](vk VerifyingKey[G2El], curve algebra.Curve[S, G1El], pairing algebra.Pairing[G1El, G2El, GtEl]) *Verifier[S, G1El, G2El, GtEl] { + return &Verifier[S, G1El, G2El, GtEl]{ + VerifyingKey: vk, + curve: curve, + pairing: pairing, + } +} + +// AssertProof asserts the validity of the opening proof for the given +// commitment. 
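+// Concretely, with commitment C = [f(α)]G₁, opening point a, claimed value f(a)
+// and quotient H = [H(α)]G₁, it checks
+//
+//	e(C - [f(a)]G₁ + a·H, G₂) · e(-H, [α]G₂) == 1,
+//
+// which holds when H(α)·(α - a) = f(α) - f(a), i.e. when H commits to the
+// quotient polynomial (f(X) - f(a))/(X - a).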
+func (vk *Verifier[S, G1El, G2El, GtEl]) AssertProof(commitment Commitment[G1El], proof OpeningProof[S, G1El]) error { + // [f(a)]G₁ + claimedValueG1 := vk.curve.ScalarMulBase(&proof.ClaimedValue) + + // [f(α) - f(a)]G₁ + fminusfaG1 := vk.curve.Neg(claimedValueG1) + fminusfaG1 = vk.curve.Add(fminusfaG1, &commitment.G1El) + + // [-H(α)]G₁ + negQuotientPoly := vk.curve.Neg(&proof.QuotientPoly) + + // [f(α) - f(a) + a*H(α)]G₁ + totalG1 := vk.curve.ScalarMul(&proof.QuotientPoly, &proof.Point) + totalG1 = vk.curve.Add(totalG1, fminusfaG1) + + // e([f(α)-f(a)+aH(α)]G₁], G₂).e([-H(α)]G₁, [α]G₂) == 1 + if err := vk.pairing.PairingCheck( + []*G1El{totalG1, negQuotientPoly}, + []*G2El{&vk.SRS[0], &vk.SRS[1]}, + ); err != nil { + return fmt.Errorf("pairing check: %w", err) + } + return nil +} diff --git a/std/commitments/kzg/verifier_test.go b/std/commitments/kzg/verifier_test.go new file mode 100644 index 0000000000..2fed4f36f7 --- /dev/null +++ b/std/commitments/kzg/verifier_test.go @@ -0,0 +1,413 @@ +package kzg + +import ( + "crypto/rand" + "fmt" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + fr_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + kzg_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" + fr_bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + kzg_bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" + fr_bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + kzg_bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/ecc/bn254" + fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" + kzg_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" + fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + kzg_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bls12381" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/algebra/emulated/sw_bw6761" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/algebra/native/sw_bls24315" + "github.com/consensys/gnark/test" +) + +const ( + kzgSize = 128 + polynomialSize = 100 +) + +type KZGVerificationCircuit[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GTEl algebra.GtElementT] struct { + VerifyingKey[G2El] + Commitment[G1El] + OpeningProof[S, G1El] +} + +func (c *KZGVerificationCircuit[S, G1El, G2El, GTEl]) Define(api frontend.API) error { + curve, err := algebra.GetCurve[S, G1El](api) + if err != nil { + return fmt.Errorf("get curve: %w", err) + } + pairing, err := algebra.GetPairing[G1El, G2El, GTEl](api) + if err != nil { + return fmt.Errorf("get pairing: %w", err) + } + verifier := NewVerifier(c.VerifyingKey, curve, pairing) + if err := verifier.AssertProof(c.Commitment, c.OpeningProof); err != nil { + return fmt.Errorf("assert proof: %w", err) + } + return nil +} + +func TestKZGVerificationEmulated(t *testing.T) { + assert := test.NewAssert(t) + + alpha, err := rand.Int(rand.Reader, ecc.BN254.ScalarField()) + assert.NoError(err) + srs, err := kzg_bn254.NewSRS(kzgSize, alpha) + assert.NoError(err) + + f := make([]fr_bn254.Element, 
polynomialSize) + for i := range f { + f[i].SetRandom() + } + + com, err := kzg_bn254.Commit(f, srs.Pk) + assert.NoError(err) + + var point fr_bn254.Element + point.SetRandom() + proof, err := kzg_bn254.Open(f, point, srs.Pk) + assert.NoError(err) + + if err = kzg_bn254.Verify(&com, &proof, point, srs.Vk); err != nil { + t.Fatal("verify proof", err) + } + + wCmt, err := ValueOfCommitment[sw_bn254.G1Affine](com) + assert.NoError(err) + wProof, err := ValueOfOpeningProof[sw_bn254.Scalar, sw_bn254.G1Affine](point, proof) + assert.NoError(err) + wVk, err := ValueOfVerifyingKey[sw_bn254.G2Affine](srs.Vk) + assert.NoError(err) + + assignment := KZGVerificationCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + assert.CheckCircuit(&KZGVerificationCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{}, test.WithValidAssignment(&assignment)) +} + +func TestKZGVerificationEmulated2(t *testing.T) { + assert := test.NewAssert(t) + + alpha, err := rand.Int(rand.Reader, ecc.BLS12_381.ScalarField()) + assert.NoError(err) + srs, err := kzg_bls12381.NewSRS(kzgSize, alpha) + assert.NoError(err) + + f := make([]fr_bls12381.Element, polynomialSize) + for i := range f { + f[i].SetRandom() + } + + com, err := kzg_bls12381.Commit(f, srs.Pk) + assert.NoError(err) + + var point fr_bls12381.Element + point.SetRandom() + proof, err := kzg_bls12381.Open(f, point, srs.Pk) + assert.NoError(err) + + if err = kzg_bls12381.Verify(&com, &proof, point, srs.Vk); err != nil { + t.Fatal("verify proof", err) + } + + wCmt, err := ValueOfCommitment[sw_bls12381.G1Affine](com) + assert.NoError(err) + wProof, err := ValueOfOpeningProof[sw_bls12381.Scalar, sw_bls12381.G1Affine](point, proof) + assert.NoError(err) + wVk, err := ValueOfVerifyingKey[sw_bls12381.G2Affine](srs.Vk) + assert.NoError(err) + + assignment := KZGVerificationCircuit[sw_bls12381.Scalar, sw_bls12381.G1Affine, sw_bls12381.G2Affine, sw_bls12381.GTEl]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + assert.CheckCircuit(&KZGVerificationCircuit[sw_bls12381.Scalar, sw_bls12381.G1Affine, sw_bls12381.G2Affine, sw_bls12381.GTEl]{}, test.WithValidAssignment(&assignment)) +} + +func TestKZGVerificationEmulated3(t *testing.T) { + assert := test.NewAssert(t) + + alpha, err := rand.Int(rand.Reader, ecc.BW6_761.ScalarField()) + assert.NoError(err) + srs, err := kzg_bw6761.NewSRS(kzgSize, alpha) + assert.NoError(err) + + f := make([]fr_bw6761.Element, polynomialSize) + for i := range f { + f[i].SetRandom() + } + + com, err := kzg_bw6761.Commit(f, srs.Pk) + assert.NoError(err) + + var point fr_bw6761.Element + point.SetRandom() + proof, err := kzg_bw6761.Open(f, point, srs.Pk) + assert.NoError(err) + + if err = kzg_bw6761.Verify(&com, &proof, point, srs.Vk); err != nil { + t.Fatal("verify proof", err) + } + + wCmt, err := ValueOfCommitment[sw_bw6761.G1Affine](com) + assert.NoError(err) + wProof, err := ValueOfOpeningProof[sw_bw6761.Scalar, sw_bw6761.G1Affine](point, proof) + assert.NoError(err) + wVk, err := ValueOfVerifyingKey[sw_bw6761.G2Affine](srs.Vk) + assert.NoError(err) + + assignment := KZGVerificationCircuit[sw_bw6761.Scalar, sw_bw6761.G1Affine, sw_bw6761.G2Affine, sw_bw6761.GTEl]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + assert.CheckCircuit(&KZGVerificationCircuit[sw_bw6761.Scalar, sw_bw6761.G1Affine, sw_bw6761.G2Affine, sw_bw6761.GTEl]{}, test.WithValidAssignment(&assignment), 
test.WithCurves(ecc.BN254)) +} + +func TestKZGVerificationTwoChain(t *testing.T) { + assert := test.NewAssert(t) + + alpha, err := rand.Int(rand.Reader, ecc.BLS12_377.ScalarField()) + assert.NoError(err) + srs, err := kzg_bls12377.NewSRS(kzgSize, alpha) + assert.NoError(err) + + f := make([]fr_bls12377.Element, polynomialSize) + for i := range f { + f[i].SetRandom() + } + + com, err := kzg_bls12377.Commit(f, srs.Pk) + assert.NoError(err) + + var point fr_bls12377.Element + point.SetRandom() + proof, err := kzg_bls12377.Open(f, point, srs.Pk) + assert.NoError(err) + + if err = kzg_bls12377.Verify(&com, &proof, point, srs.Vk); err != nil { + t.Fatal("verify proof", err) + } + + wCmt, err := ValueOfCommitment[sw_bls12377.G1Affine](com) + assert.NoError(err) + wProof, err := ValueOfOpeningProof[sw_bls12377.Scalar, sw_bls12377.G1Affine](point, proof) + assert.NoError(err) + wVk, err := ValueOfVerifyingKey[sw_bls12377.G2Affine](srs.Vk) + assert.NoError(err) + + assignment := KZGVerificationCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + + assert.CheckCircuit(&KZGVerificationCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{}, test.WithValidAssignment(&assignment), test.WithCurves(ecc.BW6_761)) +} + +func TestKZGVerificationTwoChain2(t *testing.T) { + assert := test.NewAssert(t) + + alpha, err := rand.Int(rand.Reader, ecc.BLS24_315.ScalarField()) + assert.NoError(err) + srs, err := kzg_bls24315.NewSRS(kzgSize, alpha) + assert.NoError(err) + + f := make([]fr_bls24315.Element, polynomialSize) + for i := range f { + f[i].SetRandom() + } + + com, err := kzg_bls24315.Commit(f, srs.Pk) + assert.NoError(err) + + var point fr_bls24315.Element + point.SetRandom() + proof, err := kzg_bls24315.Open(f, point, srs.Pk) + assert.NoError(err) + + if err = kzg_bls24315.Verify(&com, &proof, point, srs.Vk); err != nil { + t.Fatal("verify proof", err) + } + + wCmt, err := ValueOfCommitment[sw_bls24315.G1Affine](com) + assert.NoError(err) + wProof, err := ValueOfOpeningProof[sw_bls24315.Scalar, sw_bls24315.G1Affine](point, proof) + assert.NoError(err) + wVk, err := ValueOfVerifyingKey[sw_bls24315.G2Affine](srs.Vk) + assert.NoError(err) + + assignment := KZGVerificationCircuit[sw_bls24315.Scalar, sw_bls24315.G1Affine, sw_bls24315.G2Affine, sw_bls24315.GT]{ + VerifyingKey: wVk, + Commitment: wCmt, + OpeningProof: wProof, + } + + assert.CheckCircuit(&KZGVerificationCircuit[sw_bls24315.Scalar, sw_bls24315.G1Affine, sw_bls24315.G2Affine, sw_bls24315.GT]{}, test.WithValidAssignment(&assignment), test.WithCurves(ecc.BW6_633)) +} + +func TestValueOfCommitment(t *testing.T) { + assert := test.NewAssert(t) + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bn254.Generators() + assignment, err := ValueOfCommitment[sw_bn254.G1Affine](G1) + assert.NoError(err) + _ = assignment + }, "bn254") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bls12377.Generators() + assignment, err := ValueOfCommitment[sw_bls12377.G1Affine](G1) + assert.NoError(err) + _ = assignment + }, "bls12377") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bls12381.Generators() + assignment, err := ValueOfCommitment[sw_bls12381.G1Affine](G1) + assert.NoError(err) + _ = assignment + }, "bls12381") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bw6761.Generators() + assignment, err := ValueOfCommitment[sw_bw6761.G1Affine](G1) + assert.NoError(err) + _ = assignment + }, "bw6761") 
+ assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bls24315.Generators() + assignment, err := ValueOfCommitment[sw_bls24315.G1Affine](G1) + assert.NoError(err) + _ = assignment + }, "bls24315") +} + +func TestValueOfOpeningProof(t *testing.T) { + assert := test.NewAssert(t) + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bn254.Generators() + var value, point fr_bn254.Element + value.SetRandom() + point.SetRandom() + proof := kzg_bn254.OpeningProof{ + H: G1, + ClaimedValue: value, + } + assignment, err := ValueOfOpeningProof[sw_bn254.Scalar, sw_bn254.G1Affine](point, proof) + assert.NoError(err) + _ = assignment + }, "bn254") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bls12377.Generators() + var value, point fr_bls12377.Element + value.SetRandom() + point.SetRandom() + proof := kzg_bls12377.OpeningProof{ + H: G1, + ClaimedValue: value, + } + assignment, err := ValueOfOpeningProof[sw_bls12377.Scalar, sw_bls12377.G1Affine](point, proof) + assert.NoError(err) + _ = assignment + }, "bls12377") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bls12381.Generators() + var value, point fr_bls12381.Element + value.SetRandom() + point.SetRandom() + proof := kzg_bls12381.OpeningProof{ + H: G1, + ClaimedValue: value, + } + assignment, err := ValueOfOpeningProof[sw_bls12381.Scalar, sw_bls12381.G1Affine](point, proof) + assert.NoError(err) + _ = assignment + }, "bls12381") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bw6761.Generators() + var value, point fr_bw6761.Element + value.SetRandom() + point.SetRandom() + proof := kzg_bw6761.OpeningProof{ + H: G1, + ClaimedValue: value, + } + assignment, err := ValueOfOpeningProof[sw_bw6761.Scalar, sw_bw6761.G1Affine](point, proof) + assert.NoError(err) + _ = assignment + }, "bw6761") + assert.Run(func(assert *test.Assert) { + _, _, G1, _ := bls24315.Generators() + var value, point fr_bls24315.Element + value.SetRandom() + point.SetRandom() + proof := kzg_bls24315.OpeningProof{ + H: G1, + ClaimedValue: value, + } + assignment, err := ValueOfOpeningProof[sw_bls24315.Scalar, sw_bls24315.G1Affine](point, proof) + assert.NoError(err) + _ = assignment + }, "bls24315") +} + +func TestValueOfSRS(t *testing.T) { + assert := test.NewAssert(t) + assert.Run(func(assert *test.Assert) { + _, _, _, G2 := bn254.Generators() + vk := kzg_bn254.VerifyingKey{ + G2: [2]bn254.G2Affine{G2, G2}, + } + assignment, err := ValueOfVerifyingKey[sw_bn254.G2Affine](vk) + assert.NoError(err) + _ = assignment + }, "bn254") + assert.Run(func(assert *test.Assert) { + _, _, _, G2 := bls12377.Generators() + vk := kzg_bls12377.VerifyingKey{ + G2: [2]bls12377.G2Affine{G2, G2}, + } + assignment, err := ValueOfVerifyingKey[sw_bls12377.G2Affine](vk) + assert.NoError(err) + _ = assignment + }, "bls12377") + assert.Run(func(assert *test.Assert) { + _, _, _, G2 := bls12381.Generators() + vk := kzg_bls12381.VerifyingKey{ + G2: [2]bls12381.G2Affine{G2, G2}, + } + assignment, err := ValueOfVerifyingKey[sw_bls12381.G2Affine](vk) + assert.NoError(err) + _ = assignment + }, "bls12381") + assert.Run(func(assert *test.Assert) { + _, _, _, G2 := bw6761.Generators() + vk := kzg_bw6761.VerifyingKey{ + G2: [2]bw6761.G2Affine{G2, G2}, + } + assignment, err := ValueOfVerifyingKey[sw_bw6761.G2Affine](vk) + assert.NoError(err) + _ = assignment + }, "bw6761") + assert.Run(func(assert *test.Assert) { + _, _, _, G2 := bls24315.Generators() + vk := kzg_bls24315.VerifyingKey{ + G2: [2]bls24315.G2Affine{G2, G2}, + } + assignment, err := 
ValueOfVerifyingKey[sw_bls24315.G2Affine](vk) + assert.NoError(err) + _ = assignment + }, "bls24315") +} diff --git a/std/commitments/kzg_bls12377/verifier.go b/std/commitments/kzg_bls12377/verifier.go deleted file mode 100644 index e7cffd85a3..0000000000 --- a/std/commitments/kzg_bls12377/verifier.go +++ /dev/null @@ -1,82 +0,0 @@ -/* -Copyright © 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package kzg_bls12377 provides a ZKP-circuit function to verify BLS12_377 KZG inside a BW6_761 circuit. -package kzg_bls12377 - -import ( - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/std/algebra/native/fields_bls12377" - "github.com/consensys/gnark/std/algebra/native/sw_bls12377" -) - -// Digest commitment of a polynomial. -type Digest = sw_bls12377.G1Affine - -// VK verification key (G2 part of SRS) -type VK struct { - G2 [2]sw_bls12377.G2Affine // [G₂, [α]G₂] -} - -// OpeningProof KZG proof for opening at a single point. -type OpeningProof struct { - // H quotient polynomial (f - f(z))/(x-z) - H sw_bls12377.G1Affine - - // ClaimedValue purported value - ClaimedValue frontend.Variable -} - -// Verify verifies a KZG opening proof at a single point -func Verify(api frontend.API, commitment Digest, proof OpeningProof, point frontend.Variable, srs VK) { - // We take the ClaimedValue and point to be frontend.Variable wich - // are elements in 𝔽_p, i.e. the BW6-761 scalar field. - // This is different from 𝔽_r, i.e. the BLS12-377 scalar field - // but r << p (p-r ≈ 377-bit) so when adding two 𝔽_r elements - // as 𝔽_p there is no reduction mod p. - // However, we should be cautious about negative elements and take - // the negative of points instead (-[f(a)]G₁ and -[a]G₂). - - // [f(a)]G₁ - var claimedValueG1Aff sw_bls12377.G1Affine - claimedValueG1Aff.ScalarMulBase(api, proof.ClaimedValue) - - // [f(α) - f(a)]G₁ - var fminusfaG1 sw_bls12377.G1Affine - fminusfaG1.Neg(api, claimedValueG1Aff) - fminusfaG1.AddAssign(api, commitment) - - // [-H(α)]G₁ - var negH sw_bls12377.G1Affine - negH.Neg(api, proof.H) - - // [f(α) - f(a) + a*H(α)]G₁ - var totalG1 sw_bls12377.G1Affine - totalG1.ScalarMul(api, proof.H, point). - AddAssign(api, fminusfaG1) - - // e([f(α)-f(a)+aH(α)]G₁], G₂).e([-H(α)]G₁, [α]G₂) == 1 - resPairing, _ := sw_bls12377.Pair( - api, - []sw_bls12377.G1Affine{totalG1, negH}, - []sw_bls12377.G2Affine{srs.G2[0], srs.G2[1]}, - ) - - var one fields_bls12377.E12 - one.SetOne() - resPairing.AssertIsEqual(api, one) - -} diff --git a/std/commitments/kzg_bls12377/verifier_test.go b/std/commitments/kzg_bls12377/verifier_test.go deleted file mode 100644 index 68cc91d44f..0000000000 --- a/std/commitments/kzg_bls12377/verifier_test.go +++ /dev/null @@ -1,165 +0,0 @@ -/* -Copyright © 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package kzg_bls12377 - -import ( - "crypto/rand" - "testing" - - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" - "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" - "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/consensys/gnark/frontend/cs/scs" - "github.com/consensys/gnark/test" -) - -type verifierCircuit struct { - VerifKey VK - Proof OpeningProof - Com Digest - S frontend.Variable -} - -func (circuit *verifierCircuit) Define(api frontend.API) error { - - // create the verifier cs - Verify(api, circuit.Com, circuit.Proof, circuit.S, circuit.VerifKey) - - return nil -} - -//------------------------------------------------------- -// proof generated using gnark-crypto - -func TestVerifierDynamic(t *testing.T) { - - assert := test.NewAssert(t) - - // sizes of polynomials, kzg - const kzgSize = 128 - const polynomialSize = 100 - - // trusted setup - alpha, err := rand.Int(rand.Reader, ecc.BLS12_377.ScalarField()) - assert.NoError(err) - srs, err := kzg.NewSRS(kzgSize, alpha) - assert.NoError(err) - - // random polynomial - f := make([]fr.Element, polynomialSize) - for i := 0; i < 60; i++ { - f[i].SetRandom() - } - - // commit to the polynomial - com, err := kzg.Commit(f, srs.Pk) - assert.NoError(err) - - // create opening proof - var point fr.Element - point.SetRandom() - proof, err := kzg.Open(f, point, srs.Pk) - assert.NoError(err) - - // check that the proof is correct - err = kzg.Verify(&com, &proof, point, srs.Vk) - if err != nil { - t.Fatal(err) - } - - // verify the proof in circuit - var witness verifierCircuit - - // populate the witness - witness.Com.X = com.X.String() - witness.Com.Y = com.Y.String() - - witness.Proof.H.X = proof.H.X.String() - witness.Proof.H.Y = proof.H.Y.String() - - witness.Proof.ClaimedValue = proof.ClaimedValue.String() - - witness.S = point.String() - - witness.VerifKey.G2[0].X.A0 = srs.Vk.G2[0].X.A0.String() - witness.VerifKey.G2[0].X.A1 = srs.Vk.G2[0].X.A1.String() - witness.VerifKey.G2[0].Y.A0 = srs.Vk.G2[0].Y.A0.String() - witness.VerifKey.G2[0].Y.A1 = srs.Vk.G2[0].Y.A1.String() - witness.VerifKey.G2[1].X.A0 = srs.Vk.G2[1].X.A0.String() - witness.VerifKey.G2[1].X.A1 = srs.Vk.G2[1].X.A1.String() - witness.VerifKey.G2[1].Y.A0 = srs.Vk.G2[1].Y.A0.String() - witness.VerifKey.G2[1].Y.A1 = srs.Vk.G2[1].Y.A1.String() - - // check if the circuit is solved - var circuit verifierCircuit - assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_761)) - -} - -//------------------------------------------------------- -// harcoded values - -func TestVerifier(t *testing.T) { - - var circuit, witness verifierCircuit - - // static witness - witness.Com.X = "145429059828629443506099208441019164249918805265766585069511130101715300037889375544644493566733059056337445574142" - witness.Com.Y = "7748648670212409231552941907406345586179813940682493172078407968203200311849395869785335293628955566021478572791" - - witness.Proof.H.X = 
"142546216630759857020142552653688574597188212934274836451979072858880695115513802425442488457664742720974070355453" - witness.Proof.H.Y = "51742728231756961100409716107519203689800988928890924645730616869717553365749083029986151526811552917856555146906" - - witness.Proof.ClaimedValue = "7211341386127354417397285211336133449231039596179023429378585109196698597268" - witness.S = "4321" - witness.VerifKey.G2[0].X.A0 = "233578398248691099356572568220835526895379068987715365179118596935057653620464273615301663571204657964920925606294" - witness.VerifKey.G2[0].X.A1 = "140913150380207355837477652521042157274541796891053068589147167627541651775299824604154852141315666357241556069118" - witness.VerifKey.G2[0].Y.A0 = "63160294768292073209381361943935198908131692476676907196754037919244929611450776219210369229519898517858833747423" - witness.VerifKey.G2[0].Y.A1 = "149157405641012693445398062341192467754805999074082136895788947234480009303640899064710353187729182149407503257491" - witness.VerifKey.G2[1].X.A0 = "123747009012703414871739433259892117784672459657097139998749475279099125411579029748101735145753812822027512995199" - witness.VerifKey.G2[1].X.A1 = "62735868045337090199933301723513128455431585854943778977190757050206710789139082141526891028732261537358701287808" - witness.VerifKey.G2[1].Y.A0 = "212548833831227473592895134150456464278558858278752454560645447355770538424096804613692943525553353783189853308160" - witness.VerifKey.G2[1].Y.A1 = "123051654588413991319606911619099872563646143639520520553172600449178549047186983142138529976243874838154671706124" - - // cs values - assert := test.NewAssert(t) - assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_761)) - -} - -// bench -var ccsBench constraint.ConstraintSystem - -func BenchmarkVerifyKZG(b *testing.B) { - var c verifierCircuit - b.ResetTimer() - b.Run("groth16", func(b *testing.B) { - for i := 0; i < b.N; i++ { - ccsBench, _ = frontend.Compile(ecc.BW6_761.ScalarField(), r1cs.NewBuilder, &c) - } - }) - b.Log("groth16", ccsBench.GetNbConstraints()) - b.Run("plonk", func(b *testing.B) { - for i := 0; i < b.N; i++ { - ccsBench, _ = frontend.Compile(ecc.BW6_761.ScalarField(), scs.NewBuilder, &c) - } - }) - b.Log("plonk", ccsBench.GetNbConstraints()) -} diff --git a/std/commitments/kzg_bls24315/verifier.go b/std/commitments/kzg_bls24315/verifier.go deleted file mode 100644 index 260064b691..0000000000 --- a/std/commitments/kzg_bls24315/verifier.go +++ /dev/null @@ -1,82 +0,0 @@ -/* -Copyright © 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package kzg_bls24315 provides a ZKP-circuit function to verify BLS24_315 KZG inside a BW6_633 circuit. -package kzg_bls24315 - -import ( - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/std/algebra/native/fields_bls24315" - "github.com/consensys/gnark/std/algebra/native/sw_bls24315" -) - -// Digest commitment of a polynomial. 
-type Digest = sw_bls24315.G1Affine - -// VK verification key (G2 part of SRS) -type VK struct { - G2 [2]sw_bls24315.G2Affine // [G₂, [α]G₂] -} - -// OpeningProof KZG proof for opening at a single point. -type OpeningProof struct { - // H quotient polynomial (f - f(z))/(x-z) - H sw_bls24315.G1Affine - - // ClaimedValue purported value - ClaimedValue frontend.Variable -} - -// Verify verifies a KZG opening proof at a single point -func Verify(api frontend.API, commitment Digest, proof OpeningProof, point frontend.Variable, srs VK) { - // We take the ClaimedValue and point to be frontend.Variable wich - // are elements in 𝔽_p, i.e. the BW6-633 scalar field. - // This is different from 𝔽_r, i.e. the BLS24-315 scalar field - // but r << p (p-r ≈ 315-bit) so when adding two 𝔽_r elements - // as 𝔽_p there is no reduction mod p. - // However, we should be cautious about negative elements and take - // the negative of points instead (-[f(a)]G₁ and -[a]G₂). - - // [f(a)]G₁ - var claimedValueG1Aff sw_bls24315.G1Affine - claimedValueG1Aff.ScalarMulBase(api, proof.ClaimedValue) - - // [f(α) - f(a)]G₁ - var fminusfaG1 sw_bls24315.G1Affine - fminusfaG1.Neg(api, claimedValueG1Aff) - fminusfaG1.AddAssign(api, commitment) - - // [-H(α)]G₁ - var negH sw_bls24315.G1Affine - negH.Neg(api, proof.H) - - // [f(α) - f(a) + a*H(α)]G₁ - var totalG1 sw_bls24315.G1Affine - totalG1.ScalarMul(api, proof.H, point). - AddAssign(api, fminusfaG1) - - // e([f(α)-f(a)+aH(α)]G₁], G₂).e([-H(α)]G₁, [α]G₂) == 1 - resPairing, _ := sw_bls24315.Pair( - api, - []sw_bls24315.G1Affine{totalG1, negH}, - []sw_bls24315.G2Affine{srs.G2[0], srs.G2[1]}, - ) - - var one fields_bls24315.E24 - one.SetOne() - resPairing.AssertIsEqual(api, one) - -} diff --git a/std/commitments/kzg_bls24315/verifier_test.go b/std/commitments/kzg_bls24315/verifier_test.go deleted file mode 100644 index 317de90d68..0000000000 --- a/std/commitments/kzg_bls24315/verifier_test.go +++ /dev/null @@ -1,184 +0,0 @@ -/* -Copyright © 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package kzg_bls24315 - -import ( - "crypto/rand" - "testing" - - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" - "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" - "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/consensys/gnark/frontend/cs/scs" - "github.com/consensys/gnark/test" -) - -type verifierCircuit struct { - VerifKey VK - Proof OpeningProof - Com Digest - S frontend.Variable -} - -func (circuit *verifierCircuit) Define(api frontend.API) error { - - // create the verifier cs - Verify(api, circuit.Com, circuit.Proof, circuit.S, circuit.VerifKey) - - return nil -} - -//------------------------------------------------------- -// proof generated using gnark-crypto - -func TestVerifierDynamic(t *testing.T) { - - assert := test.NewAssert(t) - - // sizes of polynomials, kzg - const kzgSize = 128 - const polynomialSize = 100 - - // trusted setup - alpha, err := rand.Int(rand.Reader, ecc.BLS24_315.ScalarField()) - assert.NoError(err) - srs, err := kzg.NewSRS(kzgSize, alpha) - assert.NoError(err) - - // random polynomial - f := make([]fr.Element, polynomialSize) - for i := 0; i < 60; i++ { - f[i].SetRandom() - } - - // commit to the polynomial - com, err := kzg.Commit(f, srs.Pk) - assert.NoError(err) - - // create opening proof - var point fr.Element - point.SetRandom() - proof, err := kzg.Open(f, point, srs.Pk) - assert.NoError(err) - - // check that the proof is correct - err = kzg.Verify(&com, &proof, point, srs.Vk) - if err != nil { - t.Fatal(err) - } - - // verify the proof in circuit - var witness verifierCircuit - - // populate the witness - witness.Com.X = com.X.String() - witness.Com.Y = com.Y.String() - - witness.Proof.H.X = proof.H.X.String() - witness.Proof.H.Y = proof.H.Y.String() - - witness.Proof.ClaimedValue = proof.ClaimedValue.String() - - witness.S = point.String() - - witness.VerifKey.G2[0].X.B0.A0 = srs.Vk.G2[0].X.B0.A0.String() - witness.VerifKey.G2[0].X.B0.A1 = srs.Vk.G2[0].X.B0.A1.String() - witness.VerifKey.G2[0].X.B1.A0 = srs.Vk.G2[0].X.B1.A0.String() - witness.VerifKey.G2[0].X.B1.A1 = srs.Vk.G2[0].X.B1.A1.String() - witness.VerifKey.G2[0].Y.B0.A0 = srs.Vk.G2[0].Y.B0.A0.String() - witness.VerifKey.G2[0].Y.B0.A1 = srs.Vk.G2[0].Y.B0.A1.String() - witness.VerifKey.G2[0].Y.B1.A0 = srs.Vk.G2[0].Y.B1.A0.String() - witness.VerifKey.G2[0].Y.B1.A1 = srs.Vk.G2[0].Y.B1.A1.String() - - witness.VerifKey.G2[1].X.B0.A0 = srs.Vk.G2[1].X.B0.A0.String() - witness.VerifKey.G2[1].X.B0.A1 = srs.Vk.G2[1].X.B0.A1.String() - witness.VerifKey.G2[1].X.B1.A0 = srs.Vk.G2[1].X.B1.A0.String() - witness.VerifKey.G2[1].X.B1.A1 = srs.Vk.G2[1].X.B1.A1.String() - witness.VerifKey.G2[1].Y.B0.A0 = srs.Vk.G2[1].Y.B0.A0.String() - witness.VerifKey.G2[1].Y.B0.A1 = srs.Vk.G2[1].Y.B0.A1.String() - witness.VerifKey.G2[1].Y.B1.A0 = srs.Vk.G2[1].Y.B1.A0.String() - witness.VerifKey.G2[1].Y.B1.A1 = srs.Vk.G2[1].Y.B1.A1.String() - - // check if the circuit is solved - var circuit verifierCircuit - assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_633)) - -} - -//------------------------------------------------------- -// harcoded values - -func TestVerifier(t *testing.T) { - - var circuit, witness verifierCircuit - - // static witness - witness.Com.X = "35386189147256460787905142428026982693834102687669771641361389281756222188309133371287736011496" - witness.Com.Y = 
"27110917293370507654960132415484655252529074592699870521959828295621560278434020539890708345149" - - witness.Proof.H.X = "237024382315576057940476197527646514934539639879200035206834755549615436908306104502862432730" - witness.Proof.H.Y = "24965199876048664783103146001620612576865473814618781613850899751573655382828001319566087837055" - - witness.Proof.ClaimedValue = "10347231107172233075459792371577505115223937655290126532055162077965558980163" - witness.S = "4321" - - witness.VerifKey.G2[0].X.B0.A0 = "24614737899199071964341749845083777103809664018538138889239909664991294445469052467064654073699" - witness.VerifKey.G2[0].X.B0.A1 = "17049297748993841127032249156255993089778266476087413538366212660716380683149731996715975282972" - witness.VerifKey.G2[0].X.B1.A0 = "11950668649125904104557740112865942804623051114821811669564995102755430514441092495782202668342" - witness.VerifKey.G2[0].X.B1.A1 = "3603055379462539802413979855826194299714805833759849528529386570240639115620788686893505938793" - witness.VerifKey.G2[0].Y.B0.A0 = "31740092748246070457677943092194030978994615503726570180895475408200863271773078192139722193079" - witness.VerifKey.G2[0].Y.B0.A1 = "30261413948955264769241509843031153941332801192447678605718183215275065425758214858190865971597" - witness.VerifKey.G2[0].Y.B1.A0 = "14195825602561496219090410113749222574308144851497375443809100117082380611212823440674391088885" - witness.VerifKey.G2[0].Y.B1.A1 = "2391152940984805871402135750194189812615420966694899795235607856168224901793030297133493038211" - - witness.VerifKey.G2[1].X.B0.A0 = "32770621494303675347306576037414743205466109457179006780112295339591667866879607994893522201077" - witness.VerifKey.G2[1].X.B0.A1 = "26234307989293079589757302086025391411007046129273969450459586440325937793578626756390716239607" - witness.VerifKey.G2[1].X.B1.A0 = "12885920290770633767625725164719407698814564441475093302178981579150678620682561869830892647708" - witness.VerifKey.G2[1].X.B1.A1 = "27040439362534196619980827988108357486576687369306457236523666215277529311368226649309430321857" - witness.VerifKey.G2[1].Y.B0.A0 = "37891043881493427277825396947634598161159358734636209357686614942355583145029806490020871408089" - witness.VerifKey.G2[1].Y.B0.A1 = "24578978782210992183339450660991675754164024355249488228592063724386132418314115963198249364981" - witness.VerifKey.G2[1].Y.B1.A0 = "2561567173101794713286533032340948733218695754942152779206184132595475750392464489574163449132" - witness.VerifKey.G2[1].Y.B1.A1 = "22410372563820522534342381636929948962663337994936763276489712608156477267640544532767398832260" - - // cs values - assert := test.NewAssert(t) - assert.CheckCircuit(&circuit, test.WithValidAssignment(&witness), test.WithCurves(ecc.BW6_633)) - -} - -// bench -var ccsBench constraint.ConstraintSystem - -func BenchmarkVerifyKZG(b *testing.B) { - var c verifierCircuit - b.ResetTimer() - b.Run("groth16", func(b *testing.B) { - for i := 0; i < b.N; i++ { - ccsBench, _ = frontend.Compile(ecc.BW6_633.ScalarField(), r1cs.NewBuilder, &c) - } - }) - b.Log("groth16", ccsBench.GetNbConstraints()) - b.Run("plonk", func(b *testing.B) { - for i := 0; i < b.N; i++ { - ccsBench, _ = frontend.Compile(ecc.BW6_633.ScalarField(), scs.NewBuilder, &c) - } - }) - b.Log("plonk", ccsBench.GetNbConstraints()) -} diff --git a/std/groth16_bls12377/verifier.go b/std/groth16_bls12377/verifier.go deleted file mode 100644 index e2f7f37338..0000000000 --- a/std/groth16_bls12377/verifier.go +++ /dev/null @@ -1,139 +0,0 @@ -/* -Copyright 2020 ConsenSys - -Licensed under 
the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package groth16_bls12377 provides a ZKP-circuit function to verify BLS12_377 Groth16 inside a BW6_761 circuit. -package groth16_bls12377 - -import ( - "reflect" - - bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" - "github.com/consensys/gnark/backend/groth16" - groth16_bls12377 "github.com/consensys/gnark/backend/groth16/bls12-377" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/std/algebra/native/fields_bls12377" - "github.com/consensys/gnark/std/algebra/native/sw_bls12377" -) - -// Proof represents a Groth16 proof -// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf -type Proof struct { - Ar, Krs sw_bls12377.G1Affine - Bs sw_bls12377.G2Affine -} - -// VerifyingKey represents a Groth16 verifying key -// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf -type VerifyingKey struct { - // e(α, β) - E fields_bls12377.E12 - - // -[γ]2, -[δ]2 - G2 struct { - GammaNeg, DeltaNeg sw_bls12377.G2Affine - } - - // [Kvk]1 - G1 struct { - K []sw_bls12377.G1Affine // The indexes correspond to the public wires - - } -} - -// Verify implements the verification function of Groth16. -// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf -// publicInputs do NOT contain the ONE_WIRE -func Verify(api frontend.API, vk VerifyingKey, proof Proof, publicInputs []frontend.Variable) { - if len(vk.G1.K) == 0 { - panic("inner verifying key needs at least one point; VerifyingKey.G1 must be initialized before compiling circuit") - - } - - // compute kSum = Σx.[Kvk(t)]1 - var kSum sw_bls12377.G1Affine - - // kSum = Kvk[0] (assumes ONE_WIRE is at position 0) - kSum.X = vk.G1.K[0].X - kSum.Y = vk.G1.K[0].Y - - for k, v := range publicInputs { - var ki sw_bls12377.G1Affine - ki.ScalarMul(api, vk.G1.K[k+1], v) - kSum.AddAssign(api, ki) - - } - - // compute e(Σx.[Kvk(t)]1, -[γ]2) * e(Krs,δ) * e(Ar,Bs) - pairing, _ := sw_bls12377.Pair(api, []sw_bls12377.G1Affine{kSum, proof.Krs, proof.Ar}, []sw_bls12377.G2Affine{vk.G2.GammaNeg, vk.G2.DeltaNeg, proof.Bs}) - - // vk.E must be equal to pairing - vk.E.AssertIsEqual(api, pairing) - -} - -// Assign values to the "in-circuit" VerifyingKey from a "out-of-circuit" VerifyingKey -func (vk *VerifyingKey) Assign(_ovk groth16.VerifyingKey) { - ovk, ok := _ovk.(*groth16_bls12377.VerifyingKey) - if !ok { - panic("expected *groth16_bls12377.VerifyingKey, got " + reflect.TypeOf(_ovk).String()) - - } - - e, err := bls12377.Pair([]bls12377.G1Affine{ovk.G1.Alpha}, []bls12377.G2Affine{ovk.G2.Beta}) - if err != nil { - panic(err) - - } - vk.E.Assign(&e) - - vk.G1.K = make([]sw_bls12377.G1Affine, len(ovk.G1.K)) - for i := 0; i < len(ovk.G1.K); i++ { - vk.G1.K[i].Assign(&ovk.G1.K[i]) - - } - var deltaNeg, gammaNeg bls12377.G2Affine - deltaNeg.Neg(&ovk.G2.Delta) - gammaNeg.Neg(&ovk.G2.Gamma) - vk.G2.DeltaNeg.Assign(&deltaNeg) - vk.G2.GammaNeg.Assign(&gammaNeg) - -} - -// Allocate memory for the "in-circuit" VerifyingKey -// This is exposed so that the slices in the 
structure can be allocated -// before calling frontend.Compile(). -func (vk *VerifyingKey) Allocate(_ovk groth16.VerifyingKey) { - ovk, ok := _ovk.(*groth16_bls12377.VerifyingKey) - if !ok { - panic("expected *groth16_bls12377.VerifyingKey, got " + reflect.TypeOf(_ovk).String()) - - } - vk.G1.K = make([]sw_bls12377.G1Affine, len(ovk.G1.K)) - -} - -// Assign the proof values of Groth16 -func (proof *Proof) Assign(_oproof groth16.Proof) { - oproof, ok := _oproof.(*groth16_bls12377.Proof) - if !ok { - panic("expected *groth16_bls12377.Proof, got " + reflect.TypeOf(oproof).String()) - - } - proof.Ar.Assign(&oproof.Ar) - proof.Krs.Assign(&oproof.Krs) - proof.Bs.Assign(&oproof.Bs) - -} diff --git a/std/groth16_bls12377/verifier_test.go b/std/groth16_bls12377/verifier_test.go deleted file mode 100644 index 282984139e..0000000000 --- a/std/groth16_bls12377/verifier_test.go +++ /dev/null @@ -1,206 +0,0 @@ -/* -Copyright 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package groth16_bls12377 - -import ( - "reflect" - "testing" - - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" - "github.com/consensys/gnark-crypto/hash" - "github.com/consensys/gnark/backend" - "github.com/consensys/gnark/backend/groth16" - "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/consensys/gnark/std/hash/mimc" - "github.com/consensys/gnark/test" -) - -const ( - preImage = "4992816046196248432836492760315135318126925090839638585255611512962528270024" - publicHash = "7831393781387060555412927989411398077996792073838215843928284475008119358174" -) - -type mimcCircuit struct { - PreImage frontend.Variable - Hash frontend.Variable `gnark:",public"` -} - -func (circuit *mimcCircuit) Define(api frontend.API) error { - mimc, err := mimc.NewMiMC(api) - if err != nil { - return err - - } - mimc.Write(circuit.PreImage) - api.AssertIsEqual(mimc.Sum(), circuit.Hash) - return nil - -} - -// Calculate the expected output of MIMC through plain invocation -func preComputeMimc(preImage frontend.Variable) interface{} { - var expectedY fr.Element - expectedY.SetInterface(preImage) - // calc MiMC - goMimc := hash.MIMC_BLS12_377.New() - goMimc.Write(expectedY.Marshal()) - expectedh := goMimc.Sum(nil) - return expectedh - -} - -type verifierCircuit struct { - InnerProof Proof - InnerVk VerifyingKey - Hash frontend.Variable -} - -func (circuit *verifierCircuit) Define(api frontend.API) error { - // create the verifier cs - Verify(api, circuit.InnerVk, circuit.InnerProof, []frontend.Variable{circuit.Hash}) - - return nil - -} - -func TestVerifier(t *testing.T) { - - // create a mock cs: knowing the preimage of a hash using mimc - var MimcCircuit mimcCircuit - r1cs, err := frontend.Compile(ecc.BLS12_377.ScalarField(), r1cs.NewBuilder, &MimcCircuit) - if err != nil { - t.Fatal(err) - - } - - var pre_assignment mimcCircuit - pre_assignment.PreImage = preImage - pre_assignment.Hash = publicHash - pre_witness, err := 
frontend.NewWitness(&pre_assignment, ecc.BLS12_377.ScalarField()) - if err != nil { - t.Fatal(err) - - } - - innerPk, innerVk, err := groth16.Setup(r1cs) - if err != nil { - t.Fatal(err) - - } - - proof, err := groth16.Prove(r1cs, innerPk, pre_witness) - if err != nil { - t.Fatal(err) - - } - - publicWitness, err := pre_witness.Public() - if err != nil { - t.Fatal(err) - - } - - // Check that proof verifies before continuing - if err := groth16.Verify(proof, innerVk, publicWitness); err != nil { - t.Fatal(err) - - } - - var circuit verifierCircuit - circuit.InnerVk.Allocate(innerVk) - - var witness verifierCircuit - witness.InnerProof.Assign(proof) - witness.InnerVk.Assign(innerVk) - witness.Hash = preComputeMimc(preImage) - - assert := test.NewAssert(t) - - assert.ProverSucceeded(&circuit, &witness, test.WithCurves(ecc.BW6_761), test.WithBackends(backend.GROTH16)) - -} - -func BenchmarkCompile(b *testing.B) { - - // create a mock cs: knowing the preimage of a hash using mimc - var MimcCircuit mimcCircuit - _r1cs, err := frontend.Compile(ecc.BLS12_377.ScalarField(), r1cs.NewBuilder, &MimcCircuit) - if err != nil { - b.Fatal(err) - - } - - var pre_assignment mimcCircuit - pre_assignment.PreImage = preImage - pre_assignment.Hash = publicHash - pre_witness, err := frontend.NewWitness(&pre_assignment, ecc.BLS12_377.ScalarField()) - if err != nil { - b.Fatal(err) - - } - - innerPk, innerVk, err := groth16.Setup(_r1cs) - if err != nil { - b.Fatal(err) - - } - - proof, err := groth16.Prove(_r1cs, innerPk, pre_witness) - if err != nil { - b.Fatal(err) - - } - - publicWitness, err := pre_witness.Public() - if err != nil { - b.Fatal(err) - - } - - // Check that proof verifies before continuing - if err := groth16.Verify(proof, innerVk, publicWitness); err != nil { - b.Fatal(err) - - } - - var circuit verifierCircuit - circuit.InnerVk.Allocate(innerVk) - - var ccs constraint.ConstraintSystem - b.ResetTimer() - for i := 0; i < b.N; i++ { - ccs, err = frontend.Compile(ecc.BW6_761.ScalarField(), r1cs.NewBuilder, &circuit) - if err != nil { - b.Fatal(err) - - } - - } - - b.Log(ccs.GetNbConstraints()) - -} - -var tVariable reflect.Type - -func init() { - tVariable = reflect.ValueOf(struct{ A frontend.Variable }{}).FieldByName("A").Type() - -} diff --git a/std/groth16_bls24315/verifier.go b/std/groth16_bls24315/verifier.go deleted file mode 100644 index 8e474a8979..0000000000 --- a/std/groth16_bls24315/verifier.go +++ /dev/null @@ -1,139 +0,0 @@ -/* -Copyright © 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package groth16_bls24315 provides a ZKP-circuit function to verify BLS24-315 Groth16 inside a BW6-633 circuit. 
-package groth16_bls24315 - -import ( - "reflect" - - bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" - "github.com/consensys/gnark/backend/groth16" - groth16_bls24315 "github.com/consensys/gnark/backend/groth16/bls24-315" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/std/algebra/native/fields_bls24315" - "github.com/consensys/gnark/std/algebra/native/sw_bls24315" -) - -// Proof represents a Groth16 proof -// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf -type Proof struct { - Ar, Krs sw_bls24315.G1Affine - Bs sw_bls24315.G2Affine -} - -// VerifyingKey represents a Groth16 verifying key -// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf -type VerifyingKey struct { - // e(α, β) - E fields_bls24315.E24 - - // -[γ]2, -[δ]2 - G2 struct { - GammaNeg, DeltaNeg sw_bls24315.G2Affine - } - - // [Kvk]1 - G1 struct { - K []sw_bls24315.G1Affine // The indexes correspond to the public wires - - } -} - -// Verify implements the verification function of Groth16. -// Notation follows Figure 4. in DIZK paper https://eprint.iacr.org/2018/691.pdf -// publicInputs do NOT contain the ONE_WIRE -func Verify(api frontend.API, vk VerifyingKey, proof Proof, publicInputs []frontend.Variable) { - if len(vk.G1.K) == 0 { - panic("inner verifying key needs at least one point; VerifyingKey.G1 must be initialized before compiling circuit") - - } - - // compute kSum = Σx.[Kvk(t)]1 - var kSum sw_bls24315.G1Affine - - // kSum = Kvk[0] (assumes ONE_WIRE is at position 0) - kSum.X = vk.G1.K[0].X - kSum.Y = vk.G1.K[0].Y - - for k, v := range publicInputs { - var ki sw_bls24315.G1Affine - ki.ScalarMul(api, vk.G1.K[k+1], v) - kSum.AddAssign(api, ki) - - } - - // compute e(Σx.[Kvk(t)]1, -[γ]2) * e(Krs,δ) * e(Ar,Bs) - pairing, _ := sw_bls24315.Pair(api, []sw_bls24315.G1Affine{kSum, proof.Krs, proof.Ar}, []sw_bls24315.G2Affine{vk.G2.GammaNeg, vk.G2.DeltaNeg, proof.Bs}) - - // vk.E must be equal to pairing - vk.E.AssertIsEqual(api, pairing) - -} - -// Assign values to the "in-circuit" VerifyingKey from a "out-of-circuit" VerifyingKey -func (vk *VerifyingKey) Assign(_ovk groth16.VerifyingKey) { - ovk, ok := _ovk.(*groth16_bls24315.VerifyingKey) - if !ok { - panic("expected *groth16_bls24315.VerifyingKey, got " + reflect.TypeOf(_ovk).String()) - - } - - e, err := bls24315.Pair([]bls24315.G1Affine{ovk.G1.Alpha}, []bls24315.G2Affine{ovk.G2.Beta}) - if err != nil { - panic(err) - - } - vk.E.Assign(&e) - - vk.G1.K = make([]sw_bls24315.G1Affine, len(ovk.G1.K)) - for i := 0; i < len(ovk.G1.K); i++ { - vk.G1.K[i].Assign(&ovk.G1.K[i]) - - } - var deltaNeg, gammaNeg bls24315.G2Affine - deltaNeg.Neg(&ovk.G2.Delta) - gammaNeg.Neg(&ovk.G2.Gamma) - vk.G2.DeltaNeg.Assign(&deltaNeg) - vk.G2.GammaNeg.Assign(&gammaNeg) - -} - -// Allocate memory for the "in-circuit" VerifyingKey -// This is exposed so that the slices in the structure can be allocated -// before calling frontend.Compile(). 
-func (vk *VerifyingKey) Allocate(_ovk groth16.VerifyingKey) { - ovk, ok := _ovk.(*groth16_bls24315.VerifyingKey) - if !ok { - panic("expected *groth16_bls24315.VerifyingKey, got " + reflect.TypeOf(_ovk).String()) - - } - vk.G1.K = make([]sw_bls24315.G1Affine, len(ovk.G1.K)) - -} - -// Assign the proof values of Groth16 -func (proof *Proof) Assign(_oproof groth16.Proof) { - oproof, ok := _oproof.(*groth16_bls24315.Proof) - if !ok { - panic("expected *groth16_bls24315.Proof, got " + reflect.TypeOf(oproof).String()) - - } - proof.Ar.Assign(&oproof.Ar) - proof.Krs.Assign(&oproof.Krs) - proof.Bs.Assign(&oproof.Bs) - -} diff --git a/std/groth16_bls24315/verifier_test.go b/std/groth16_bls24315/verifier_test.go deleted file mode 100644 index c3a5a9f3af..0000000000 --- a/std/groth16_bls24315/verifier_test.go +++ /dev/null @@ -1,206 +0,0 @@ -/* -Copyright © 2020 ConsenSys - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package groth16_bls24315 - -import ( - "reflect" - "testing" - - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" - "github.com/consensys/gnark-crypto/hash" - "github.com/consensys/gnark/backend" - "github.com/consensys/gnark/backend/groth16" - "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/consensys/gnark/std/hash/mimc" - "github.com/consensys/gnark/test" -) - -const ( - preImage = "4992816046196248432836492760315135318126925090839638585255611512962528270024" - publicHash = "4875439939758844840941638351757981379945701574516438614845550995673793857363" -) - -type mimcCircuit struct { - PreImage frontend.Variable - Hash frontend.Variable `gnark:",public"` -} - -func (circuit *mimcCircuit) Define(api frontend.API) error { - mimc, err := mimc.NewMiMC(api) - if err != nil { - return err - - } - mimc.Write(circuit.PreImage) - api.AssertIsEqual(mimc.Sum(), circuit.Hash) - return nil - -} - -// Calculate the expected output of MIMC through plain invocation -func preComputeMimc(preImage frontend.Variable) interface{} { - var expectedY fr.Element - expectedY.SetInterface(preImage) - // calc MiMC - goMimc := hash.MIMC_BLS24_315.New() - goMimc.Write(expectedY.Marshal()) - expectedh := goMimc.Sum(nil) - return expectedh - -} - -type verifierCircuit struct { - InnerProof Proof - InnerVk VerifyingKey - Hash frontend.Variable -} - -func (circuit *verifierCircuit) Define(api frontend.API) error { - // create the verifier cs - Verify(api, circuit.InnerVk, circuit.InnerProof, []frontend.Variable{circuit.Hash}) - - return nil - -} - -func TestVerifier(t *testing.T) { - - // create a mock cs: knowing the preimage of a hash using mimc - var MimcCircuit mimcCircuit - r1cs, err := frontend.Compile(ecc.BLS24_315.ScalarField(), r1cs.NewBuilder, &MimcCircuit) - if err != nil { - t.Fatal(err) - - } - - var pre_assignment mimcCircuit - pre_assignment.PreImage = preImage - pre_assignment.Hash = publicHash - pre_witness, err := frontend.NewWitness(&pre_assignment, 
ecc.BLS24_315.ScalarField()) - if err != nil { - t.Fatal(err) - - } - - innerPk, innerVk, err := groth16.Setup(r1cs) - if err != nil { - t.Fatal(err) - - } - - proof, err := groth16.Prove(r1cs, innerPk, pre_witness) - if err != nil { - t.Fatal(err) - - } - - publicWitness, err := pre_witness.Public() - if err != nil { - t.Fatal(err) - - } - - // Check that proof verifies before continuing - if err := groth16.Verify(proof, innerVk, publicWitness); err != nil { - t.Fatal(err) - - } - - var circuit verifierCircuit - circuit.InnerVk.Allocate(innerVk) - - var witness verifierCircuit - witness.InnerProof.Assign(proof) - witness.InnerVk.Assign(innerVk) - witness.Hash = preComputeMimc(preImage) - - assert := test.NewAssert(t) - - assert.ProverSucceeded(&circuit, &witness, test.WithCurves(ecc.BW6_633), test.WithBackends(backend.GROTH16)) - -} - -func BenchmarkCompile(b *testing.B) { - - // create a mock cs: knowing the preimage of a hash using mimc - var MimcCircuit mimcCircuit - _r1cs, err := frontend.Compile(ecc.BLS24_315.ScalarField(), r1cs.NewBuilder, &MimcCircuit) - if err != nil { - b.Fatal(err) - - } - - var pre_assignment mimcCircuit - pre_assignment.PreImage = preImage - pre_assignment.Hash = publicHash - pre_witness, err := frontend.NewWitness(&pre_assignment, ecc.BLS24_315.ScalarField()) - if err != nil { - b.Fatal(err) - - } - - innerPk, innerVk, err := groth16.Setup(_r1cs) - if err != nil { - b.Fatal(err) - - } - - proof, err := groth16.Prove(_r1cs, innerPk, pre_witness) - if err != nil { - b.Fatal(err) - - } - - publicWitness, err := pre_witness.Public() - if err != nil { - b.Fatal(err) - - } - - // Check that proof verifies before continuing - if err := groth16.Verify(proof, innerVk, publicWitness); err != nil { - b.Fatal(err) - - } - - var circuit verifierCircuit - circuit.InnerVk.Allocate(innerVk) - - var ccs constraint.ConstraintSystem - b.ResetTimer() - for i := 0; i < b.N; i++ { - ccs, err = frontend.Compile(ecc.BW6_633.ScalarField(), r1cs.NewBuilder, &circuit) - if err != nil { - b.Fatal(err) - - } - - } - - b.Log(ccs.GetNbConstraints()) - -} - -var tVariable reflect.Type - -func init() { - tVariable = reflect.ValueOf(struct{ A frontend.Variable }{}).FieldByName("A").Type() - -} diff --git a/std/lookup/logderivlookup/doc_test.go b/std/lookup/logderivlookup/doc_test.go index e8ab1a01e4..3fa0c193a3 100644 --- a/std/lookup/logderivlookup/doc_test.go +++ b/std/lookup/logderivlookup/doc_test.go @@ -47,44 +47,27 @@ func Example() { ccs, err := frontend.Compile(field, r1cs.NewBuilder, &LookupCircuit{}) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } - // Output: - // compiled - // setup done - // secret witness - // public witness - // proof - // verify + fmt.Println("done") + // Output: done } diff --git a/std/math/cmp/doc_isless_test.go b/std/math/cmp/doc_isless_test.go index 760656e687..7a25ccfcdf 100644 --- a/std/math/cmp/doc_isless_test.go +++ b/std/math/cmp/doc_isless_test.go @@ 
-2,12 +2,13 @@ package cmp_test import ( "fmt" + "math/big" + "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" "github.com/consensys/gnark/std/math/cmp" - "math/big" ) // sortCheckerCircuit is a circuit that uses BoundedComparator.IsLess method to @@ -42,43 +43,27 @@ func ExampleBoundedComparator_IsLess() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } - // Output: compiled - // setup done - // secret witness - // public witness - // proof - // verify + fmt.Println("done") + // Output: done } diff --git a/std/math/emulated/doc_example_field_test.go b/std/math/emulated/doc_example_field_test.go index 338c18b4ec..4be4f1f37d 100644 --- a/std/math/emulated/doc_example_field_test.go +++ b/std/math/emulated/doc_example_field_test.go @@ -41,43 +41,27 @@ func ExampleField() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } witnessData, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness parsed") } publicWitnessData, err := witnessData.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness parsed") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } proof, err := groth16.Prove(ccs, pk, witnessData, backend.WithSolverOptions(solver.WithHints(emulated.GetHints()...))) if err != nil { panic(err) - } else { - fmt.Println("proved") } err = groth16.Verify(proof, vk, publicWitnessData) if err != nil { panic(err) - } else { - fmt.Println("verified") } - // Output: compiled - // secret witness parsed - // public witness parsed - // setup done - // proved - // verified + fmt.Println("done") + // Output: done } diff --git a/std/math/emulated/emparams/emparams.go b/std/math/emulated/emparams/emparams.go index 9b888d6a4d..01088ee435 100644 --- a/std/math/emulated/emparams/emparams.go +++ b/std/math/emulated/emparams/emparams.go @@ -31,6 +31,12 @@ func (sixLimbPrimeField) NbLimbs() uint { return 6 } func (sixLimbPrimeField) BitsPerLimb() uint { return 64 } func (sixLimbPrimeField) IsPrime() bool { return true } +type twelveLimbPrimeField struct{} + +func (twelveLimbPrimeField) NbLimbs() uint { return 12 } +func (twelveLimbPrimeField) BitsPerLimb() uint { return 64 } +func (twelveLimbPrimeField) IsPrime() bool { return true } + // Goldilocks provides type parametrization for field emulation: // - limbs: 1 // - limb width: 64 bits @@ -199,3 +205,31 @@ func (P384Fp) Modulus() *big.Int { return elliptic.P384().Params().P } type P384Fr struct{ sixLimbPrimeField } func (P384Fr) Modulus() *big.Int { return elliptic.P384().Params().N } + +// BW6761Fp provides type 
parametrization for field emulation: +// - limbs: 12 +// - limb width: 64 bits +// +// The prime modulus for type parametrisation is: +// +// 0x122e824fb83ce0ad187c94004faff3eb926186a81d14688528275ef8087be41707ba638e584e91903cebaff25b423048689c8ed12f9fd9071dcd3dc73ebff2e98a116c25667a8f8160cf8aeeaf0a437e6913e6870000082f49d00000000008b (base 16) +// 6891450384315732539396789682275657542479668912536150109513790160209623422243491736087683183289411687640864567753786613451161759120554247759349511699125301598951605099378508850372543631423596795951899700429969112842764913119068299 (base 10) +// +// This is the base field of the BW6-761 curve. +type BW6761Fp struct{ twelveLimbPrimeField } + +func (fp BW6761Fp) Modulus() *big.Int { return ecc.BW6_761.BaseField() } + +// BW6761Fr provides type parametrization for field emulation: +// - limbs: 6 +// - limb width: 64 bits +// +// The prime modulus for type parametrisation is: +// +// 0x1ae3a4617c510eac63b05c06ca1493b1a22d9f300f5138f1ef3622fba094800170b5d44300000008508c00000000001 (base 16) +// 258664426012969094010652733694893533536393512754914660539884262666720468348340822774968888139573360124440321458177 (base 10) +// +// This is the scalar field of the BW6-761 curve. +type BW6761Fr struct{ sixLimbPrimeField } + +func (fp BW6761Fr) Modulus() *big.Int { return ecc.BW6_761.ScalarField() } diff --git a/std/math/emulated/field_hint_test.go b/std/math/emulated/field_hint_test.go index 9f2d286ec8..4fda7551b2 100644 --- a/std/math/emulated/field_hint_test.go +++ b/std/math/emulated/field_hint_test.go @@ -74,43 +74,27 @@ func ExampleField_NewHint() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } witnessData, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness parsed") } publicWitnessData, err := witnessData.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness parsed") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } proof, err := groth16.Prove(ccs, pk, witnessData, backend.WithSolverOptions(solver.WithHints(HintExample))) if err != nil { panic(err) - } else { - fmt.Println("proved") } err = groth16.Verify(proof, vk, publicWitnessData) if err != nil { panic(err) - } else { - fmt.Println("verified") } - // Output: compiled - // secret witness parsed - // public witness parsed - // setup done - // proved - // verified + fmt.Println("done") + // Output: done } diff --git a/std/math/emulated/params.go b/std/math/emulated/params.go index fc3d8abfc1..892d141d38 100644 --- a/std/math/emulated/params.go +++ b/std/math/emulated/params.go @@ -37,4 +37,6 @@ type ( P256Fr = emparams.P256Fr P384Fp = emparams.P384Fp P384Fr = emparams.P384Fr + BW6761Fp = emparams.BW6761Fp + BW6761Fr = emparams.BW6761Fr ) diff --git a/std/multicommit/doc_test.go b/std/multicommit/doc_test.go index 6d031facf6..b7860932b3 100644 --- a/std/multicommit/doc_test.go +++ b/std/multicommit/doc_test.go @@ -61,44 +61,27 @@ func ExampleWithCommitment() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&assignment, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } 
publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } - // Output: - // compiled - // setup done - // secret witness - // public witness - // proof - // verify + fmt.Println("done") + // Output: done } diff --git a/std/recursion/doc.go b/std/recursion/doc.go new file mode 100644 index 0000000000..ea132ae717 --- /dev/null +++ b/std/recursion/doc.go @@ -0,0 +1,2 @@ +// Package recursion provides in-circuit verifiers for different proofs systems. +package recursion diff --git a/std/recursion/groth16/doc.go b/std/recursion/groth16/doc.go new file mode 100644 index 0000000000..23a3afdfc8 --- /dev/null +++ b/std/recursion/groth16/doc.go @@ -0,0 +1,2 @@ +// Package groth16 provides in-circuit Groth16 verifier. +package groth16 diff --git a/std/recursion/groth16/native_doc_test.go b/std/recursion/groth16/native_doc_test.go new file mode 100644 index 0000000000..75814eec74 --- /dev/null +++ b/std/recursion/groth16/native_doc_test.go @@ -0,0 +1,85 @@ +package groth16_test + +import ( + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + stdgroth16 "github.com/consensys/gnark/std/recursion/groth16" +) + +// Example of verifying recursively BLS12-377 Groth16 proof in BW6-761 Groth16 +// circuit using chain of curves. It is significantly more efficient than using +// field emulation, but requires a specific chain of inner and outer curves. +func Example_native() { + // compute the proof which we want to verify recursively + innerCcs, innerVK, innerWitness, innerProof := computeInnerProof(ecc.BLS12_377.ScalarField()) + + // initialize the witness elements + circuitVk, err := stdgroth16.ValueOfVerifyingKey[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT](innerVK) + if err != nil { + panic(err) + } + circuitWitness, err := stdgroth16.ValueOfWitness[sw_bls12377.Scalar, sw_bls12377.G1Affine](innerWitness) + if err != nil { + panic(err) + } + circuitProof, err := stdgroth16.ValueOfProof[sw_bls12377.G1Affine, sw_bls12377.G2Affine](innerProof) + if err != nil { + panic(err) + } + + outerAssignment := &OuterCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{ + InnerWitness: circuitWitness, + Proof: circuitProof, + VerifyingKey: circuitVk, + } + + // the witness size depends on the number of public variables. We use the + // compiled inner circuit to deduce the required size for the outer witness + // using functions [stdgroth16.PlaceholderWitness] and + // [stdgroth16.PlaceholderVerifyingKey] + outerCircuit := &OuterCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{ + InnerWitness: stdgroth16.PlaceholderWitness[sw_bls12377.Scalar](innerCcs), + VerifyingKey: stdgroth16.PlaceholderVerifyingKey[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT](innerCcs), + } + + // compile the outer circuit. because we are using 2-chains then the outer + // curve must correspond to the inner curve. For inner BLS12-377 the outer + // curve is BW6-761. 
+ ccs, err := frontend.Compile(ecc.BW6_761.ScalarField(), r1cs.NewBuilder, outerCircuit) + if err != nil { + panic("compile failed: " + err.Error()) + } + + // create Groth16 setup. NB! UNSAFE + pk, vk, err := groth16.Setup(ccs) // UNSAFE! Use MPC + if err != nil { + panic("setup failed: " + err.Error()) + } + + // create prover witness from the assignment + secretWitness, err := frontend.NewWitness(outerAssignment, ecc.BW6_761.ScalarField()) + if err != nil { + panic("secret witness failed: " + err.Error()) + } + + // create public witness from the assignment + publicWitness, err := secretWitness.Public() + if err != nil { + panic("public witness failed: " + err.Error()) + } + + // construct the groth16 proof of verifying Groth16 proof in-circuit + outerProof, err := groth16.Prove(ccs, pk, secretWitness) + if err != nil { + panic("proving failed: " + err.Error()) + } + + // verify the Groth16 proof + err = groth16.Verify(outerProof, vk, publicWitness) + if err != nil { + panic("circuit verification failed: " + err.Error()) + } +} diff --git a/std/recursion/groth16/nonnative_doc_test.go b/std/recursion/groth16/nonnative_doc_test.go new file mode 100644 index 0000000000..19cb2f15d3 --- /dev/null +++ b/std/recursion/groth16/nonnative_doc_test.go @@ -0,0 +1,166 @@ +package groth16_test + +import ( + "fmt" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + stdgroth16 "github.com/consensys/gnark/std/recursion/groth16" +) + +// InnerCircuitNative is the definition of the inner circuit we want to +// recursively verify inside an outer circuit. The circuit proves the knowledge +// of a factorisation of a semiprime. +type InnerCircuitNative struct { + P, Q frontend.Variable + N frontend.Variable `gnark:",public"` +} + +func (c *InnerCircuitNative) Define(api frontend.API) error { + // prove that P*Q == N + res := api.Mul(c.P, c.Q) + api.AssertIsEqual(res, c.N) + // we must also enforce that P != 1 and Q != 1 + api.AssertIsDifferent(c.P, 1) + api.AssertIsDifferent(c.Q, 1) + return nil +} + +// computeInnerProof computes the proof for the inner circuit we want to verify +// recursively. In this example the Groth16 keys are generated on the fly, but +// in practice should be genrated once and using MPC. +func computeInnerProof(field *big.Int) (constraint.ConstraintSystem, groth16.VerifyingKey, witness.Witness, groth16.Proof) { + innerCcs, err := frontend.Compile(field, r1cs.NewBuilder, &InnerCircuitNative{}) + if err != nil { + panic(err) + } + // NB! UNSAFE! Use MPC. 
+ innerPK, innerVK, err := groth16.Setup(innerCcs) + if err != nil { + panic(err) + } + + // inner proof + innerAssignment := &InnerCircuitNative{ + P: 3, + Q: 5, + N: 15, + } + innerWitness, err := frontend.NewWitness(innerAssignment, field) + if err != nil { + panic(err) + } + innerProof, err := groth16.Prove(innerCcs, innerPK, innerWitness) + if err != nil { + panic(err) + } + innerPubWitness, err := innerWitness.Public() + if err != nil { + panic(err) + } + err = groth16.Verify(innerProof, innerVK, innerPubWitness) + if err != nil { + panic(err) + } + return innerCcs, innerVK, innerPubWitness, innerProof +} + +// OuterCircuit is the generic outer circuit which can verify Groth16 proofs +// using field emulation or 2-chains of curves. +type OuterCircuit[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT] struct { + Proof stdgroth16.Proof[G1El, G2El] + VerifyingKey stdgroth16.VerifyingKey[G1El, G2El, GtEl] + InnerWitness stdgroth16.Witness[S] +} + +func (c *OuterCircuit[S, G1El, G2El, GtEl]) Define(api frontend.API) error { + curve, err := algebra.GetCurve[S, G1El](api) + if err != nil { + return fmt.Errorf("new curve: %w", err) + } + pairing, err := algebra.GetPairing[G1El, G2El, GtEl](api) + if err != nil { + return fmt.Errorf("get pairing: %w", err) + } + verifier := stdgroth16.NewVerifier(curve, pairing) + err = verifier.AssertProof(c.VerifyingKey, c.Proof, c.InnerWitness) + return err +} + +// Example of verifying recursively BN254 Groth16 proof in BN254 Groth16 circuit using field emulation +func Example_emulated() { + // compute the proof which we want to verify recursively + innerCcs, innerVK, innerWitness, innerProof := computeInnerProof(ecc.BN254.ScalarField()) + + // initialize the witness elements + circuitVk, err := stdgroth16.ValueOfVerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl](innerVK) + if err != nil { + panic(err) + } + circuitWitness, err := stdgroth16.ValueOfWitness[sw_bn254.Scalar, sw_bn254.G1Affine](innerWitness) + if err != nil { + panic(err) + } + circuitProof, err := stdgroth16.ValueOfProof[sw_bn254.G1Affine, sw_bn254.G2Affine](innerProof) + if err != nil { + panic(err) + } + + outerAssignment := &OuterCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{ + InnerWitness: circuitWitness, + Proof: circuitProof, + VerifyingKey: circuitVk, + } + + // the witness size depends on the number of public variables. We use the + // compiled inner circuit to deduce the required size for the outer witness + // using functions [stdgroth16.PlaceholderWitness] and + // [stdgroth16.PlaceholderVerifyingKey] + outerCircuit := &OuterCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{ + InnerWitness: stdgroth16.PlaceholderWitness[sw_bn254.Scalar](innerCcs), + VerifyingKey: stdgroth16.PlaceholderVerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl](innerCcs), + } + + // compile the outer circuit + ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, outerCircuit) + if err != nil { + panic("compile failed: " + err.Error()) + } + + // create Groth16 setup. NB! UNSAFE + pk, vk, err := groth16.Setup(ccs) // UNSAFE! 
Use MPC + if err != nil { + panic("setup failed: " + err.Error()) + } + + // create prover witness from the assignment + secretWitness, err := frontend.NewWitness(outerAssignment, ecc.BN254.ScalarField()) + if err != nil { + panic("secret witness failed: " + err.Error()) + } + + // create public witness from the assignment + publicWitness, err := secretWitness.Public() + if err != nil { + panic("public witness failed: " + err.Error()) + } + + // construct the groth16 proof of verifying Groth16 proof in-circuit + outerProof, err := groth16.Prove(ccs, pk, secretWitness) + if err != nil { + panic("proving failed: " + err.Error()) + } + + // verify the Groth16 proof + err = groth16.Verify(outerProof, vk, publicWitness) + if err != nil { + panic("circuit verification failed: " + err.Error()) + } +} diff --git a/std/recursion/groth16/verifier.go b/std/recursion/groth16/verifier.go new file mode 100644 index 0000000000..dbf7e51e40 --- /dev/null +++ b/std/recursion/groth16/verifier.go @@ -0,0 +1,303 @@ +package groth16 + +import ( + "fmt" + + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + fr_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" + fr_bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" + fr_bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bn254" + fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark/backend/groth16" + groth16backend_bls12377 "github.com/consensys/gnark/backend/groth16/bls12-377" + groth16backend_bls12381 "github.com/consensys/gnark/backend/groth16/bls12-381" + groth16backend_bls24315 "github.com/consensys/gnark/backend/groth16/bls24-315" + groth16backend_bn254 "github.com/consensys/gnark/backend/groth16/bn254" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bls12381" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/algebra/native/sw_bls24315" + "github.com/consensys/gnark/std/math/emulated" + "github.com/consensys/gnark/std/math/emulated/emparams" +) + +// Proof is a typed Groth16 proof of SNARK. Use [ValueOfProof] to initialize the +// witness from the native proof. +type Proof[G1El algebra.G1ElementT, G2El algebra.G2ElementT] struct { + Ar, Krs G1El + Bs G2El +} + +// ValueOfProof returns the typed witness of the native proof. It returns an +// error if there is a mismatch between the type parameters and the provided +// native proof. 
+func ValueOfProof[G1El algebra.G1ElementT, G2El algebra.G2ElementT](proof groth16.Proof) (Proof[G1El, G2El], error) { + var ret Proof[G1El, G2El] + switch ar := any(&ret).(type) { + case *Proof[sw_bn254.G1Affine, sw_bn254.G2Affine]: + tProof, ok := proof.(*groth16backend_bn254.Proof) + if !ok { + return ret, fmt.Errorf("expected bn254.Proof, got %T", proof) + } + ar.Ar = sw_bn254.NewG1Affine(tProof.Ar) + ar.Krs = sw_bn254.NewG1Affine(tProof.Krs) + ar.Bs = sw_bn254.NewG2Affine(tProof.Bs) + case *Proof[sw_bls12377.G1Affine, sw_bls12377.G2Affine]: + tProof, ok := proof.(*groth16backend_bls12377.Proof) + if !ok { + return ret, fmt.Errorf("expected bls12377.Proof, got %T", proof) + } + ar.Ar = sw_bls12377.NewG1Affine(tProof.Ar) + ar.Krs = sw_bls12377.NewG1Affine(tProof.Krs) + ar.Bs = sw_bls12377.NewG2Affine(tProof.Bs) + case *Proof[sw_bls12381.G1Affine, sw_bls12381.G2Affine]: + tProof, ok := proof.(*groth16backend_bls12381.Proof) + if !ok { + return ret, fmt.Errorf("expected bls12381.Proof, got %T", proof) + } + ar.Ar = sw_bls12381.NewG1Affine(tProof.Ar) + ar.Krs = sw_bls12381.NewG1Affine(tProof.Krs) + ar.Bs = sw_bls12381.NewG2Affine(tProof.Bs) + case *Proof[sw_bls24315.G1Affine, sw_bls24315.G2Affine]: + tProof, ok := proof.(*groth16backend_bls24315.Proof) + if !ok { + return ret, fmt.Errorf("expected bls24315.Proof, got %T", proof) + } + ar.Ar = sw_bls24315.NewG1Affine(tProof.Ar) + ar.Krs = sw_bls24315.NewG1Affine(tProof.Krs) + ar.Bs = sw_bls24315.NewG2Affine(tProof.Bs) + default: + return ret, fmt.Errorf("unknown parametric type combination") + } + return ret, nil +} + +// VerifyingKey is a typed Groth16 verifying key for checking SNARK proofs. For +// witness creation use the method [ValueOfVerifyingKey] and for stub +// placeholder use [PlaceholderVerifyingKey]. +type VerifyingKey[G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT] struct { + E GtEl + G1 struct{ K []G1El } + G2 struct{ GammaNeg, DeltaNeg G2El } +} + +// PlaceholderVerifyingKey returns an empty verifying key for a given compiled +// constraint system. The size of the verifying key depends on the number of +// public inputs and commitments used, this method allocates sufficient space +// regardless of the actual verifying key. +func PlaceholderVerifyingKey[G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT](ccs constraint.ConstraintSystem) VerifyingKey[G1El, G2El, GtEl] { + return VerifyingKey[G1El, G2El, GtEl]{ + G1: struct{ K []G1El }{ + K: make([]G1El, ccs.GetNbPublicVariables()), + }, + } +} + +// ValueOfVerifyingKey initializes witness from the given Groth16 verifying key. +// It returns an error if there is a mismatch between the type parameters and +// the provided native verifying key. 
+func ValueOfVerifyingKey[G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT](vk groth16.VerifyingKey) (VerifyingKey[G1El, G2El, GtEl], error) { + var ret VerifyingKey[G1El, G2El, GtEl] + switch s := any(&ret).(type) { + case *VerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]: + tVk, ok := vk.(*groth16backend_bn254.VerifyingKey) + if !ok { + return ret, fmt.Errorf("expected bn254.VerifyingKey, got %T", vk) + } + // compute E + e, err := bn254.Pair([]bn254.G1Affine{tVk.G1.Alpha}, []bn254.G2Affine{tVk.G2.Beta}) + if err != nil { + return ret, fmt.Errorf("precompute pairing: %w", err) + } + s.E = sw_bn254.NewGTEl(e) + s.G1.K = make([]sw_bn254.G1Affine, len(tVk.G1.K)) + for i := range s.G1.K { + s.G1.K[i] = sw_bn254.NewG1Affine(tVk.G1.K[i]) + } + var deltaNeg, gammaNeg bn254.G2Affine + deltaNeg.Neg(&tVk.G2.Delta) + gammaNeg.Neg(&tVk.G2.Gamma) + s.G2.DeltaNeg = sw_bn254.NewG2Affine(deltaNeg) + s.G2.GammaNeg = sw_bn254.NewG2Affine(gammaNeg) + case *VerifyingKey[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]: + tVk, ok := vk.(*groth16backend_bls12377.VerifyingKey) + if !ok { + return ret, fmt.Errorf("expected bn254.VerifyingKey, got %T", vk) + } + // compute E + e, err := bls12377.Pair([]bls12377.G1Affine{tVk.G1.Alpha}, []bls12377.G2Affine{tVk.G2.Beta}) + if err != nil { + return ret, fmt.Errorf("precompute pairing: %w", err) + } + s.E = sw_bls12377.NewGTEl(e) + s.G1.K = make([]sw_bls12377.G1Affine, len(tVk.G1.K)) + for i := range s.G1.K { + s.G1.K[i] = sw_bls12377.NewG1Affine(tVk.G1.K[i]) + } + var deltaNeg, gammaNeg bls12377.G2Affine + deltaNeg.Neg(&tVk.G2.Delta) + gammaNeg.Neg(&tVk.G2.Gamma) + s.G2.DeltaNeg = sw_bls12377.NewG2Affine(deltaNeg) + s.G2.GammaNeg = sw_bls12377.NewG2Affine(gammaNeg) + case *VerifyingKey[sw_bls12381.G1Affine, sw_bls12381.G2Affine, sw_bls12381.GTEl]: + tVk, ok := vk.(*groth16backend_bls12381.VerifyingKey) + if !ok { + return ret, fmt.Errorf("expected bls12381.VerifyingKey, got %T", vk) + } + // compute E + e, err := bls12381.Pair([]bls12381.G1Affine{tVk.G1.Alpha}, []bls12381.G2Affine{tVk.G2.Beta}) + if err != nil { + return ret, fmt.Errorf("precompute pairing: %w", err) + } + s.E = sw_bls12381.NewGTEl(e) + s.G1.K = make([]sw_bls12381.G1Affine, len(tVk.G1.K)) + for i := range s.G1.K { + s.G1.K[i] = sw_bls12381.NewG1Affine(tVk.G1.K[i]) + } + var deltaNeg, gammaNeg bls12381.G2Affine + deltaNeg.Neg(&tVk.G2.Delta) + gammaNeg.Neg(&tVk.G2.Gamma) + s.G2.DeltaNeg = sw_bls12381.NewG2Affine(deltaNeg) + s.G2.GammaNeg = sw_bls12381.NewG2Affine(gammaNeg) + case *VerifyingKey[sw_bls24315.G1Affine, sw_bls24315.G2Affine, sw_bls24315.GT]: + tVk, ok := vk.(*groth16backend_bls24315.VerifyingKey) + if !ok { + return ret, fmt.Errorf("expected bls12381.VerifyingKey, got %T", vk) + } + // compute E + e, err := bls24315.Pair([]bls24315.G1Affine{tVk.G1.Alpha}, []bls24315.G2Affine{tVk.G2.Beta}) + if err != nil { + return ret, fmt.Errorf("precompute pairing: %w", err) + } + s.E = sw_bls24315.NewGTEl(e) + s.G1.K = make([]sw_bls24315.G1Affine, len(tVk.G1.K)) + for i := range s.G1.K { + s.G1.K[i] = sw_bls24315.NewG1Affine(tVk.G1.K[i]) + } + var deltaNeg, gammaNeg bls24315.G2Affine + deltaNeg.Neg(&tVk.G2.Delta) + gammaNeg.Neg(&tVk.G2.Gamma) + s.G2.DeltaNeg = sw_bls24315.NewG2Affine(deltaNeg) + s.G2.GammaNeg = sw_bls24315.NewG2Affine(gammaNeg) + default: + return ret, fmt.Errorf("unknown parametric type combination") + } + return ret, nil +} + +// Witness is a public witness to verify the SNARK proof against. 
For assigning +// witness use [ValueOfWitness] and to create stub witness for compiling use +// [PlaceholderWitness]. +type Witness[S algebra.ScalarT] struct { + // Public is the public inputs. The first element does not need to be one + // wire and is added implicitly during verification. + Public []S +} + +// PlaceholderWitness creates a stub witness which can be used to allocate the +// variables in the circuit if the actual witness is not yet known. It takes +// into account the number of public inputs and number of used commitments. +func PlaceholderWitness[S algebra.ScalarT](ccs constraint.ConstraintSystem) Witness[S] { + return Witness[S]{ + Public: make([]S, ccs.GetNbPublicVariables()-1), + } +} + +// ValueOfWitness assigns a outer-circuit witness from the inner circuit +// witness. If there is a field mismatch then this method represents the witness +// inputs using field emulation. It returns an error if there is a mismatch +// between the type parameters and provided witness. +func ValueOfWitness[S algebra.ScalarT, G1 algebra.G1ElementT](w witness.Witness) (Witness[S], error) { + type dbwtiness[S algebra.ScalarT, G1 algebra.G1ElementT] struct { + W Witness[S] + } + var ret dbwtiness[S, G1] + pubw, err := w.Public() + if err != nil { + return ret.W, fmt.Errorf("get public witness: %w", err) + } + vec := pubw.Vector() + switch s := any(&ret).(type) { + case *dbwtiness[emulated.Element[emparams.BN254Fr], sw_bn254.G1Affine]: + vect, ok := vec.(fr_bn254.Vector) + if !ok { + return ret.W, fmt.Errorf("expected fr_bn254.Vector, got %T", vec) + } + for i := range vect { + s.W.Public = append(s.W.Public, emulated.ValueOf[emparams.BN254Fr](vect[i])) + } + case *dbwtiness[sw_bls12377.Scalar, sw_bls12377.G1Affine]: + vect, ok := vec.(fr_bls12377.Vector) + if !ok { + return ret.W, fmt.Errorf("expected fr_bls12377.Vector, got %T", vec) + } + for i := range vect { + s.W.Public = append(s.W.Public, vect[i].String()) + } + case *dbwtiness[emulated.Element[emparams.BLS12381Fr], sw_bls12381.G1Affine]: + vect, ok := vec.(fr_bls12381.Vector) + if !ok { + return ret.W, fmt.Errorf("expected fr_bls12381.Vector, got %T", vec) + } + for i := range vect { + s.W.Public = append(s.W.Public, emulated.ValueOf[emparams.BLS12381Fr](vect[i])) + } + case *dbwtiness[sw_bls24315.Scalar, sw_bls24315.G1Affine]: + vect, ok := vec.(fr_bls24315.Vector) + if !ok { + return ret.W, fmt.Errorf("expected fr_bls24315.Vector, got %T", vec) + } + for i := range vect { + s.W.Public = append(s.W.Public, vect[i].String()) + } + default: + return ret.W, fmt.Errorf("unknown parametric type combination") + } + return ret.W, nil +} + +// Verifier verifies Groth16 proofs. +type Verifier[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT] struct { + curve algebra.Curve[S, G1El] + pairing algebra.Pairing[G1El, G2El, GtEl] +} + +// NewVerifier returns a new [Verifier] instance using the curve and pairing +// interfaces. Use methods [algebra.GetCurve] and [algebra.GetPairing] to +// initialize the instances. +func NewVerifier[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT](curve algebra.Curve[S, G1El], pairing algebra.Pairing[G1El, G2El, GtEl]) *Verifier[S, G1El, G2El, GtEl] { + return &Verifier[S, G1El, G2El, GtEl]{ + curve: curve, + pairing: pairing, + } +} + +// AssertProof asserts that the SNARK proof holds for the given witness and +// verifying key. 
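One convention to keep in mind before reading the verifier logic below: ccs.GetNbPublicVariables() counts the constant one wire, so Witness.Public (like the native public witness vector it is filled from) is one element shorter, and AssertProof re-adds the one-wire term through vk.G1.K[0]. Illustrative sizes for an inner circuit with a single public input, such as InnerCircuitNative in the tests further down:

```go
// Sketch only; innerCcs is the compiled inner circuit whose public variables
// are {one wire, N}, i.e. innerCcs.GetNbPublicVariables() == 2.
w := PlaceholderWitness[sw_bn254.Scalar](innerCcs)                                            // len(w.Public) == 1
vk := PlaceholderVerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl](innerCcs)  // len(vk.G1.K) == 2
// AssertProof multiplies vk.G1.K[1:] by w.Public in an MSM and then adds the
// one-wire term vk.G1.K[0] to the result.
_, _ = w, vk
```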
+func (v *Verifier[S, G1El, G2El, GtEl]) AssertProof(vk VerifyingKey[G1El, G2El, GtEl], proof Proof[G1El, G2El], witness Witness[S]) error { + inP := make([]*G1El, len(vk.G1.K)-1) // first is for the one wire, we add it manually after MSM + for i := range inP { + inP[i] = &vk.G1.K[i+1] + } + inS := make([]*S, len(witness.Public)) + for i := range inS { + inS[i] = &witness.Public[i] + } + kSum, err := v.curve.MultiScalarMul(inP, inS) + if err != nil { + return fmt.Errorf("multi scalar mul: %w", err) + } + kSum = v.curve.Add(kSum, &vk.G1.K[0]) + pairing, err := v.pairing.Pair([]*G1El{kSum, &proof.Krs, &proof.Ar}, []*G2El{&vk.G2.GammaNeg, &vk.G2.DeltaNeg, &proof.Bs}) + if err != nil { + return fmt.Errorf("pairing: %w", err) + } + v.pairing.AssertIsEqual(pairing, &vk.E) + return nil +} diff --git a/std/recursion/groth16/verifier_test.go b/std/recursion/groth16/verifier_test.go new file mode 100644 index 0000000000..c04c90b67b --- /dev/null +++ b/std/recursion/groth16/verifier_test.go @@ -0,0 +1,311 @@ +package groth16 + +import ( + "crypto/sha256" + "fmt" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381" + bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark/backend/groth16" + groth16backend_bls12377 "github.com/consensys/gnark/backend/groth16/bls12-377" + groth16backend_bls12381 "github.com/consensys/gnark/backend/groth16/bls12-381" + groth16backend_bls24315 "github.com/consensys/gnark/backend/groth16/bls24-315" + groth16backend_bn254 "github.com/consensys/gnark/backend/groth16/bn254" + "github.com/consensys/gnark/backend/witness" + "github.com/consensys/gnark/constraint" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bls12381" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/algebra/native/sw_bls24315" + "github.com/consensys/gnark/std/hash/sha2" + "github.com/consensys/gnark/std/math/emulated" + "github.com/consensys/gnark/std/math/emulated/emparams" + "github.com/consensys/gnark/std/math/uints" + "github.com/consensys/gnark/test" +) + +// TODO: placeholder circuits for when we have implemented commitment verification for the verifier. 
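For readers mapping the AssertProof method above onto the Groth16 paper: with E = e(α, β) precomputed and γ, δ stored negated, the single multi-pairing it checks is

    e(K₀ + Σᵢ aᵢ·Kᵢ, −γ) · e(Krs, −δ) · e(Ar, Bs) = e(α, β) = E,

where the aᵢ are the public inputs and (Ar, Bs, Krs) play the roles of (A, B, C). This is the usual verification equation e(A, B) = e(α, β) · e(Σᵢ aᵢ·Kᵢ, γ) · e(C, δ), rearranged by negating the G2 arguments so that everything except E sits in one multi-pairing.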
+type InnerCircuitSHA2 struct { + PreImage [9]uints.U8 + Digest [32]uints.U8 `gnark:",public"` +} + +func (c *InnerCircuitSHA2) Define(api frontend.API) error { + h, err := sha2.New(api) + if err != nil { + return fmt.Errorf("new sha2: %w", err) + } + h.Write(c.PreImage[:]) + dgst := h.Sum() + if len(dgst) != len(c.Digest) { + return fmt.Errorf("wrong digest size") + } + uapi, err := uints.New[uints.U32](api) + if err != nil { + return fmt.Errorf("new uints api: %w", err) + } + for i := range dgst { + uapi.ByteAssertEq(dgst[i], c.Digest[i]) + } + return nil +} + +type InnerCircuitEmulation struct { + P, Q emulated.Element[emparams.Goldilocks] + N emulated.Element[emparams.Goldilocks] `gnark:",public"` +} + +func (c *InnerCircuitEmulation) Define(api frontend.API) error { + f, err := emulated.NewField[emparams.Goldilocks](api) + if err != nil { + return err + } + res := f.Mul(&c.P, &c.Q) + f.AssertIsEqual(res, &c.N) + return nil +} + +type InnerCircuitNative struct { + P, Q frontend.Variable + N frontend.Variable `gnark:",public"` +} + +func (c *InnerCircuitNative) Define(api frontend.API) error { + res := api.Mul(c.P, c.Q) + api.AssertIsEqual(res, c.N) + return nil +} + +func getInner(assert *test.Assert, field *big.Int) (constraint.ConstraintSystem, groth16.VerifyingKey, witness.Witness, groth16.Proof) { + innerCcs, err := frontend.Compile(field, r1cs.NewBuilder, &InnerCircuitNative{}) + assert.NoError(err) + innerPK, innerVK, err := groth16.Setup(innerCcs) + assert.NoError(err) + + // inner proof + innerAssignment := &InnerCircuitNative{ + P: 3, + Q: 5, + N: 15, + } + innerWitness, err := frontend.NewWitness(innerAssignment, field) + assert.NoError(err) + innerProof, err := groth16.Prove(innerCcs, innerPK, innerWitness) + assert.NoError(err) + innerPubWitness, err := innerWitness.Public() + assert.NoError(err) + err = groth16.Verify(innerProof, innerVK, innerPubWitness) + assert.NoError(err) + return innerCcs, innerVK, innerPubWitness, innerProof +} + +type OuterCircuit[S algebra.ScalarT, G1El algebra.G1ElementT, G2El algebra.G2ElementT, GtEl algebra.GtElementT] struct { + Proof Proof[G1El, G2El] + VerifyingKey VerifyingKey[G1El, G2El, GtEl] + InnerWitness Witness[S] +} + +func (c *OuterCircuit[S, G1El, G2El, GtEl]) Define(api frontend.API) error { + curve, err := algebra.GetCurve[S, G1El](api) + if err != nil { + return fmt.Errorf("new curve: %w", err) + } + pairing, err := algebra.GetPairing[G1El, G2El, GtEl](api) + if err != nil { + return fmt.Errorf("get pairing: %w", err) + } + verifier := NewVerifier(curve, pairing) + err = verifier.AssertProof(c.VerifyingKey, c.Proof, c.InnerWitness) + return err +} + +func TestBN254InBN254(t *testing.T) { + assert := test.NewAssert(t) + innerCcs, innerVK, innerWitness, innerProof := getInner(assert, ecc.BN254.ScalarField()) + + // outer proof + circuitVk, err := ValueOfVerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl](innerVK) + assert.NoError(err) + circuitWitness, err := ValueOfWitness[sw_bn254.Scalar, sw_bn254.G1Affine](innerWitness) + assert.NoError(err) + circuitProof, err := ValueOfProof[sw_bn254.G1Affine, sw_bn254.G2Affine](innerProof) + assert.NoError(err) + + outerCircuit := &OuterCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl]{ + InnerWitness: PlaceholderWitness[sw_bn254.Scalar](innerCcs), + VerifyingKey: PlaceholderVerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl](innerCcs), + } + outerAssignment := &OuterCircuit[sw_bn254.Scalar, sw_bn254.G1Affine, sw_bn254.G2Affine, 
sw_bn254.GTEl]{ + InnerWitness: circuitWitness, + Proof: circuitProof, + VerifyingKey: circuitVk, + } + assert.CheckCircuit(outerCircuit, test.WithValidAssignment(outerAssignment)) +} + +func TestBLS12InBW6(t *testing.T) { + assert := test.NewAssert(t) + innerCcs, innerVK, innerWitness, innerProof := getInner(assert, ecc.BLS12_377.ScalarField()) + + // outer proof + circuitVk, err := ValueOfVerifyingKey[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT](innerVK) + assert.NoError(err) + circuitWitness, err := ValueOfWitness[sw_bls12377.Scalar, sw_bls12377.G1Affine](innerWitness) + assert.NoError(err) + circuitProof, err := ValueOfProof[sw_bls12377.G1Affine, sw_bls12377.G2Affine](innerProof) + assert.NoError(err) + + outerCircuit := &OuterCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{ + InnerWitness: PlaceholderWitness[sw_bls12377.Scalar](innerCcs), + VerifyingKey: PlaceholderVerifyingKey[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT](innerCcs), + } + outerAssignment := &OuterCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT]{ + InnerWitness: circuitWitness, + Proof: circuitProof, + VerifyingKey: circuitVk, + } + assert.CheckCircuit(outerCircuit, test.WithValidAssignment(outerAssignment), test.WithCurves(ecc.BW6_761)) +} + +func getPreimageAndDigest() (preimage [9]byte, digest [32]byte) { + copy(preimage[:], []byte("recursion")) + digest = sha256.Sum256(preimage[:]) + return +} + +type WitnessCircut struct { + A emulated.Element[emparams.Secp256k1Fr] `gnark:",public"` +} + +func (c *WitnessCircut) Define(frontend.API) error { return nil } + +func TestValueOfWitness(t *testing.T) { + assignment := WitnessCircut{ + A: emulated.ValueOf[emparams.Secp256k1Fr]("1234"), + } + assert := test.NewAssert(t) + assert.Run(func(assert *test.Assert) { + w, err := frontend.NewWitness(&assignment, ecc.BN254.ScalarField()) + assert.NoError(err) + ww, err := ValueOfWitness[sw_bn254.Scalar, sw_bn254.G1Affine](w) + assert.NoError(err) + _ = ww + }, "bn254") + assert.Run(func(assert *test.Assert) { + w, err := frontend.NewWitness(&assignment, ecc.BLS12_377.ScalarField()) + assert.NoError(err) + ww, err := ValueOfWitness[sw_bls12377.Scalar, sw_bls12377.G1Affine](w) + assert.NoError(err) + _ = ww + }, "bls12377") + assert.Run(func(assert *test.Assert) { + w, err := frontend.NewWitness(&assignment, ecc.BLS12_381.ScalarField()) + assert.NoError(err) + ww, err := ValueOfWitness[sw_bls12381.Scalar, sw_bls12381.G1Affine](w) + assert.NoError(err) + _ = ww + }, "bls12381") + assert.Run(func(assert *test.Assert) { + w, err := frontend.NewWitness(&assignment, ecc.BLS24_315.ScalarField()) + assert.NoError(err) + ww, err := ValueOfWitness[sw_bls24315.Scalar, sw_bls24315.G1Affine](w) + assert.NoError(err) + _ = ww + }, "bls24315") +} + +func TestValueOfProof(t *testing.T) { + assert := test.NewAssert(t) + assert.Run(func(assert *test.Assert) { + _, _, G1, G2 := bn254.Generators() + proof := groth16backend_bn254.Proof{ + Ar: G1, + Krs: G1, + Bs: G2, + } + assignment, err := ValueOfProof[sw_bn254.G1Affine, sw_bn254.G2Affine](&proof) + assert.NoError(err) + _ = assignment + }, "bn254") + assert.Run(func(assert *test.Assert) { + _, _, G1, G2 := bls12377.Generators() + proof := groth16backend_bls12377.Proof{ + Ar: G1, + Krs: G1, + Bs: G2, + } + assignment, err := ValueOfProof[sw_bls12377.G1Affine, sw_bls12377.G2Affine](&proof) + assert.NoError(err) + _ = assignment + }, "bls12377") + assert.Run(func(assert *test.Assert) { + _, _, 
G1, G2 := bls12381.Generators() + proof := groth16backend_bls12381.Proof{ + Ar: G1, + Krs: G1, + Bs: G2, + } + assignment, err := ValueOfProof[sw_bls12381.G1Affine, sw_bls12381.G2Affine](&proof) + assert.NoError(err) + _ = assignment + }, "bls12381") + assert.Run(func(assert *test.Assert) { + _, _, G1, G2 := bls24315.Generators() + proof := groth16backend_bls24315.Proof{ + Ar: G1, + Krs: G1, + Bs: G2, + } + assignment, err := ValueOfProof[sw_bls24315.G1Affine, sw_bls24315.G2Affine](&proof) + assert.NoError(err) + _ = assignment + }, "bls24315") +} + +func TestValueOfVerifyingKey(t *testing.T) { + assert := test.NewAssert(t) + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &WitnessCircut{}) + assert.NoError(err) + _, vk, err := groth16.Setup(ccs) + assert.NoError(err) + vvk, err := ValueOfVerifyingKey[sw_bn254.G1Affine, sw_bn254.G2Affine, sw_bn254.GTEl](vk) + assert.NoError(err) + _ = vvk + }, "bn254") + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(ecc.BLS12_377.ScalarField(), r1cs.NewBuilder, &WitnessCircut{}) + assert.NoError(err) + _, vk, err := groth16.Setup(ccs) + assert.NoError(err) + vvk, err := ValueOfVerifyingKey[sw_bls12377.G1Affine, sw_bls12377.G2Affine, sw_bls12377.GT](vk) + assert.NoError(err) + _ = vvk + }, "bls12377") + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(ecc.BLS12_381.ScalarField(), r1cs.NewBuilder, &WitnessCircut{}) + assert.NoError(err) + _, vk, err := groth16.Setup(ccs) + assert.NoError(err) + vvk, err := ValueOfVerifyingKey[sw_bls12381.G1Affine, sw_bls12381.G2Affine, sw_bls12381.GTEl](vk) + assert.NoError(err) + _ = vvk + }, "bls12381") + assert.Run(func(assert *test.Assert) { + ccs, err := frontend.Compile(ecc.BLS24_315.ScalarField(), r1cs.NewBuilder, &WitnessCircut{}) + assert.NoError(err) + _, vk, err := groth16.Setup(ccs) + assert.NoError(err) + vvk, err := ValueOfVerifyingKey[sw_bls24315.G1Affine, sw_bls24315.G2Affine, sw_bls24315.GT](vk) + assert.NoError(err) + _ = vvk + }, "bls24315") +} diff --git a/std/recursion/wrapped_hash.go b/std/recursion/wrapped_hash.go new file mode 100644 index 0000000000..1565608576 --- /dev/null +++ b/std/recursion/wrapped_hash.go @@ -0,0 +1,240 @@ +package recursion + +import ( + "bytes" + "fmt" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + cryptomimc "github.com/consensys/gnark-crypto/hash" + "github.com/consensys/gnark/frontend" + stdhash "github.com/consensys/gnark/std/hash" + "github.com/consensys/gnark/std/hash/mimc" + "github.com/consensys/gnark/std/math/bits" + "golang.org/x/exp/slices" +) + +type shortNativeHash struct { + wrapped hash.Hash + + outSize int + bitBlockSize int + + ringBuf *bytes.Buffer + buf []byte +} + +// NewShort returns a native hash function which reads elements in the current native +// field and outputs element in the target field (usually the scalar field of +// the circuit being recursed). The hash function is based on MiMC and +// partitions the excess bits to not overflow the target field. 
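To make the intent of NewShort concrete, here is a minimal sketch, assuming the BW6-761 to BLS12-377 field pair used by the tests further down: hashing happens with the MiMC instance of the current (outer) field, and the digest is cut one byte short of the target modulus size so it always parses as a target-field element.

```go
package main

import (
	"fmt"

	"github.com/consensys/gnark-crypto/ecc"
	fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr"
	"github.com/consensys/gnark/std/recursion"
)

func main() {
	// Short hash over the BW6-761 scalar field, producing digests that fit
	// the BLS12-377 scalar field.
	h, err := recursion.NewShort(ecc.BW6_761.ScalarField(), ecc.BLS12_377.ScalarField())
	if err != nil {
		panic(err)
	}
	var x fr_bw6761.Element
	x.SetRandom()
	h.Write(x.Marshal())
	// The digest is one byte shorter than the BLS12-377 modulus, so it is
	// always a valid scalar of the inner field.
	fmt.Printf("digest: %x\n", h.Sum(nil))
}
```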
+func NewShort(current, target *big.Int) (hash.Hash, error) { + var h cryptomimc.Hash + var bitBlockSize int + switch current.String() { + case ecc.BN254.ScalarField().String(): + h = cryptomimc.MIMC_BN254 + bitBlockSize = ecc.BN254.ScalarField().BitLen() + case ecc.BLS12_381.ScalarField().String(): + h = cryptomimc.MIMC_BLS12_381 + bitBlockSize = ecc.BLS12_381.ScalarField().BitLen() + case ecc.BLS12_377.ScalarField().String(): + h = cryptomimc.MIMC_BLS12_377 + bitBlockSize = ecc.BLS12_377.ScalarField().BitLen() + case ecc.BLS12_378.ScalarField().String(): + h = cryptomimc.MIMC_BLS12_378 + bitBlockSize = ecc.BLS12_378.ScalarField().BitLen() + case ecc.BW6_761.ScalarField().String(): + h = cryptomimc.MIMC_BW6_761 + bitBlockSize = ecc.BW6_761.ScalarField().BitLen() + case ecc.BLS24_315.ScalarField().String(): + h = cryptomimc.MIMC_BLS24_315 + bitBlockSize = ecc.BLS24_315.ScalarField().BitLen() + case ecc.BLS24_317.ScalarField().String(): + h = cryptomimc.MIMC_BLS24_317 + bitBlockSize = ecc.BLS24_317.ScalarField().BitLen() + case ecc.BW6_633.ScalarField().String(): + h = cryptomimc.MIMC_BW6_633 + bitBlockSize = ecc.BW6_633.ScalarField().BitLen() + case ecc.BW6_756.ScalarField().String(): + h = cryptomimc.MIMC_BW6_756 + bitBlockSize = ecc.BW6_756.ScalarField().BitLen() + default: + return nil, fmt.Errorf("no default mimc for scalar field: %s", current.String()) + } + hh := h.New() + if target.Cmp(current) == 0 { + return hh, nil + } + nbBits := target.BitLen() + if nbBits > current.BitLen() { + nbBits = current.BitLen() + } + return newShortFromParam(hh, bitBlockSize, nbBits), nil +} + +func newShortFromParam(hf hash.Hash, bitBlockSize, outSize int) hash.Hash { + + // TODO: right now assume bitLength is the modulus bit length. We subtract within + return &shortNativeHash{ + wrapped: hf, + outSize: outSize, + bitBlockSize: bitBlockSize, + buf: make([]byte, (bitBlockSize+7)/8), + ringBuf: new(bytes.Buffer), + } +} + +func (h *shortNativeHash) Write(p []byte) (n int, err error) { + // we first write to the buffer. We want to be able to partition the inputs + // into smaller parts and buffer is good to keep track of the excess. + h.ringBuf.Write(p) // nosec: doesnt fail + for h.ringBuf.Len() >= (len(h.buf) - 1) { + // the buffer contains now enough bytes so that we can write it to the + // underlying hash. + h.ringBuf.Read(h.buf[1:]) + h.wrapped.Write(h.buf) + } + return len(p), nil +} + +func (h *shortNativeHash) Sum(b []byte) []byte { + // the cache buffer may contain still something. Write everything into the + // underlying hasher before we digest. + + // zero the buffer we use for transporting bytes from bytes.Buffer to + // underlying hash. Remember that the cache buffer may not be full. + for i := range h.buf { + h.buf[i] = 0 + } + h.ringBuf.Read(h.buf[1:]) + h.wrapped.Write(h.buf) + + // cut the hash a byte short to definitely fit + res := h.wrapped.Sum(nil) + nbBytes := (h.outSize+7)/8 - 1 + res = res[len(res)-nbBytes:] + return append(b, res...) 
+} + +func (h *shortNativeHash) Reset() { + h.ringBuf.Reset() + h.buf = make([]byte, (h.bitBlockSize+7)/8) + h.wrapped.Reset() +} + +func (h *shortNativeHash) Size() int { + return (int(h.outSize) + 6) / 8 +} + +func (h *shortNativeHash) BlockSize() int { + return h.wrapped.BlockSize() +} + +type shortCircuitHash struct { + api frontend.API + outSize int + wrapped stdhash.FieldHasher + buf []frontend.Variable + tmp []frontend.Variable + bitmode bool +} + +func newHashFromParameter(api frontend.API, hf stdhash.FieldHasher, bitLength int, bitmode bool) stdhash.FieldHasher { + tmp := make([]frontend.Variable, ((api.Compiler().FieldBitLen()+7)/8)*8-8) + for i := range tmp { + tmp[i] = 0 + } + return &shortCircuitHash{ + api: api, + outSize: bitLength, + wrapped: hf, + tmp: tmp, + bitmode: bitmode, + } +} + +// NewHash returns a circuit hash function which reads elements in the current +// native field and outputs element in the target field (usually the scalar +// field of the circuit being recursed). The hash function is based on MiMC and +// partitions the excess bits to not overflow the target field. +func NewHash(api frontend.API, target *big.Int, bitmode bool) (stdhash.FieldHasher, error) { + h, err := mimc.NewMiMC(api) + if err != nil { + return nil, fmt.Errorf("get mimc: %w", err) + } + if api.Compiler().Field().Cmp(target) == 0 { + return &h, nil + } + nbBits := target.BitLen() + if nbBits > api.Compiler().FieldBitLen() { + nbBits = api.Compiler().FieldBitLen() + } + return newHashFromParameter(api, &h, nbBits, bitmode), nil +} + +func (h *shortCircuitHash) Sum() frontend.Variable { + // before we compute the digest we have to write the rest of the buffer into + // the underlying hash. We know that we have maximum one variable left, as + // otherwise we would have written in the [Write] method. + + // but first, we have to zero the buffer we use for reversing. The cache + // buffer may not be full and so some bits may be set. + for i := range h.tmp { + h.tmp[i] = 0 + } + copy(h.tmp, h.buf) + slices.Reverse(h.tmp) + v := bits.FromBinary(h.api, h.tmp) + h.wrapped.Write(v) + res := h.wrapped.Sum() + resBts := bits.ToBinary(h.api, res) + res = bits.FromBinary(h.api, resBts[:((h.outSize+7)/8-1)*8]) + return res +} + +func (h *shortCircuitHash) Write(data ...frontend.Variable) { + // tricky part - bits representation is little-endian, i.e. least + // significant bit is at position zero. However, in the native version least + // significant BYTE is at the highest position. When we decompose into bits, + // then we first have to reverse the bits so that when we partition maximum + // number of full bytes out so it would correspond to the native version. + // + // But this means that later we have to reverse again when we recompose. + if h.bitmode { + h.buf = append(h.buf, data...) + } else { + for i := range data { + // h.tmp is maximum full number of bytes. This is one byte less than in + // the native version (the bits are on full number of bytes). Luckily, + // [bits.ToBinary] allows to decompose into arbitrary number of bits. + bts := bits.ToBinary(h.api, data[i], bits.WithNbDigits(len(h.tmp)+8)) + // reverse to be in sync with native version when we later slice + // len(h.tmp) bits. + slices.Reverse(bts) + // store in the buffer. At every round we try to write to the wrapped + // hash as much as possible so the buffer isn't usually very big. + h.buf = append(h.buf, bts...) 
+ } + } + for len(h.buf) >= len(h.tmp) { + // OK, now there is sufficient number of bits we can write to hash + // function. First we take the maximum number of full bytes. + copy(h.tmp, h.buf[:len(h.tmp)]) + // and reverse it so that when recomposing is correct. + slices.Reverse(h.tmp) + v := bits.FromBinary(h.api, h.tmp) + // write to the underlying hash and empty the buffer. + h.wrapped.Write(v) + h.buf = h.buf[len(h.tmp):] + } +} + +func (h *shortCircuitHash) Reset() { + h.buf = nil + for i := range h.tmp { + h.tmp[i] = 0 + } + h.wrapped.Reset() +} diff --git a/std/recursion/wrapped_hash_test.go b/std/recursion/wrapped_hash_test.go new file mode 100644 index 0000000000..f26ec318d1 --- /dev/null +++ b/std/recursion/wrapped_hash_test.go @@ -0,0 +1,259 @@ +package recursion_test + +import ( + "crypto/rand" + "fmt" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + fr_bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315" + fr_bls24315 "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bn254" + fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/std/algebra" + "github.com/consensys/gnark/std/algebra/emulated/sw_bn254" + "github.com/consensys/gnark/std/algebra/native/sw_bls12377" + "github.com/consensys/gnark/std/algebra/native/sw_bls24315" + "github.com/consensys/gnark/std/recursion" + "github.com/consensys/gnark/test" +) + +type shortHashCircuit struct { + Input []frontend.Variable + Output frontend.Variable + inner ecc.ID +} + +func (c *shortHashCircuit) Define(api frontend.API) error { + hasher, err := recursion.NewHash(api, c.inner.ScalarField(), false) + if err != nil { + return err + } + for i := range c.Input { + hasher.Write(c.Input[i]) + } + res := hasher.Sum() + api.AssertIsEqual(c.Output, res) + return nil +} + +func TestShortHash(t *testing.T) { + outerCurves := []ecc.ID{ + ecc.BN254, + ecc.BLS12_381, + ecc.BLS12_377, + ecc.BW6_761, + ecc.BW6_633, + ecc.BLS24_315, + ecc.BLS24_317, + } + innerCurves := []ecc.ID{ + ecc.BN254, + ecc.BLS12_381, + ecc.BLS12_377, + ecc.BW6_761, + ecc.BW6_633, + ecc.BLS24_315, + ecc.BLS24_317, + } + + assert := test.NewAssert(t) + nbInputs := 19 + for _, outer := range outerCurves { + outer := outer + for _, inner := range innerCurves { + inner := inner + assert.Run(func(assert *test.Assert) { + circuit := &shortHashCircuit{Input: make([]frontend.Variable, nbInputs), inner: inner} + h, err := recursion.NewShort(outer.ScalarField(), inner.ScalarField()) + assert.NoError(err) + witness := &shortHashCircuit{Input: make([]frontend.Variable, nbInputs), inner: inner} + buf := make([]byte, (outer.ScalarField().BitLen()+7)/8) + for i := range witness.Input { + el, err := rand.Int(rand.Reader, outer.ScalarField()) + assert.NoError(err) + el.FillBytes(buf) + h.Write(buf) + witness.Input[i] = el + } + res := h.Sum(nil) + witness.Output = res + assert.CheckCircuit(circuit, test.WithCurves(outer), test.WithValidAssignment(witness), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }, outer.String(), inner.String()) + } + } +} + +type hashMarshalG1Circuit[S algebra.ScalarT, G1El algebra.G1ElementT] struct { + Point G1El + Expected frontend.Variable + + target *big.Int +} + +func (c *hashMarshalG1Circuit[S, G1El]) Define(api frontend.API) error { + h, err := 
recursion.NewHash(api, c.target, true) + if err != nil { + return fmt.Errorf("new hash: %w", err) + } + curve, err := algebra.GetCurve[S, G1El](api) + if err != nil { + return fmt.Errorf("get curve: %w", err) + } + marshlled := curve.MarshalG1(c.Point) + h.Write(marshlled...) + res := h.Sum() + api.AssertIsEqual(res, c.Expected) + return nil +} + +func TestHashMarshalG1(t *testing.T) { + assert := test.NewAssert(t) + + assert.Run(func(assert *test.Assert) { + var g bn254.G1Affine + var s fr_bn254.Element + s.SetRandom() + g.ScalarMultiplicationBase(s.BigInt(new(big.Int))) + h, err := recursion.NewShort(ecc.BN254.ScalarField(), ecc.BW6_761.ScalarField()) + assert.NoError(err) + marshalled := g.Marshal() + h.Write(marshalled) + hashed := h.Sum(nil) + circuit := &hashMarshalG1Circuit[sw_bn254.Scalar, sw_bn254.G1Affine]{ + target: ecc.BW6_761.ScalarField(), + } + assignment := &hashMarshalG1Circuit[sw_bn254.Scalar, sw_bn254.G1Affine]{ + Point: sw_bn254.NewG1Affine(g), + Expected: hashed, + target: ecc.BW6_761.ScalarField(), + } + assert.CheckCircuit(circuit, test.WithCurves(ecc.BN254), test.WithValidAssignment(assignment), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }) + assert.Run(func(assert *test.Assert) { + var g bls12377.G1Affine + var s fr_bls12377.Element + s.SetRandom() + g.ScalarMultiplicationBase(s.BigInt(new(big.Int))) + h, err := recursion.NewShort(ecc.BW6_761.ScalarField(), ecc.BLS12_377.ScalarField()) + assert.NoError(err) + marshalled := g.Marshal() + h.Write(marshalled) + hashed := h.Sum(nil) + circuit := &hashMarshalG1Circuit[sw_bls12377.Scalar, sw_bls12377.G1Affine]{ + target: ecc.BLS12_377.ScalarField(), + } + assignment := &hashMarshalG1Circuit[sw_bls12377.Scalar, sw_bls12377.G1Affine]{ + Point: sw_bls12377.NewG1Affine(g), + Expected: hashed, + target: ecc.BLS12_377.ScalarField(), + } + assert.CheckCircuit(circuit, test.WithCurves(ecc.BW6_761), test.WithValidAssignment(assignment), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }) + assert.Run(func(assert *test.Assert) { + var g bls24315.G1Affine + var s fr_bls24315.Element + s.SetRandom() + g.ScalarMultiplicationBase(s.BigInt(new(big.Int))) + h, err := recursion.NewShort(ecc.BW6_633.ScalarField(), ecc.BLS24_315.ScalarField()) + assert.NoError(err) + marshalled := g.Marshal() + h.Write(marshalled) + hashed := h.Sum(nil) + circuit := &hashMarshalG1Circuit[sw_bls24315.Scalar, sw_bls24315.G1Affine]{ + target: ecc.BLS12_377.ScalarField(), + } + assignment := &hashMarshalG1Circuit[sw_bls24315.Scalar, sw_bls24315.G1Affine]{ + Point: sw_bls24315.NewG1Affine(g), + Expected: hashed, + target: ecc.BLS12_377.ScalarField(), + } + assert.CheckCircuit(circuit, test.WithCurves(ecc.BW6_633), test.WithValidAssignment(assignment), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }) +} + +type hashMarshalScalarCircuit[S algebra.ScalarT, G1El algebra.G1ElementT] struct { + Scalar S + Expected frontend.Variable + + target *big.Int +} + +func (c *hashMarshalScalarCircuit[S, G1El]) Define(api frontend.API) error { + h, err := recursion.NewHash(api, c.target, true) + if err != nil { + return fmt.Errorf("new hash: %w", err) + } + curve, err := algebra.GetCurve[S, G1El](api) + if err != nil { + return fmt.Errorf("get curve: %w", err) + } + marshlled := curve.MarshalScalar(c.Scalar) + h.Write(marshlled...) 
+ res := h.Sum() + api.AssertIsEqual(res, c.Expected) + return nil +} + +func TestHashMarshalScalar(t *testing.T) { + assert := test.NewAssert(t) + + assert.Run(func(assert *test.Assert) { + var s fr_bn254.Element + s.SetRandom() + h, err := recursion.NewShort(ecc.BN254.ScalarField(), ecc.BW6_761.ScalarField()) + assert.NoError(err) + marshalled := s.Marshal() + h.Write(marshalled) + hashed := h.Sum(nil) + circuit := &hashMarshalScalarCircuit[sw_bn254.Scalar, sw_bn254.G1Affine]{ + target: ecc.BW6_761.ScalarField(), + } + assignment := &hashMarshalScalarCircuit[sw_bn254.Scalar, sw_bn254.G1Affine]{ + Scalar: sw_bn254.NewScalar(s), + Expected: hashed, + target: ecc.BW6_761.ScalarField(), + } + assert.CheckCircuit(circuit, test.WithCurves(ecc.BN254), test.WithValidAssignment(assignment), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }) + assert.Run(func(assert *test.Assert) { + var s fr_bls12377.Element + s.SetRandom() + h, err := recursion.NewShort(ecc.BW6_761.ScalarField(), ecc.BLS12_377.ScalarField()) + assert.NoError(err) + marshalled := s.Marshal() + h.Write(marshalled) + hashed := h.Sum(nil) + circuit := &hashMarshalScalarCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine]{ + target: ecc.BLS12_377.ScalarField(), + } + assignment := &hashMarshalScalarCircuit[sw_bls12377.Scalar, sw_bls12377.G1Affine]{ + Scalar: s.String(), + Expected: hashed, + target: ecc.BLS12_377.ScalarField(), + } + assert.CheckCircuit(circuit, test.WithCurves(ecc.BW6_761), test.WithValidAssignment(assignment), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }) + assert.Run(func(assert *test.Assert) { + var s fr_bls24315.Element + s.SetRandom() + h, err := recursion.NewShort(ecc.BW6_633.ScalarField(), ecc.BLS24_315.ScalarField()) + assert.NoError(err) + marshalled := s.Marshal() + h.Write(marshalled) + hashed := h.Sum(nil) + circuit := &hashMarshalScalarCircuit[sw_bls24315.Scalar, sw_bls24315.G1Affine]{ + target: ecc.BLS12_377.ScalarField(), + } + assignment := &hashMarshalScalarCircuit[sw_bls24315.Scalar, sw_bls24315.G1Affine]{ + Scalar: s.String(), + Expected: hashed, + target: ecc.BLS12_377.ScalarField(), + } + assert.CheckCircuit(circuit, test.WithCurves(ecc.BW6_633), test.WithValidAssignment(assignment), test.NoFuzzing(), test.NoSerializationChecks(), test.NoSolidityChecks()) + }) +} diff --git a/std/selector/doc_map_test.go b/std/selector/doc_map_test.go index f70c353f61..edca8aba02 100644 --- a/std/selector/doc_map_test.go +++ b/std/selector/doc_map_test.go @@ -37,43 +37,27 @@ func ExampleMap() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } - // Output: compiled - // setup done - // secret witness - // public witness - // proof - // verify + fmt.Println("done") + // Output: done } diff --git a/std/selector/doc_mux_test.go b/std/selector/doc_mux_test.go index 
5a7b75ba36..629c9d80b5 100644 --- a/std/selector/doc_mux_test.go +++ b/std/selector/doc_mux_test.go @@ -35,43 +35,27 @@ func ExampleMux() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } - // Output: compiled - // setup done - // secret witness - // public witness - // proof - // verify + fmt.Println("done") + // Output: done } diff --git a/std/selector/doc_partition_test.go b/std/selector/doc_partition_test.go index 2a9b4ea548..2310d1a0bf 100644 --- a/std/selector/doc_partition_test.go +++ b/std/selector/doc_partition_test.go @@ -1,12 +1,11 @@ package selector_test -import "github.com/consensys/gnark/frontend" - import ( "fmt" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" "github.com/consensys/gnark/std/selector" ) @@ -38,43 +37,27 @@ func ExamplePartition() { ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) if err != nil { panic(err) - } else { - fmt.Println("compiled") } pk, vk, err := groth16.Setup(ccs) if err != nil { panic(err) - } else { - fmt.Println("setup done") } secretWitness, err := frontend.NewWitness(&witness, ecc.BN254.ScalarField()) if err != nil { panic(err) - } else { - fmt.Println("secret witness") } publicWitness, err := secretWitness.Public() if err != nil { panic(err) - } else { - fmt.Println("public witness") } proof, err := groth16.Prove(ccs, pk, secretWitness) if err != nil { panic(err) - } else { - fmt.Println("proof") } err = groth16.Verify(proof, vk, publicWitness) if err != nil { panic(err) - } else { - fmt.Println("verify") } - // Output: compiled - // setup done - // secret witness - // public witness - // proof - // verify + fmt.Println("done") + // Output: done } diff --git a/test/assert_checkcircuit.go b/test/assert_checkcircuit.go index f3d215adba..3ae327c886 100644 --- a/test/assert_checkcircuit.go +++ b/test/assert_checkcircuit.go @@ -125,7 +125,7 @@ func (assert *Assert) CheckCircuit(circuit frontend.Circuit, opts ...TestingOpti proof, err := concreteBackend.prove(ccs, pk, w.full, opt.proverOpts...) assert.noError(err, &w) - err = concreteBackend.verify(proof, vk, w.public) + err = concreteBackend.verify(proof, vk, w.public, opt.verifierOpts...) assert.noError(err, &w) if checkSolidity { @@ -225,7 +225,7 @@ type fnSetup func(ccs constraint.ConstraintSystem, curve ecc.ID) ( pkBuilder, vkBuilder, proofBuilder func() any, err error) type fnProve func(ccs constraint.ConstraintSystem, pk any, fullWitness witness.Witness, opts ...backend.ProverOption) (proof any, err error) -type fnVerify func(proof, vk any, publicWitness witness.Witness) error +type fnVerify func(proof, vk any, publicWitness witness.Witness, opts ...backend.VerifierOption) error // tBackend abstracts the backend implementation in the test package. 
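The hunks around this point thread backend.VerifierOption values through the test harness; together with the WithVerifierOpts option added in assert_options.go below, a test can now configure verification. Illustrative use only (myVerifierOpts is a hypothetical []backend.VerifierOption):

```go
assert.CheckCircuit(
	circuit,
	test.WithValidAssignment(assignment),
	test.WithVerifierOpts(myVerifierOpts...), // forwarded to the backend's Verify call
)
```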
type tBackend struct { @@ -246,8 +246,8 @@ var ( prove: func(ccs constraint.ConstraintSystem, pk any, fullWitness witness.Witness, opts ...backend.ProverOption) (proof any, err error) { return groth16.Prove(ccs, pk.(groth16.ProvingKey), fullWitness, opts...) }, - verify: func(proof, vk any, publicWitness witness.Witness) error { - return groth16.Verify(proof.(groth16.Proof), vk.(groth16.VerifyingKey), publicWitness) + verify: func(proof, vk any, publicWitness witness.Witness, opts ...backend.VerifierOption) error { + return groth16.Verify(proof.(groth16.Proof), vk.(groth16.VerifyingKey), publicWitness, opts...) }, } @@ -266,8 +266,8 @@ var ( prove: func(ccs constraint.ConstraintSystem, pk any, fullWitness witness.Witness, opts ...backend.ProverOption) (proof any, err error) { return plonk.Prove(ccs, pk.(plonk.ProvingKey), fullWitness, opts...) }, - verify: func(proof, vk any, publicWitness witness.Witness) error { - return plonk.Verify(proof.(plonk.Proof), vk.(plonk.VerifyingKey), publicWitness) + verify: func(proof, vk any, publicWitness witness.Witness, opts ...backend.VerifierOption) error { + return plonk.Verify(proof.(plonk.Proof), vk.(plonk.VerifyingKey), publicWitness, opts...) }, } @@ -282,8 +282,8 @@ var ( prove: func(ccs constraint.ConstraintSystem, pk any, fullWitness witness.Witness, opts ...backend.ProverOption) (proof any, err error) { return plonkfri.Prove(ccs, pk.(plonkfri.ProvingKey), fullWitness, opts...) }, - verify: func(proof, vk any, publicWitness witness.Witness) error { - return plonkfri.Verify(proof, vk.(plonkfri.VerifyingKey), publicWitness) + verify: func(proof, vk any, publicWitness witness.Witness, opts ...backend.VerifierOption) error { + return plonkfri.Verify(proof, vk.(plonkfri.VerifyingKey), publicWitness, opts...) }, } ) diff --git a/test/assert_options.go b/test/assert_options.go index 6cbd8d4341..31d0226183 100644 --- a/test/assert_options.go +++ b/test/assert_options.go @@ -16,9 +16,10 @@ type TestingOption func(*testingConfig) error type testingConfig struct { profile - solverOpts []solver.Option - proverOpts []backend.ProverOption - compileOpts []frontend.CompileOption + solverOpts []solver.Option + proverOpts []backend.ProverOption + verifierOpts []backend.VerifierOption + compileOpts []frontend.CompileOption validAssignments []frontend.Circuit invalidAssignments []frontend.Circuit @@ -166,3 +167,12 @@ func WithCompileOpts(compileOpts ...frontend.CompileOption) TestingOption { return nil } } + +// WithVerifierOpts is a testing option which uses the given verifierOpts when +// calling backend.Verify method. +func WithVerifierOpts(verifierOpts ...backend.VerifierOption) TestingOption { + return func(tc *testingConfig) error { + tc.verifierOpts = append(tc.verifierOpts, verifierOpts...) 
+ return nil + } +} From 9dfc49e4c98071294b890084c5b463bcb9a4e57f Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 1 Nov 2023 14:08:15 +0200 Subject: [PATCH 13/58] Update prove with latest updates, tidy mod file --- backend/groth16/bn254/prove_gpu.go | 9 ++++++++- go.mod | 3 +-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index e067c0d450..fe46f2feb3 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -90,8 +90,15 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b return err } + opt.HashToFieldFn.Write(constraint.SerializeCommitment(proof.Commitments[i].Marshal(), hashed, (fr.Bits-1)/8+1)) + hashBts := opt.HashToFieldFn.Sum(nil) + opt.HashToFieldFn.Reset() + nbBuf := fr.Bytes + if opt.HashToFieldFn.Size() < fr.Bytes { + nbBuf = opt.HashToFieldFn.Size() + } var res fr.Element - res, err = solveCommitmentWire(&proof.Commitments[i], hashed) + res.SetBytes(hashBts[:nbBuf]) res.BigInt(out[0]) return err } diff --git a/go.mod b/go.mod index be87da5f88..9d65bff128 100644 --- a/go.mod +++ b/go.mod @@ -19,8 +19,6 @@ require ( golang.org/x/sync v0.3.0 ) -require github.com/rogpeppe/go-internal v1.11.0 // indirect - require ( github.com/davecgh/go-spew v1.1.1 // indirect github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71 // indirect @@ -28,6 +26,7 @@ require ( github.com/mattn/go-isatty v0.0.19 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/x448/float16 v0.8.4 // indirect golang.org/x/sys v0.11.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect From 1544bf4de9d5ae1c0bf1b2fe7756c5e27dd807fb Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 1 Nov 2023 14:16:01 +0200 Subject: [PATCH 14/58] Use left shift instead of pow 2 --- backend/groth16/bn254/setup_gpu.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index 7826d04ff2..7420a70174 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -387,7 +387,8 @@ func (pk *ProvingKey) setupDevicePointers() error { for i := 0; i < log2Size; i++ { denIcicleArr = append(denIcicleArr, denIcicleArr...) } - for i := 0; i < (n - int(math.Pow(2, float64(log2Size)))); i++ { + pow2Remainder := n - 1 << log2Size + for i := 0; i < pow2Remainder ; i++ { denIcicleArr = append(denIcicleArr, denI) } From 86dfc0bbf50d2c058ebe9d20b4d811c635c1c522 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 1 Nov 2023 14:19:30 +0200 Subject: [PATCH 15/58] Use bits.Len instead of Floor(Log2) --- backend/groth16/bn254/setup_gpu.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index 7420a70174..b09b82701a 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -382,12 +382,12 @@ func (pk *ProvingKey) setupDevicePointers() error { denI.Exp(pk.Domain.FrMultiplicativeGen, big.NewInt(int64(pk.Domain.Cardinality))) denI.Sub(&denI, &oneI).Inverse(&denI) - log2Size := int(math.Floor(math.Log2(float64(n)))) + log2SizeFloor := bits.Len(uint(n)) - 1 denIcicleArr := []fr.Element{denI} - for i := 0; i < log2Size; i++ { + for i := 0; i < log2SizeFloor; i++ { denIcicleArr = append(denIcicleArr, denIcicleArr...) 
} - pow2Remainder := n - 1 << log2Size + pow2Remainder := n - 1 << log2SizeFloor for i := 0; i < pow2Remainder ; i++ { denIcicleArr = append(denIcicleArr, denI) } From 5b931f021b2bdab941c1f9ac61003a5b349de6ad Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Wed, 1 Nov 2023 14:22:28 +0200 Subject: [PATCH 16/58] Change buildtag to icicle instead of gpu --- README.md | 4 +++- backend/groth16/bn254/marshal.go | 4 ++-- backend/groth16/bn254/marshal_gpu.go | 4 ++-- backend/groth16/bn254/prove.go | 4 ++-- backend/groth16/bn254/prove_gpu.go | 4 ++-- backend/groth16/bn254/setup.go | 4 ++-- backend/groth16/bn254/setup_gpu.go | 8 ++++---- internal/generator/backend/main.go | 2 +- 8 files changed, 18 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 1bd3fb645e..54c20d345f 100644 --- a/README.md +++ b/README.md @@ -118,6 +118,8 @@ func main() { ### GPU Support +#### Icicle Library + The following schemes and curves support experimental use of Ingomyama's Icicle GPU library for low level zk-SNARK primitives such as MSM, NTT, and polynomial operations: - [x] [Groth16](https://eprint.iacr.org/2016/260) @@ -126,7 +128,7 @@ instantiated with the following curve(s) - [x] BN254 -To use GPUs, add the `gpu` buildtag to your build/run commands, e.g. `go run -tags=gpu main.go`. +To use GPUs, add the `icicle` buildtag to your build/run commands, e.g. `go run -tags=icicle main.go`. For more information about prerequisites see the [Icicle repo](https://github.com/ingonyama-zk/icicle). diff --git a/backend/groth16/bn254/marshal.go b/backend/groth16/bn254/marshal.go index 2162ad3b24..6d368052df 100644 --- a/backend/groth16/bn254/marshal.go +++ b/backend/groth16/bn254/marshal.go @@ -1,5 +1,5 @@ -//go:build !gpu -// +build !gpu +//go:build !icicle +// +build !icicle // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/marshal_gpu.go b/backend/groth16/bn254/marshal_gpu.go index 4a512a71a3..1c98d12ffd 100644 --- a/backend/groth16/bn254/marshal_gpu.go +++ b/backend/groth16/bn254/marshal_gpu.go @@ -1,5 +1,5 @@ -//go:build gpu -// +build gpu +//go:build icicle +// +build icicle // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index 00809bc822..1d1159a453 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -1,5 +1,5 @@ -//go:build !gpu -// +build !gpu +//go:build !icicle +// +build !icicle // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/prove_gpu.go index fe46f2feb3..114d963b9b 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/prove_gpu.go @@ -1,5 +1,5 @@ -//go:build gpu -// +build gpu +//go:build icicle +// +build icicle // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/setup.go b/backend/groth16/bn254/setup.go index fa1c56989c..480d849c1d 100644 --- a/backend/groth16/bn254/setup.go +++ b/backend/groth16/bn254/setup.go @@ -1,5 +1,5 @@ -//go:build !gpu -// +build !gpu +//go:build !icicle +// +build !icicle // Copyright 2020 ConsenSys Software Inc. // diff --git a/backend/groth16/bn254/setup_gpu.go b/backend/groth16/bn254/setup_gpu.go index b09b82701a..71b0fe2b9f 100644 --- a/backend/groth16/bn254/setup_gpu.go +++ b/backend/groth16/bn254/setup_gpu.go @@ -1,5 +1,5 @@ -//go:build gpu -// +build gpu +//go:build icicle +// +build icicle // Copyright 2020 ConsenSys Software Inc. 
// @@ -387,8 +387,8 @@ func (pk *ProvingKey) setupDevicePointers() error { for i := 0; i < log2SizeFloor; i++ { denIcicleArr = append(denIcicleArr, denIcicleArr...) } - pow2Remainder := n - 1 << log2SizeFloor - for i := 0; i < pow2Remainder ; i++ { + pow2Remainder := n - 1< Date: Thu, 2 Nov 2023 02:31:22 +0100 Subject: [PATCH 17/58] refactor: init prover option at switch --- backend/groth16/bls12-377/prove.go | 7 +------ backend/groth16/bls12-381/prove.go | 7 +------ backend/groth16/bls24-315/prove.go | 7 +------ backend/groth16/bls24-317/prove.go | 7 +------ backend/groth16/bn254/prove.go | 7 +------ backend/groth16/bw6-633/prove.go | 7 +------ backend/groth16/bw6-761/prove.go | 7 +------ backend/groth16/groth16.go | 20 +++++++++++-------- .../zkpschemes/groth16/groth16.prove.go.tmpl | 7 +------ test/commitments_test.go | 4 +++- 10 files changed, 23 insertions(+), 57 deletions(-) diff --git a/backend/groth16/bls12-377/prove.go b/backend/groth16/bls12-377/prove.go index 5dd2cf24d5..ae4b729cba 100644 --- a/backend/groth16/bls12-377/prove.go +++ b/backend/groth16/bls12-377/prove.go @@ -17,7 +17,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" @@ -58,11 +57,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bls12-381/prove.go b/backend/groth16/bls12-381/prove.go index 3a18d7ae1a..0a493b665f 100644 --- a/backend/groth16/bls12-381/prove.go +++ b/backend/groth16/bls12-381/prove.go @@ -17,7 +17,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" @@ -58,11 +57,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bls24-315/prove.go b/backend/groth16/bls24-315/prove.go index e32091db68..d7c51f38f1 100644 --- a/backend/groth16/bls24-315/prove.go +++ b/backend/groth16/bls24-315/prove.go @@ -17,7 +17,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" @@ -58,11 +57,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). 
-func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bls24-317/prove.go b/backend/groth16/bls24-317/prove.go index 6eb04ef675..f137cf0694 100644 --- a/backend/groth16/bls24-317/prove.go +++ b/backend/groth16/bls24-317/prove.go @@ -17,7 +17,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" @@ -58,11 +57,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index 1d1159a453..c43c127600 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -20,7 +20,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" @@ -61,11 +60,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bw6-633/prove.go b/backend/groth16/bw6-633/prove.go index 6cac809b96..a89b77bb32 100644 --- a/backend/groth16/bw6-633/prove.go +++ b/backend/groth16/bw6-633/prove.go @@ -17,7 +17,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" @@ -58,11 +57,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) 
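These per-curve hunks all make the same change: the curve-specific Prove now receives a ready-made backend.ProverConfig instead of building it from options itself. The caller-side pattern, taken from the backend/groth16/groth16.go hunk later in this commit, becomes:

```go
opt, err := backend.NewProverConfig(opts...)
if err != nil {
	return nil, fmt.Errorf("new prover config: %w", err)
}
return groth16_bn254.Prove(_r1cs, pk.(*groth16_bn254.ProvingKey), fullWitness, opt)
```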
- if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bw6-761/prove.go b/backend/groth16/bw6-761/prove.go index 78e149be9f..c4e645c79d 100644 --- a/backend/groth16/bw6-761/prove.go +++ b/backend/groth16/bw6-761/prove.go @@ -17,7 +17,6 @@ package groth16 import ( - "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" @@ -58,11 +57,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/groth16.go b/backend/groth16/groth16.go index 823e7c3f4b..d2f3856b0a 100644 --- a/backend/groth16/groth16.go +++ b/backend/groth16/groth16.go @@ -20,6 +20,7 @@ package groth16 import ( + "fmt" "io" "github.com/consensys/gnark-crypto/ecc" @@ -167,28 +168,31 @@ func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness, opts .. // will produce an invalid proof // internally, the solution vector to the R1CS will be filled with random values which may impact benchmarking func Prove(r1cs constraint.ConstraintSystem, pk ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (Proof, error) { - + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } switch _r1cs := r1cs.(type) { case *cs_bls12377.R1CS: - return groth16_bls12377.Prove(_r1cs, pk.(*groth16_bls12377.ProvingKey), fullWitness, opts...) + return groth16_bls12377.Prove(_r1cs, pk.(*groth16_bls12377.ProvingKey), fullWitness, opt) case *cs_bls12381.R1CS: - return groth16_bls12381.Prove(_r1cs, pk.(*groth16_bls12381.ProvingKey), fullWitness, opts...) + return groth16_bls12381.Prove(_r1cs, pk.(*groth16_bls12381.ProvingKey), fullWitness, opt) case *cs_bn254.R1CS: - return groth16_bn254.Prove(_r1cs, pk.(*groth16_bn254.ProvingKey), fullWitness, opts...) + return groth16_bn254.Prove(_r1cs, pk.(*groth16_bn254.ProvingKey), fullWitness, opt) case *cs_bw6761.R1CS: - return groth16_bw6761.Prove(_r1cs, pk.(*groth16_bw6761.ProvingKey), fullWitness, opts...) + return groth16_bw6761.Prove(_r1cs, pk.(*groth16_bw6761.ProvingKey), fullWitness, opt) case *cs_bls24317.R1CS: - return groth16_bls24317.Prove(_r1cs, pk.(*groth16_bls24317.ProvingKey), fullWitness, opts...) + return groth16_bls24317.Prove(_r1cs, pk.(*groth16_bls24317.ProvingKey), fullWitness, opt) case *cs_bls24315.R1CS: - return groth16_bls24315.Prove(_r1cs, pk.(*groth16_bls24315.ProvingKey), fullWitness, opts...) + return groth16_bls24315.Prove(_r1cs, pk.(*groth16_bls24315.ProvingKey), fullWitness, opt) case *cs_bw6633.R1CS: - return groth16_bw6633.Prove(_r1cs, pk.(*groth16_bw6633.ProvingKey), fullWitness, opts...) 
+ return groth16_bw6633.Prove(_r1cs, pk.(*groth16_bw6633.ProvingKey), fullWitness, opt) default: panic("unrecognized R1CS curve type") diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl index fe4b141afd..76155377a6 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl @@ -1,5 +1,4 @@ import ( - "fmt" "runtime" "math/big" "time" @@ -42,11 +41,7 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/test/commitments_test.go b/test/commitments_test.go index 504d95dfa0..09bdec5d18 100644 --- a/test/commitments_test.go +++ b/test/commitments_test.go @@ -229,7 +229,9 @@ func TestCommitmentDummySetup(t *testing.T) { w, err = frontend.NewWitness(assignment, ecc.BN254.ScalarField()) require.NoError(t, err) - _, err = groth16.Prove(_r1cs, &pk, w) + opt, err := backend.NewProverConfig() + require.NoError(t, err) + _, err = groth16.Prove(_r1cs, &pk, w, opt) require.NoError(t, err) } } From edbf98df9cd7170c4b5b2e8daf2b47b8aed649ff Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 02:32:19 +0100 Subject: [PATCH 18/58] refactor: change logging description --- backend/groth16/bls12-377/prove.go | 2 +- backend/groth16/bls12-381/prove.go | 2 +- backend/groth16/bls24-315/prove.go | 2 +- backend/groth16/bls24-317/prove.go | 2 +- backend/groth16/bn254/prove.go | 2 +- backend/groth16/bw6-633/prove.go | 2 +- backend/groth16/bw6-761/prove.go | 2 +- .../backend/template/zkpschemes/groth16/groth16.prove.go.tmpl | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bls12-377/prove.go b/backend/groth16/bls12-377/prove.go index ae4b729cba..79e3efa980 100644 --- a/backend/groth16/bls12-377/prove.go +++ b/backend/groth16/bls12-377/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bls12-381/prove.go b/backend/groth16/bls12-381/prove.go index 0a493b665f..22780e6996 100644 --- a/backend/groth16/bls12-381/prove.go +++ b/backend/groth16/bls12-381/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", 
"CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bls24-315/prove.go b/backend/groth16/bls24-315/prove.go index d7c51f38f1..eba19f3481 100644 --- a/backend/groth16/bls24-315/prove.go +++ b/backend/groth16/bls24-315/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bls24-317/prove.go b/backend/groth16/bls24-317/prove.go index f137cf0694..0a6db7cfab 100644 --- a/backend/groth16/bls24-317/prove.go +++ b/backend/groth16/bls24-317/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index c43c127600..eb713c32d9 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -65,7 +65,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bw6-633/prove.go b/backend/groth16/bw6-633/prove.go index a89b77bb32..96917d9725 100644 --- a/backend/groth16/bw6-633/prove.go +++ b/backend/groth16/bw6-633/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/backend/groth16/bw6-761/prove.go 
b/backend/groth16/bw6-761/prove.go index c4e645c79d..425e0149c0 100644 --- a/backend/groth16/bw6-761/prove.go +++ b/backend/groth16/bw6-761/prove.go @@ -62,7 +62,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl index 76155377a6..31546b8203 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl @@ -46,7 +46,7 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backe opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } - log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("hardware", "CPU").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() + log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "none").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) From 8a226e7c268167837304cc71a752e227d6a10650 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 02:32:59 +0100 Subject: [PATCH 19/58] feat: implement ICICLE prover option --- backend/backend.go | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/backend/backend.go b/backend/backend.go index aab30f2df1..50f0ab15e5 100644 --- a/backend/backend.go +++ b/backend/backend.go @@ -62,6 +62,7 @@ type ProverConfig struct { HashToFieldFn hash.Hash ChallengeHash hash.Hash KZGFoldingHash hash.Hash + Accelerator string } // NewProverConfig returns a default ProverConfig with given prover options opts @@ -122,6 +123,19 @@ func WithProverKZGFoldingHashFunction(hFunc hash.Hash) ProverOption { } } +// WithIcicleAcceleration requests to use [ICICLE] GPU proving backend for the +// prover. This option requires that the program is compiled with `icicle` build +// tag and the ICICLE dependencies are properly installed. See [ICICLE] for +// installation description. +// +// [ICICLE]: https://github.com/ingonyama-zk/icicle +func WithIcicleAcceleration() ProverOption { + return func(pc *ProverConfig) error { + pc.Accelerator = "icicle" + return nil + } +} + // VerifierOption defines option for altering the behavior of the verifier. See // the descriptions of functions returning instances of this type for // implemented options. 
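The option added in the patch above only records the request in the prover configuration; whether a GPU path actually runs is decided by the curve backend at prove time. For illustration, a minimal sketch of how a caller might consume the new option, assuming only the `backend` package additions shown above:

```go
package main

import (
	"fmt"

	"github.com/consensys/gnark/backend"
)

func main() {
	// Build a prover configuration the same way the Prove entry points do
	// internally. WithIcicleAcceleration only sets Accelerator to "icicle";
	// the binary must still be built with the `icicle` tag for the GPU path
	// to be available (see the option's documentation above).
	cfg, err := backend.NewProverConfig(backend.WithIcicleAcceleration())
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.Accelerator) // "icicle"
}
```

A later patch in this series makes the BN254 prover fall back to the CPU implementation whenever `Accelerator` is not set to "icicle", so callers can keep a single code path and toggle acceleration purely through this option.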
From eacf521d72b678912dedb6a165007d94232245eb Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 02:37:37 +0100 Subject: [PATCH 20/58] refactor: move icicle backend to separate package --- backend/groth16/bn254/icicle/doc.go | 2 + .../bn254/{prove_gpu.go => icicle/icicle.go} | 158 +++- backend/groth16/bn254/icicle/noicicle.go | 18 + backend/groth16/bn254/icicle/provingkey.go | 37 + backend/groth16/bn254/marshal_gpu.go | 381 -------- backend/groth16/bn254/setup_gpu.go | 811 ------------------ 6 files changed, 171 insertions(+), 1236 deletions(-) create mode 100644 backend/groth16/bn254/icicle/doc.go rename backend/groth16/bn254/{prove_gpu.go => icicle/icicle.go} (70%) create mode 100644 backend/groth16/bn254/icicle/noicicle.go create mode 100644 backend/groth16/bn254/icicle/provingkey.go delete mode 100644 backend/groth16/bn254/marshal_gpu.go delete mode 100644 backend/groth16/bn254/setup_gpu.go diff --git a/backend/groth16/bn254/icicle/doc.go b/backend/groth16/bn254/icicle/doc.go new file mode 100644 index 0000000000..a77a7fbde9 --- /dev/null +++ b/backend/groth16/bn254/icicle/doc.go @@ -0,0 +1,2 @@ +// Package icicle_bn254 implements ICICLE acceleration for BN254 Groth16 backend. +package icicle_bn254 diff --git a/backend/groth16/bn254/prove_gpu.go b/backend/groth16/bn254/icicle/icicle.go similarity index 70% rename from backend/groth16/bn254/prove_gpu.go rename to backend/groth16/bn254/icicle/icicle.go index 114d963b9b..a0f98b6462 100644 --- a/backend/groth16/bn254/prove_gpu.go +++ b/backend/groth16/bn254/icicle/icicle.go @@ -1,76 +1,146 @@ //go:build icicle -// +build icicle - -// Copyright 2020 ConsenSys Software Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by gnark DO NOT EDIT - -package groth16 + +package icicle_bn254 import ( + "fmt" "math/big" + "math/bits" "time" "unsafe" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fp" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" "github.com/consensys/gnark/backend" + groth16_bn254 "github.com/consensys/gnark/backend/groth16/bn254" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/backend/witness" "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/constraint/bn254" + cs "github.com/consensys/gnark/constraint/bn254" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/internal/utils" "github.com/consensys/gnark/logger" iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" ) -// Proof represents a Groth16 proof that was encoded with a ProvingKey and can be verified -// with a valid statement and a VerifyingKey -// Notation follows Figure 4. 
in DIZK paper https://eprint.iacr.org/2018/691.pdf -type Proof struct { - Ar, Krs curve.G1Affine - Bs curve.G2Affine - Commitments []curve.G1Affine // Pedersen commitments a la https://eprint.iacr.org/2022/1072 - CommitmentPok curve.G1Affine // Batched proof of knowledge of the above commitments -} +const HasIcicle = true -// isValid ensures proof elements are in the correct subgroup -func (proof *Proof) isValid() bool { - return proof.Ar.IsInSubGroup() && proof.Krs.IsInSubGroup() && proof.Bs.IsInSubGroup() -} +func (pk *ProvingKey) setupDevicePointers() error { + if pk.deviceInfo != nil { + return nil + } + pk.deviceInfo = &deviceInfo{} + n := int(pk.Domain.Cardinality) + sizeBytes := n * fr.Bytes + + /************************* Start Domain Device Setup ***************************/ + copyCosetInvDone := make(chan unsafe.Pointer, 1) + copyCosetDone := make(chan unsafe.Pointer, 1) + copyDenDone := make(chan unsafe.Pointer, 1) + /************************* CosetTableInv ***************************/ + go iciclegnark.CopyToDevice(pk.Domain.CosetTableInv, sizeBytes, copyCosetInvDone) + + /************************* CosetTable ***************************/ + go iciclegnark.CopyToDevice(pk.Domain.CosetTable, sizeBytes, copyCosetDone) + + /************************* Den ***************************/ + var denI, oneI fr.Element + oneI.SetOne() + denI.Exp(pk.Domain.FrMultiplicativeGen, big.NewInt(int64(pk.Domain.Cardinality))) + denI.Sub(&denI, &oneI).Inverse(&denI) + + log2SizeFloor := bits.Len(uint(n)) - 1 + denIcicleArr := []fr.Element{denI} + for i := 0; i < log2SizeFloor; i++ { + denIcicleArr = append(denIcicleArr, denIcicleArr...) + } + pow2Remainder := n - 1< the number of gates is len(GateOrdering)+len(PureStructuralConstraints)+len(InpureStructuralConstraints) - - loop through the ordered computational constraints (=gate in r1cs system structure), eValuate A(X), B(X), C(X) with simple formula (the gate number is the current iterator) - - loop through the inpure structural constraints, eValuate A(X), B(X), C(X) with simple formula, the gate number is len(gateOrdering)+ current iterator - - loop through the pure structural constraints, eValuate A(X), B(X), C(X) with simple formula, the gate number is len(gateOrdering)+len(InpureStructuralConstraints)+current iterator - */ - - // get R1CS nb constraints, wires and public/private inputs - nbWires := r1cs.NbInternalVariables + r1cs.GetNbPublicVariables() + r1cs.GetNbSecretVariables() - - commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - commitmentWires := commitmentInfo.CommitmentIndexes() - privateCommitted := commitmentInfo.GetPrivateCommitted() - nbPrivateCommittedWires := internal.NbElements(privateCommitted) - - // a commitment is itself defined by a hint so the prover considers it private - // but the verifier will need to inject the value itself so on the groth16 - // level it must be considered public - nbPublicWires := r1cs.GetNbPublicVariables() + len(commitmentInfo) - nbPrivateWires := r1cs.GetNbSecretVariables() + r1cs.NbInternalVariables - nbPrivateCommittedWires - len(commitmentInfo) - - // Setting group for fft - domain := fft.NewDomain(uint64(r1cs.GetNbConstraints())) - - // samples toxic waste - toxicWaste, err := sampleToxicWaste() - if err != nil { - return err - } - - // Setup coeffs to compute pk.G1.A, pk.G1.B, pk.G1.K - A, B, C := setupABC(r1cs, domain, toxicWaste) - - // To fill in the Proving and Verifying keys, we need to perform a lot of ecc scalar multiplication (with generator) - // and convert the 
resulting points to affine - // this is done using the curve.BatchScalarMultiplicationGX API, which takes as input the base point - // (in our case the generator) and the list of scalars, and outputs a list of points (len(points) == len(scalars)) - // to use this batch call, we need to order our scalars in the same slice - // we have 1 batch call for G1 and 1 batch call for G1 - // scalars are fr.Element in non montgomery form - _, _, g1, g2 := curve.Generators() - - // --------------------------------------------------------------------------------------------- - // G1 scalars - - // the G1 scalars are ordered (arbitrary) as follows: - // - // [[α], [β], [δ], [A(i)], [B(i)], [pk.K(i)], [Z(i)], [vk.K(i)]] - // len(A) == len(B) == nbWires - // len(pk.K) == nbPrivateWires - // len(vk.K) == nbPublicWires - // len(Z) == domain.Cardinality - - // compute scalars for pkK, vkK and ckK - pkK := make([]fr.Element, nbPrivateWires) - vkK := make([]fr.Element, nbPublicWires) - ckK := make([][]fr.Element, len(commitmentInfo)) - for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) - } - - var t0, t1 fr.Element - - computeK := func(i int, coeff *fr.Element) { // TODO: Inline again - t1.Mul(&A[i], &toxicWaste.beta) - t0.Mul(&B[i], &toxicWaste.alpha) - t1.Add(&t1, &t0). - Add(&t1, &C[i]). - Mul(&t1, coeff) - } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] - nbCommitmentsSeen := 0 - - for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { - if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { - isCommitment = true - nbCommitmentsSeen++ - } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } - } - - if isPublic || commitment != -1 || isCommitment { - computeK(i, &toxicWaste.gammaInv) - - if isPublic || isCommitment { - vkK[vI] = t1 - vI++ - } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ - nbPrivateCommittedSeen++ - } - } else { - computeK(i, &toxicWaste.deltaInv) - pkK[i-vI-nbPrivateCommittedSeen] = t1 // vI = nbPublicSeen + nbCommitmentsSeen - } - } - - // Z part of the proving key (scalars) - Z := make([]fr.Element, domain.Cardinality) - one := fr.One() - var zdt fr.Element - - zdt.Exp(toxicWaste.t, new(big.Int).SetUint64(domain.Cardinality)). - Sub(&zdt, &one). 
- Mul(&zdt, &toxicWaste.deltaInv) // sets Zdt to Zdt/delta - - for i := 0; i < int(domain.Cardinality); i++ { - Z[i] = zdt - zdt.Mul(&zdt, &toxicWaste.t) - } - - // mark points at infinity and filter them - pk.InfinityA = make([]bool, len(A)) - pk.InfinityB = make([]bool, len(B)) - - n := 0 - for i, e := range A { - if e.IsZero() { - pk.InfinityA[i] = true - continue - } - A[n] = A[i] - n++ - } - A = A[:n] - pk.NbInfinityA = uint64(nbWires - n) - n = 0 - for i, e := range B { - if e.IsZero() { - pk.InfinityB[i] = true - continue - } - B[n] = B[i] - n++ - } - B = B[:n] - pk.NbInfinityB = uint64(nbWires - n) - - // compute our batch scalar multiplication with g1 elements - g1Scalars := make([]fr.Element, 0, (nbWires*3)+int(domain.Cardinality)+3) - g1Scalars = append(g1Scalars, toxicWaste.alpha, toxicWaste.beta, toxicWaste.delta) - g1Scalars = append(g1Scalars, A...) - g1Scalars = append(g1Scalars, B...) - g1Scalars = append(g1Scalars, Z...) - g1Scalars = append(g1Scalars, vkK...) - g1Scalars = append(g1Scalars, pkK...) - for i := range ckK { - g1Scalars = append(g1Scalars, ckK[i]...) - } - - g1PointsAff := curve.BatchScalarMultiplicationG1(&g1, g1Scalars) - - // sets pk: [α]₁, [β]₁, [δ]₁ - pk.G1.Alpha = g1PointsAff[0] - pk.G1.Beta = g1PointsAff[1] - pk.G1.Delta = g1PointsAff[2] - - offset := 3 - pk.G1.A = g1PointsAff[offset : offset+len(A)] - offset += len(A) - - pk.G1.B = g1PointsAff[offset : offset+len(B)] - offset += len(B) - - bitReverse(g1PointsAff[offset : offset+int(domain.Cardinality)]) - sizeZ := int(domain.Cardinality) - 1 // deg(H)=deg(A*B-C/X^n-1)=(n-1)+(n-1)-n=n-2 - pk.G1.Z = g1PointsAff[offset : offset+sizeZ] - - offset += int(domain.Cardinality) - - vk.G1.K = g1PointsAff[offset : offset+nbPublicWires] - offset += nbPublicWires - - pk.G1.K = g1PointsAff[offset : offset+nbPrivateWires] - offset += nbPrivateWires - - // --------------------------------------------------------------------------------------------- - // Commitment setup - - commitmentBases := make([][]curve.G1Affine, len(commitmentInfo)) - for i := range commitmentBases { - size := len(ckK[i]) - commitmentBases[i] = g1PointsAff[offset : offset+size] - offset += size - } - if offset != len(g1PointsAff) { - return errors.New("didn't consume all G1 points") // TODO @Tabaie Remove this - } - - pk.CommitmentKeys, vk.CommitmentKey, err = pedersen.Setup(commitmentBases...) 
- if err != nil { - return err - } - - vk.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentWires, r1cs.GetNbPublicVariables()) - - // --------------------------------------------------------------------------------------------- - // G2 scalars - - // the G2 scalars are ordered as follow: - // - // [[B(i)], [β], [δ], [γ]] - // len(B) == nbWires - - // compute our batch scalar multiplication with g2 elements - g2Scalars := append(B, toxicWaste.beta, toxicWaste.delta, toxicWaste.gamma) - - g2PointsAff := curve.BatchScalarMultiplicationG2(&g2, g2Scalars) - - pk.G2.B = g2PointsAff[:len(B)] - - // sets pk: [β]₂, [δ]₂ - pk.G2.Beta = g2PointsAff[len(B)+0] - pk.G2.Delta = g2PointsAff[len(B)+1] - - // sets vk: [δ]₂, [γ]₂ - vk.G2.Delta = g2PointsAff[len(B)+1] - vk.G2.Gamma = g2PointsAff[len(B)+2] - - // --------------------------------------------------------------------------------------------- - // Pairing: vk.e - vk.G1.Alpha = pk.G1.Alpha - vk.G2.Beta = pk.G2.Beta - - // unused, here for compatibility purposes - vk.G1.Beta = pk.G1.Beta - vk.G1.Delta = pk.G1.Delta - - if err := vk.Precompute(); err != nil { - return err - } - - // set domain - pk.Domain = *domain - - // Move static values (points, domain, hpoly denom) to the device/GPU - err = pk.setupDevicePointers() - - return nil -} - -func (pk *ProvingKey) setupDevicePointers() error { - n := int(pk.Domain.Cardinality) - sizeBytes := n * fr.Bytes - - /************************* Start Domain Device Setup ***************************/ - copyCosetInvDone := make(chan unsafe.Pointer, 1) - copyCosetDone := make(chan unsafe.Pointer, 1) - copyDenDone := make(chan unsafe.Pointer, 1) - /************************* CosetTableInv ***************************/ - go iciclegnark.CopyToDevice(pk.Domain.CosetTableInv, sizeBytes, copyCosetInvDone) - - /************************* CosetTable ***************************/ - go iciclegnark.CopyToDevice(pk.Domain.CosetTable, sizeBytes, copyCosetDone) - - /************************* Den ***************************/ - var denI, oneI fr.Element - oneI.SetOne() - denI.Exp(pk.Domain.FrMultiplicativeGen, big.NewInt(int64(pk.Domain.Cardinality))) - denI.Sub(&denI, &oneI).Inverse(&denI) - - log2SizeFloor := bits.Len(uint(n)) - 1 - denIcicleArr := []fr.Element{denI} - for i := 0; i < log2SizeFloor; i++ { - denIcicleArr = append(denIcicleArr, denIcicleArr...) - } - pow2Remainder := n - 1<> nn - if irev > i { - a[i], a[irev] = a[irev], a[i] - } - } -} From 2b82265f35f77a74d6fb5b107a111b01ffd6bfb5 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 09:53:40 +0100 Subject: [PATCH 21/58] feat: use icicle backend when has tag --- backend/groth16/groth16.go | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/backend/groth16/groth16.go b/backend/groth16/groth16.go index d2f3856b0a..f6b8791a10 100644 --- a/backend/groth16/groth16.go +++ b/backend/groth16/groth16.go @@ -50,6 +50,7 @@ import ( groth16_bls24315 "github.com/consensys/gnark/backend/groth16/bls24-315" groth16_bls24317 "github.com/consensys/gnark/backend/groth16/bls24-317" groth16_bn254 "github.com/consensys/gnark/backend/groth16/bn254" + icicle_bn254 "github.com/consensys/gnark/backend/groth16/bn254/icicle" groth16_bw6633 "github.com/consensys/gnark/backend/groth16/bw6-633" groth16_bw6761 "github.com/consensys/gnark/backend/groth16/bw6-761" ) @@ -180,6 +181,9 @@ func Prove(r1cs constraint.ConstraintSystem, pk ProvingKey, fullWitness witness. 
return groth16_bls12381.Prove(_r1cs, pk.(*groth16_bls12381.ProvingKey), fullWitness, opt) case *cs_bn254.R1CS: + if icicle_bn254.HasIcicle { + return icicle_bn254.Prove(_r1cs, pk.(*icicle_bn254.ProvingKey), fullWitness, opt) + } return groth16_bn254.Prove(_r1cs, pk.(*groth16_bn254.ProvingKey), fullWitness, opt) case *cs_bw6761.R1CS: @@ -225,8 +229,15 @@ func Setup(r1cs constraint.ConstraintSystem) (ProvingKey, VerifyingKey, error) { } return &pk, &vk, nil case *cs_bn254.R1CS: - var pk groth16_bn254.ProvingKey var vk groth16_bn254.VerifyingKey + if icicle_bn254.HasIcicle { + var pk icicle_bn254.ProvingKey + if err := icicle_bn254.Setup(_r1cs, &pk, &vk); err != nil { + return nil, nil, err + } + return &pk, &vk, nil + } + var pk groth16_bn254.ProvingKey if err := groth16_bn254.Setup(_r1cs, &pk, &vk); err != nil { return nil, nil, err } @@ -281,6 +292,13 @@ func DummySetup(r1cs constraint.ConstraintSystem) (ProvingKey, error) { } return &pk, nil case *cs_bn254.R1CS: + if icicle_bn254.HasIcicle { + var pk icicle_bn254.ProvingKey + if err := icicle_bn254.DummySetup(_r1cs, &pk); err != nil { + return nil, err + } + return &pk, nil + } var pk groth16_bn254.ProvingKey if err := groth16_bn254.DummySetup(_r1cs, &pk); err != nil { return nil, err @@ -322,6 +340,9 @@ func NewProvingKey(curveID ecc.ID) ProvingKey { switch curveID { case ecc.BN254: pk = &groth16_bn254.ProvingKey{} + if icicle_bn254.HasIcicle { + pk = &icicle_bn254.ProvingKey{} + } case ecc.BLS12_377: pk = &groth16_bls12377.ProvingKey{} case ecc.BLS12_381: From 19b5866828d10473f9435296ce5982d22fa52dc1 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 09:54:44 +0100 Subject: [PATCH 22/58] chore: revert unified option generation --- backend/groth16/bls12-377/prove.go | 7 ++++++- backend/groth16/bls12-381/prove.go | 7 ++++++- backend/groth16/bls24-315/prove.go | 7 ++++++- backend/groth16/bls24-317/prove.go | 7 ++++++- backend/groth16/bn254/icicle/icicle.go | 10 ++++++++- backend/groth16/bn254/icicle/noicicle.go | 2 +- backend/groth16/bn254/prove.go | 7 ++++++- backend/groth16/bw6-633/prove.go | 7 ++++++- backend/groth16/bw6-761/prove.go | 7 ++++++- backend/groth16/groth16.go | 21 +++++++------------ .../zkpschemes/groth16/groth16.prove.go.tmpl | 7 ++++++- test/commitments_test.go | 4 +--- 12 files changed, 67 insertions(+), 26 deletions(-) diff --git a/backend/groth16/bls12-377/prove.go b/backend/groth16/bls12-377/prove.go index 79e3efa980..d1ee7d1103 100644 --- a/backend/groth16/bls12-377/prove.go +++ b/backend/groth16/bls12-377/prove.go @@ -17,6 +17,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" @@ -57,7 +58,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) 
+ if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bls12-381/prove.go b/backend/groth16/bls12-381/prove.go index 22780e6996..c96394908d 100644 --- a/backend/groth16/bls12-381/prove.go +++ b/backend/groth16/bls12-381/prove.go @@ -17,6 +17,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" @@ -57,7 +58,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bls24-315/prove.go b/backend/groth16/bls24-315/prove.go index eba19f3481..88b8218711 100644 --- a/backend/groth16/bls24-315/prove.go +++ b/backend/groth16/bls24-315/prove.go @@ -17,6 +17,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" @@ -57,7 +58,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bls24-317/prove.go b/backend/groth16/bls24-317/prove.go index 0a6db7cfab..de4230d50c 100644 --- a/backend/groth16/bls24-317/prove.go +++ b/backend/groth16/bls24-317/prove.go @@ -17,6 +17,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" @@ -57,7 +58,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) 
+ if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bn254/icicle/icicle.go b/backend/groth16/bn254/icicle/icicle.go index a0f98b6462..f118d3fa59 100644 --- a/backend/groth16/bn254/icicle/icicle.go +++ b/backend/groth16/bn254/icicle/icicle.go @@ -12,6 +12,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fp" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" "github.com/consensys/gnark/backend" groth16_bn254 "github.com/consensys/gnark/backend/groth16/bn254" @@ -129,7 +130,14 @@ func (pk *ProvingKey) setupDevicePointers() error { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*groth16_bn254.Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*groth16_bn254.Proof, error) { + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } + if opt.HashToFieldFn == nil { + opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) + } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "icicle").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() if pk.deviceInfo == nil { log.Debug().Msg("precomputing proving key in GPU") diff --git a/backend/groth16/bn254/icicle/noicicle.go b/backend/groth16/bn254/icicle/noicicle.go index c0eab26cb0..87703339ce 100644 --- a/backend/groth16/bn254/icicle/noicicle.go +++ b/backend/groth16/bn254/icicle/noicicle.go @@ -13,6 +13,6 @@ import ( const HasIcicle = false -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*groth16_bn254.Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*groth16_bn254.Proof, error) { return nil, fmt.Errorf("icicle backend requested but program compiled without 'icicle' build tag") } diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index eb713c32d9..a0e689088c 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -20,6 +20,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" @@ -60,7 +61,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) 
+ if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bw6-633/prove.go b/backend/groth16/bw6-633/prove.go index 96917d9725..6d0ee420f0 100644 --- a/backend/groth16/bw6-633/prove.go +++ b/backend/groth16/bw6-633/prove.go @@ -17,6 +17,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" @@ -57,7 +58,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/bw6-761/prove.go b/backend/groth16/bw6-761/prove.go index 425e0149c0..203d1be898 100644 --- a/backend/groth16/bw6-761/prove.go +++ b/backend/groth16/bw6-761/prove.go @@ -17,6 +17,7 @@ package groth16 import ( + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" @@ -57,7 +58,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/backend/groth16/groth16.go b/backend/groth16/groth16.go index f6b8791a10..25b60cebca 100644 --- a/backend/groth16/groth16.go +++ b/backend/groth16/groth16.go @@ -20,7 +20,6 @@ package groth16 import ( - "fmt" "io" "github.com/consensys/gnark-crypto/ecc" @@ -169,34 +168,30 @@ func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness, opts .. // will produce an invalid proof // internally, the solution vector to the R1CS will be filled with random values which may impact benchmarking func Prove(r1cs constraint.ConstraintSystem, pk ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (Proof, error) { - opt, err := backend.NewProverConfig(opts...) - if err != nil { - return nil, fmt.Errorf("new prover config: %w", err) - } switch _r1cs := r1cs.(type) { case *cs_bls12377.R1CS: - return groth16_bls12377.Prove(_r1cs, pk.(*groth16_bls12377.ProvingKey), fullWitness, opt) + return groth16_bls12377.Prove(_r1cs, pk.(*groth16_bls12377.ProvingKey), fullWitness, opts...) case *cs_bls12381.R1CS: - return groth16_bls12381.Prove(_r1cs, pk.(*groth16_bls12381.ProvingKey), fullWitness, opt) + return groth16_bls12381.Prove(_r1cs, pk.(*groth16_bls12381.ProvingKey), fullWitness, opts...) 
case *cs_bn254.R1CS: if icicle_bn254.HasIcicle { - return icicle_bn254.Prove(_r1cs, pk.(*icicle_bn254.ProvingKey), fullWitness, opt) + return icicle_bn254.Prove(_r1cs, pk.(*icicle_bn254.ProvingKey), fullWitness, opts...) } - return groth16_bn254.Prove(_r1cs, pk.(*groth16_bn254.ProvingKey), fullWitness, opt) + return groth16_bn254.Prove(_r1cs, pk.(*groth16_bn254.ProvingKey), fullWitness, opts...) case *cs_bw6761.R1CS: - return groth16_bw6761.Prove(_r1cs, pk.(*groth16_bw6761.ProvingKey), fullWitness, opt) + return groth16_bw6761.Prove(_r1cs, pk.(*groth16_bw6761.ProvingKey), fullWitness, opts...) case *cs_bls24317.R1CS: - return groth16_bls24317.Prove(_r1cs, pk.(*groth16_bls24317.ProvingKey), fullWitness, opt) + return groth16_bls24317.Prove(_r1cs, pk.(*groth16_bls24317.ProvingKey), fullWitness, opts...) case *cs_bls24315.R1CS: - return groth16_bls24315.Prove(_r1cs, pk.(*groth16_bls24315.ProvingKey), fullWitness, opt) + return groth16_bls24315.Prove(_r1cs, pk.(*groth16_bls24315.ProvingKey), fullWitness, opts...) case *cs_bw6633.R1CS: - return groth16_bw6633.Prove(_r1cs, pk.(*groth16_bw6633.ProvingKey), fullWitness, opt) + return groth16_bw6633.Prove(_r1cs, pk.(*groth16_bw6633.ProvingKey), fullWitness, opts...) default: panic("unrecognized R1CS curve type") diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl index 31546b8203..635f71c8cf 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.prove.go.tmpl @@ -1,4 +1,5 @@ import ( + "fmt" "runtime" "math/big" "time" @@ -41,7 +42,11 @@ func (proof *Proof) CurveID() ecc.ID { } // Prove generates the proof of knowledge of a r1cs with full witness (secret + public part). -func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opt backend.ProverConfig) (*Proof, error) { +func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...backend.ProverOption) (*Proof, error) { + opt, err := backend.NewProverConfig(opts...) + if err != nil { + return nil, fmt.Errorf("new prover config: %w", err) + } if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } diff --git a/test/commitments_test.go b/test/commitments_test.go index 09bdec5d18..504d95dfa0 100644 --- a/test/commitments_test.go +++ b/test/commitments_test.go @@ -229,9 +229,7 @@ func TestCommitmentDummySetup(t *testing.T) { w, err = frontend.NewWitness(assignment, ecc.BN254.ScalarField()) require.NoError(t, err) - opt, err := backend.NewProverConfig() - require.NoError(t, err) - _, err = groth16.Prove(_r1cs, &pk, w, opt) + _, err = groth16.Prove(_r1cs, &pk, w) require.NoError(t, err) } } From 3b9f2293f5fe8ac31c7d0ef4e93c9409528b0ea8 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 09:56:11 +0100 Subject: [PATCH 23/58] chore: remove icicle build tag --- backend/groth16/bn254/marshal.go | 3 --- backend/groth16/bn254/prove.go | 3 --- backend/groth16/bn254/setup.go | 3 --- internal/generator/backend/main.go | 12 +++--------- 4 files changed, 3 insertions(+), 18 deletions(-) diff --git a/backend/groth16/bn254/marshal.go b/backend/groth16/bn254/marshal.go index 6d368052df..c6539a7ba2 100644 --- a/backend/groth16/bn254/marshal.go +++ b/backend/groth16/bn254/marshal.go @@ -1,6 +1,3 @@ -//go:build !icicle -// +build !icicle - // Copyright 2020 ConsenSys Software Inc. 
// // Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/backend/groth16/bn254/prove.go b/backend/groth16/bn254/prove.go index a0e689088c..b20b6c7cae 100644 --- a/backend/groth16/bn254/prove.go +++ b/backend/groth16/bn254/prove.go @@ -1,6 +1,3 @@ -//go:build !icicle -// +build !icicle - // Copyright 2020 ConsenSys Software Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/backend/groth16/bn254/setup.go b/backend/groth16/bn254/setup.go index 480d849c1d..372c723da0 100644 --- a/backend/groth16/bn254/setup.go +++ b/backend/groth16/bn254/setup.go @@ -1,6 +1,3 @@ -//go:build !icicle -// +build !icicle - // Copyright 2020 ConsenSys Software Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go index 231f62573d..bf0b33e5d5 100644 --- a/internal/generator/backend/main.go +++ b/internal/generator/backend/main.go @@ -36,7 +36,6 @@ func main() { CSPath: "../../../constraint/bn254/", Curve: "BN254", CurveID: "BN254", - BuildTag: "!icicle", } bw6_761 := templateData{ RootPath: "../../../backend/{?}/bw6-761/", @@ -158,16 +157,11 @@ func main() { panic(err) } - var buildTag string = "" - if d.BuildTag != "" { - buildTag = d.BuildTag - } - entries = []bavard.Entry{ {File: filepath.Join(groth16Dir, "verify.go"), Templates: []string{"groth16/groth16.verify.go.tmpl", importCurve}}, - {File: filepath.Join(groth16Dir, "prove.go"), Templates: []string{"groth16/groth16.prove.go.tmpl", importCurve}, BuildTag: buildTag}, - {File: filepath.Join(groth16Dir, "setup.go"), Templates: []string{"groth16/groth16.setup.go.tmpl", importCurve}, BuildTag: buildTag}, - {File: filepath.Join(groth16Dir, "marshal.go"), Templates: []string{"groth16/groth16.marshal.go.tmpl", importCurve}, BuildTag: buildTag}, + {File: filepath.Join(groth16Dir, "prove.go"), Templates: []string{"groth16/groth16.prove.go.tmpl", importCurve}}, + {File: filepath.Join(groth16Dir, "setup.go"), Templates: []string{"groth16/groth16.setup.go.tmpl", importCurve}}, + {File: filepath.Join(groth16Dir, "marshal.go"), Templates: []string{"groth16/groth16.marshal.go.tmpl", importCurve}}, {File: filepath.Join(groth16Dir, "marshal_test.go"), Templates: []string{"groth16/tests/groth16.marshal.go.tmpl", importCurve}}, } if err := bgen.Generate(d, "groth16", "./template/zkpschemes/", entries...); err != nil { From 6d64b53560eee10a2781154f03343f43f07dd752 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 09:56:54 +0100 Subject: [PATCH 24/58] feat: make icicle proving optional --- backend/groth16/bn254/icicle/icicle.go | 3 +++ backend/groth16/bn254/icicle/provingkey.go | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/icicle/icicle.go b/backend/groth16/bn254/icicle/icicle.go index f118d3fa59..5b1b235d33 100644 --- a/backend/groth16/bn254/icicle/icicle.go +++ b/backend/groth16/bn254/icicle/icicle.go @@ -138,6 +138,9 @@ func Prove(r1cs *cs.R1CS, pk *ProvingKey, fullWitness witness.Witness, opts ...b if opt.HashToFieldFn == nil { opt.HashToFieldFn = hash_to_field.New([]byte(constraint.CommitmentDst)) } + if opt.Accelerator != "icicle" { + return groth16_bn254.Prove(r1cs, &pk.ProvingKey, fullWitness, opts...) 
+ } log := logger.Logger().With().Str("curve", r1cs.CurveID().String()).Str("acceleration", "icicle").Int("nbConstraints", r1cs.GetNbConstraints()).Str("backend", "groth16").Logger() if pk.deviceInfo == nil { log.Debug().Msg("precomputing proving key in GPU") diff --git a/backend/groth16/bn254/icicle/provingkey.go b/backend/groth16/bn254/icicle/provingkey.go index 6981f284c1..146a794255 100644 --- a/backend/groth16/bn254/icicle/provingkey.go +++ b/backend/groth16/bn254/icicle/provingkey.go @@ -24,7 +24,6 @@ type deviceInfo struct { type ProvingKey struct { groth16_bn254.ProvingKey - *deviceInfo } From 18d8ae1d0f6b4ebd61ef4aa5470a29e1bef5b35a Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 10:16:35 +0100 Subject: [PATCH 25/58] test: add marshal test --- backend/groth16/bn254/icicle/marshal_test.go | 45 ++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 backend/groth16/bn254/icicle/marshal_test.go diff --git a/backend/groth16/bn254/icicle/marshal_test.go b/backend/groth16/bn254/icicle/marshal_test.go new file mode 100644 index 0000000000..d6200b80ee --- /dev/null +++ b/backend/groth16/bn254/icicle/marshal_test.go @@ -0,0 +1,45 @@ +package icicle_bn254_test + +import ( + "bytes" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark/backend/groth16" + groth16_bn254 "github.com/consensys/gnark/backend/groth16/bn254" + cs_bn254 "github.com/consensys/gnark/constraint/bn254" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/test" +) + +type circuit struct { + A, B frontend.Variable `gnark:",public"` + Res frontend.Variable +} + +func (c *circuit) Define(api frontend.API) error { + api.AssertIsEqual(api.Mul(c.A, c.B), c.Res) + return nil +} + +func TestMarshal(t *testing.T) { + assert := test.NewAssert(t) + ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit{}) + assert.NoError(err) + tCcs := ccs.(*cs_bn254.R1CS) + nativePK := groth16_bn254.ProvingKey{} + nativeVK := groth16_bn254.VerifyingKey{} + err = groth16_bn254.Setup(tCcs, &nativePK, &nativeVK) + assert.NoError(err) + + pk := groth16.NewProvingKey(ecc.BN254) + buf := new(bytes.Buffer) + _, err = nativePK.WriteTo(buf) + assert.NoError(err) + _, err = pk.ReadFrom(buf) + assert.NoError(err) + if pk.IsDifferent(&nativePK) { + t.Error("marshal output difference") + } +} From 025a150a11d46c85eeea398ab590e903b2a24905 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 10:54:26 +0100 Subject: [PATCH 26/58] test: add reverse marshal test --- backend/groth16/bn254/icicle/marshal_test.go | 22 ++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/backend/groth16/bn254/icicle/marshal_test.go b/backend/groth16/bn254/icicle/marshal_test.go index d6200b80ee..75c5a2b57e 100644 --- a/backend/groth16/bn254/icicle/marshal_test.go +++ b/backend/groth16/bn254/icicle/marshal_test.go @@ -7,6 +7,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark/backend/groth16" groth16_bn254 "github.com/consensys/gnark/backend/groth16/bn254" + icicle_bn254 "github.com/consensys/gnark/backend/groth16/bn254/icicle" cs_bn254 "github.com/consensys/gnark/constraint/bn254" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" @@ -43,3 +44,24 @@ func TestMarshal(t *testing.T) { t.Error("marshal output difference") } } + +func TestMarshal2(t *testing.T) { + assert := test.NewAssert(t) + ccs, err := frontend.Compile(ecc.BN254.ScalarField(), 
r1cs.NewBuilder, &circuit{}) + assert.NoError(err) + tCcs := ccs.(*cs_bn254.R1CS) + iciPK := icicle_bn254.ProvingKey{} + iciVK := groth16_bn254.VerifyingKey{} + err = icicle_bn254.Setup(tCcs, &iciPK, &iciVK) + assert.NoError(err) + + nativePK := groth16_bn254.ProvingKey{} + buf := new(bytes.Buffer) + _, err = iciPK.WriteTo(buf) + assert.NoError(err) + _, err = nativePK.ReadFrom(buf) + assert.NoError(err) + if iciPK.IsDifferent(&nativePK) { + t.Error("marshal output difference") + } +} From 41458ca348a3069deb87e4beae8c8bf89d8f320e Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Thu, 2 Nov 2023 10:54:47 +0100 Subject: [PATCH 27/58] chore: remove icicle build tag --- internal/generator/backend/main.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go index bf0b33e5d5..3af25e7b3d 100644 --- a/internal/generator/backend/main.go +++ b/internal/generator/backend/main.go @@ -243,6 +243,5 @@ type templateData struct { CSPath string Curve string CurveID string - BuildTag string noBackend bool } From 1f98897d21836b31ef078ceed7e2aa0aba6890c4 Mon Sep 17 00:00:00 2001 From: Jeremy Felder Date: Sun, 5 Nov 2023 13:17:30 +0200 Subject: [PATCH 28/58] Update readme for toggling on and off icicle acceleration --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index 54c20d345f..e222cd5411 100644 --- a/README.md +++ b/README.md @@ -130,6 +130,16 @@ instantiated with the following curve(s) To use GPUs, add the `icicle` buildtag to your build/run commands, e.g. `go run -tags=icicle main.go`. +You can then toggle on or off icicle acceleration by providing the `WithIcicleAcceleration` backend ProverOption: + +```go + // toggle on + proofIci, err := groth16.Prove(ccs, pk, secretWitness, backend.WithIcicleAcceleration()) + + // toggle off + proof, err := groth16.Prove(ccs, pk, secretWitness) +``` + For more information about prerequisites see the [Icicle repo](https://github.com/ingonyama-zk/icicle). 
## Citing From d88413091fc5c404eb50a9724003e57f5d2ead29 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Sun, 4 Feb 2024 12:07:54 -0700 Subject: [PATCH 29/58] refactor --- backend/plonk/bn254/icicle/doc.go | 2 + backend/plonk/bn254/icicle/kzg.go | 143 ++ backend/plonk/bn254/icicle/marshal.go | 406 +++++ backend/plonk/bn254/icicle/marshal_test.go | 175 +++ backend/plonk/bn254/icicle/prove.go | 1593 ++++++++++++++++++++ backend/plonk/bn254/icicle/setup.go | 410 +++++ backend/plonk/bn254/icicle/solidity.go | 1420 +++++++++++++++++ backend/plonk/bn254/icicle/unmarshal.go | 88 ++ backend/plonk/bn254/icicle/verify.go | 401 +++++ backend/plonk/bn254/prove.go | 2 +- backend/plonk/plonk.go | 12 +- 11 files changed, 4649 insertions(+), 3 deletions(-) create mode 100644 backend/plonk/bn254/icicle/doc.go create mode 100644 backend/plonk/bn254/icicle/kzg.go create mode 100644 backend/plonk/bn254/icicle/marshal.go create mode 100644 backend/plonk/bn254/icicle/marshal_test.go create mode 100644 backend/plonk/bn254/icicle/prove.go create mode 100644 backend/plonk/bn254/icicle/setup.go create mode 100644 backend/plonk/bn254/icicle/solidity.go create mode 100644 backend/plonk/bn254/icicle/unmarshal.go create mode 100644 backend/plonk/bn254/icicle/verify.go diff --git a/backend/plonk/bn254/icicle/doc.go b/backend/plonk/bn254/icicle/doc.go new file mode 100644 index 0000000000..07bcd4568e --- /dev/null +++ b/backend/plonk/bn254/icicle/doc.go @@ -0,0 +1,2 @@ +// Package icicle_bn254 implements ICICLE acceleration for BN254 Plonk backend. +package icicle_bn254 diff --git a/backend/plonk/bn254/icicle/kzg.go b/backend/plonk/bn254/icicle/kzg.go new file mode 100644 index 0000000000..75d9aad88c --- /dev/null +++ b/backend/plonk/bn254/icicle/kzg.go @@ -0,0 +1,143 @@ +package icicle_bn254 + +import ( + "errors" + "fmt" + "sync" + "unsafe" + + "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" +) + +var ( + ErrInvalidPolynomialSize = errors.New("invalid polynomial size (larger than SRS or == 0)") +) + +// Digest commitment of a polynomial. +type Digest = bn254.G1Affine + +// Commit commits to a polynomial using a multi exponentiation with the SRS. +// It is assumed that the polynomial is in canonical form, in Montgomery form. 
+func kzgDeviceCommit(p []fr.Element, G1 unsafe.Pointer, nbTasks ...int) (Digest, error) { + // Size of the polynomial + np := len(p) + + // Size of the polynomial in bytes + sizeBytesScalars := np * fr.Bytes + + // Initialize Scalar channels + copyCpDone := make(chan unsafe.Pointer, 1) + cpDeviceData := make(chan iciclegnark.OnDeviceData, 1) + + // Copy Scalar to device + go func() { + // Perform copy operation + iciclegnark.CopyToDevice(p, sizeBytesScalars, copyCpDone) + + // Receive result once copy operation is done + cpDevice := <-copyCpDone + + // Create OnDeviceData + cpDeviceValue := iciclegnark.OnDeviceData{ + P: cpDevice, + Size: sizeBytesScalars, + } + + // Send OnDeviceData to respective channel + cpDeviceData <- cpDeviceValue + + // Close channels + close(copyCpDone) + close(cpDeviceData) + }() + + // Wait for copy operation to finish + cpDeviceValue := <-cpDeviceData + + // KZG Committment on device + var wg sync.WaitGroup + + // Perform multi exponentiation on device + wg.Add(1) + tmpChan := make(chan bn254.G1Affine, 1) + go func() { + defer wg.Done() + tmp, _, err := iciclegnark.MsmOnDevice(cpDeviceValue.P, G1, np, true) + //fmt.Println("tmp", tmp) + if err != nil { + fmt.Print("error", err) + } + var res bn254.G1Affine + res.FromJacobian(&tmp) + tmpChan <- res + }() + wg.Wait() + + // Receive result once copy operation is done + res := <-tmpChan + + // Free device memory + go func() { + iciclegnark.FreeDevicePointer(unsafe.Pointer(&cpDeviceValue)) + }() + + return res, nil +} + +// Open computes an opening proof of polynomial p at given point. +// fft.Domain Cardinality must be larger than p.Degree() +func kzgDeviceOpen(p []fr.Element, point fr.Element, pk *ProvingKey) (kzg.OpeningProof, error) { + // build the proof + res := kzg.OpeningProof{ + ClaimedValue: eval(p, point), + } + + // compute H + // h reuses memory from _p + _p := make([]fr.Element, len(p)) + copy(_p, p) + h := dividePolyByXminusA(_p, res.ClaimedValue, point) + + // commit to H + hCommit, err := kzgDeviceCommit(h, pk.deviceInfo.G1Device.G1) + if err != nil { + return kzg.OpeningProof{}, err + } + res.H.Set(&hCommit) + + return res, nil +} + +// dividePolyByXminusA computes (f-f(a))/(x-a), in canonical basis, in regular form +// f memory is re-used for the result +func dividePolyByXminusA(f []fr.Element, fa, a fr.Element) []fr.Element { + + // first we compute f-f(a) + f[0].Sub(&f[0], &fa) + + // now we use synthetic division to divide by x-a + var t fr.Element + for i := len(f) - 2; i >= 0; i-- { + t.Mul(&f[i+1], &a) + + f[i].Add(&f[i], &t) + } + + // the result is of degree deg(f)-1 + return f[1:] +} + +// eval returns p(point) where p is interpreted as a polynomial +// ∑_{i= 0; i-- { + res.Mul(&res, &point).Add(&res, &p[i]) + } + return res +} diff --git a/backend/plonk/bn254/icicle/marshal.go b/backend/plonk/bn254/icicle/marshal.go new file mode 100644 index 0000000000..8912b29d1e --- /dev/null +++ b/backend/plonk/bn254/icicle/marshal.go @@ -0,0 +1,406 @@ +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by gnark DO NOT EDIT + +package icicle_bn254 + +import ( + curve "github.com/consensys/gnark-crypto/ecc/bn254" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + + "errors" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/iop" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "io" +) + +// WriteRawTo writes binary encoding of Proof to w without point compression +func (proof *Proof) WriteRawTo(w io.Writer) (int64, error) { + return proof.writeTo(w, curve.RawEncoding()) +} + +// WriteTo writes binary encoding of Proof to w with point compression +func (proof *Proof) WriteTo(w io.Writer) (int64, error) { + return proof.writeTo(w) +} + +func (proof *Proof) writeTo(w io.Writer, options ...func(*curve.Encoder)) (int64, error) { + enc := curve.NewEncoder(w, options...) + + toEncode := []interface{}{ + &proof.LRO[0], + &proof.LRO[1], + &proof.LRO[2], + &proof.Z, + &proof.H[0], + &proof.H[1], + &proof.H[2], + &proof.BatchedProof.H, + proof.BatchedProof.ClaimedValues, + &proof.ZShiftedOpening.H, + &proof.ZShiftedOpening.ClaimedValue, + proof.Bsb22Commitments, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} + +// ReadFrom reads binary representation of Proof from r +func (proof *Proof) ReadFrom(r io.Reader) (int64, error) { + dec := curve.NewDecoder(r) + toDecode := []interface{}{ + &proof.LRO[0], + &proof.LRO[1], + &proof.LRO[2], + &proof.Z, + &proof.H[0], + &proof.H[1], + &proof.H[2], + &proof.BatchedProof.H, + &proof.BatchedProof.ClaimedValues, + &proof.ZShiftedOpening.H, + &proof.ZShiftedOpening.ClaimedValue, + &proof.Bsb22Commitments, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + if proof.Bsb22Commitments == nil { + proof.Bsb22Commitments = []kzg.Digest{} + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of ProvingKey to w +func (pk *ProvingKey) WriteTo(w io.Writer) (n int64, err error) { + return pk.writeTo(w, true) +} + +// WriteRawTo writes binary encoding of ProvingKey to w without point compression +func (pk *ProvingKey) WriteRawTo(w io.Writer) (n int64, err error) { + return pk.writeTo(w, false) +} + +func (pk *ProvingKey) writeTo(w io.Writer, withCompression bool) (n int64, err error) { + // encode the verifying key + if withCompression { + n, err = pk.Vk.WriteTo(w) + } else { + n, err = pk.Vk.WriteRawTo(w) + } + if err != nil { + return + } + + // fft domains + n2, err := pk.Domain[0].WriteTo(w) + if err != nil { + return + } + n += n2 + + n2, err = pk.Domain[1].WriteTo(w) + if err != nil { + return + } + n += n2 + + // KZG key + if withCompression { + n2, err = pk.Kzg.WriteTo(w) + } else { + n2, err = pk.Kzg.WriteRawTo(w) + } + if err != nil { + return + } + n += n2 + if withCompression { + n2, err = pk.KzgLagrange.WriteTo(w) + } else { + n2, err = pk.KzgLagrange.WriteRawTo(w) + } + if err != nil { + return + } + n += n2 + + + // sanity check len(Permutation) == 3*int(pk.Domain[0].Cardinality) + if len(pk.trace.S) != (3 * int(pk.Domain[0].Cardinality)) { + return n, errors.New("invalid permutation size, expected 3*domain cardinality") + } + + enc := curve.NewEncoder(w) + // note: type Polynomial, which is handled by default binary.Write(...) 
op and doesn't + // encode the size (nor does it convert from Montgomery to Regular form) + // so we explicitly transmit []fr.Element + toEncode := []interface{}{ + pk.trace.Ql.Coefficients(), + pk.trace.Qr.Coefficients(), + pk.trace.Qm.Coefficients(), + pk.trace.Qo.Coefficients(), + pk.trace.Qk.Coefficients(), + coefficients(pk.trace.Qcp), + pk.trace.S1.Coefficients(), + pk.trace.S2.Coefficients(), + pk.trace.S3.Coefficients(), + pk.trace.S, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return n + enc.BytesWritten(), err + } + } + + return n + enc.BytesWritten(), nil +} + +// ReadFrom reads from binary representation in r into ProvingKey +func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { + return pk.readFrom(r, true) +} + +// UnsafeReadFrom reads from binary representation in r into ProvingKey without subgroup checks +func (pk *ProvingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return pk.readFrom(r, false) +} + +func (pk *ProvingKey) readFrom(r io.Reader, withSubgroupChecks bool) (int64, error) { + pk.Vk = &VerifyingKey{} + n, err := pk.Vk.ReadFrom(r) + if err != nil { + return n, err + } + + n2, err, chDomain0 := pk.Domain[0].AsyncReadFrom(r) + n += n2 + if err != nil { + return n, err + } + + n2, err, chDomain1 := pk.Domain[1].AsyncReadFrom(r) + n += n2 + if err != nil { + return n, err + } + + if withSubgroupChecks { + n2, err = pk.Kzg.ReadFrom(r) + } else { + n2, err = pk.Kzg.UnsafeReadFrom(r) + } + n += n2 + if err != nil { + return n, err + } + if withSubgroupChecks { + n2, err = pk.KzgLagrange.ReadFrom(r) + } else { + n2, err = pk.KzgLagrange.UnsafeReadFrom(r) + } + n += n2 + if err != nil { + return n, err + } + + pk.trace.S = make([]int64, 3*pk.Domain[0].Cardinality) + + dec := curve.NewDecoder(r) + + var ql, qr, qm, qo, qk, s1, s2, s3 []fr.Element + var qcp [][]fr.Element + + // TODO @gbotrel: this is a bit ugly, we should probably refactor this. + // The order of the variables is important, as it matches the order in which they are + // encoded in the WriteTo(...) method. + + // Note: instead of calling dec.Decode(...) for each of the above variables, + // we call AsyncReadFrom when possible which allows to consume bytes from the reader + // and perform the decoding in parallel + + type v struct { + data *fr.Vector + chErr chan error + } + + vectors := make([]v, 8) + vectors[0] = v{data: (*fr.Vector)(&ql)} + vectors[1] = v{data: (*fr.Vector)(&qr)} + vectors[2] = v{data: (*fr.Vector)(&qm)} + vectors[3] = v{data: (*fr.Vector)(&qo)} + vectors[4] = v{data: (*fr.Vector)(&qk)} + vectors[5] = v{data: (*fr.Vector)(&s1)} + vectors[6] = v{data: (*fr.Vector)(&s2)} + vectors[7] = v{data: (*fr.Vector)(&s3)} + + // read ql, qr, qm, qo, qk + for i := 0; i < 5; i++ { + n2, err, ch := vectors[i].data.AsyncReadFrom(r) + n += n2 + if err != nil { + return n, err + } + vectors[i].chErr = ch + } + + // read qcp + if err := dec.Decode(&qcp); err != nil { + return n + dec.BytesRead(), err + } + + // read lqk, s1, s2, s3 + for i := 5; i < 8; i++ { + n2, err, ch := vectors[i].data.AsyncReadFrom(r) + n += n2 + if err != nil { + return n, err + } + vectors[i].chErr = ch + } + + // read pk.Trace.S + if err := dec.Decode(&pk.trace.S); err != nil { + return n + dec.BytesRead(), err + } + + // wait for all AsyncReadFrom(...) 
to complete + for i := range vectors { + if err := <-vectors[i].chErr; err != nil { + return n, err + } + } + + canReg := iop.Form{Basis: iop.Canonical, Layout: iop.Regular} + pk.trace.Ql = iop.NewPolynomial(&ql, canReg) + pk.trace.Qr = iop.NewPolynomial(&qr, canReg) + pk.trace.Qm = iop.NewPolynomial(&qm, canReg) + pk.trace.Qo = iop.NewPolynomial(&qo, canReg) + pk.trace.Qk = iop.NewPolynomial(&qk, canReg) + pk.trace.S1 = iop.NewPolynomial(&s1, canReg) + pk.trace.S2 = iop.NewPolynomial(&s2, canReg) + pk.trace.S3 = iop.NewPolynomial(&s3, canReg) + + pk.trace.Qcp = make([]*iop.Polynomial, len(qcp)) + for i := range qcp { + pk.trace.Qcp[i] = iop.NewPolynomial(&qcp[i], canReg) + } + + // wait for FFT to be precomputed + <-chDomain0 + <-chDomain1 + + return n + dec.BytesRead(), nil + +} + +// WriteTo writes binary encoding of VerifyingKey to w +func (vk *VerifyingKey) WriteTo(w io.Writer) (n int64, err error) { + return vk.writeTo(w) +} + +// WriteRawTo writes binary encoding of VerifyingKey to w without point compression +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(w, curve.RawEncoding()) +} + +func (vk *VerifyingKey) writeTo(w io.Writer, options ...func(*curve.Encoder)) (n int64, err error) { + enc := curve.NewEncoder(w) + + toEncode := []interface{}{ + vk.Size, + &vk.SizeInv, + &vk.Generator, + vk.NbPublicVariables, + &vk.CosetShift, + &vk.S[0], + &vk.S[1], + &vk.S[2], + &vk.Ql, + &vk.Qr, + &vk.Qm, + &vk.Qo, + &vk.Qk, + vk.Qcp, + &vk.Kzg.G1, + &vk.Kzg.G2[0], + &vk.Kzg.G2[1], + vk.CommitmentConstraintIndexes, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} + +// UnsafeReadFrom reads from binary representation in r into VerifyingKey. +// Current implementation is a passthrough to ReadFrom +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.ReadFrom(r) +} + +// ReadFrom reads from binary representation in r into VerifyingKey +func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { + dec := curve.NewDecoder(r) + toDecode := []interface{}{ + &vk.Size, + &vk.SizeInv, + &vk.Generator, + &vk.NbPublicVariables, + &vk.CosetShift, + &vk.S[0], + &vk.S[1], + &vk.S[2], + &vk.Ql, + &vk.Qr, + &vk.Qm, + &vk.Qo, + &vk.Qk, + &vk.Qcp, + &vk.Kzg.G1, + &vk.Kzg.G2[0], + &vk.Kzg.G2[1], + &vk.CommitmentConstraintIndexes, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + if vk.Qcp == nil { + vk.Qcp = []kzg.Digest{} + } + + return dec.BytesRead(), nil +} diff --git a/backend/plonk/bn254/icicle/marshal_test.go b/backend/plonk/bn254/icicle/marshal_test.go new file mode 100644 index 0000000000..349ab38887 --- /dev/null +++ b/backend/plonk/bn254/icicle/marshal_test.go @@ -0,0 +1,175 @@ +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
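A minimal usage sketch of the serialization pair above (editorial; assumes it sits in the same package as ProvingKey and imports "bytes"). The round-trip tests that follow exercise the same path through gnark's io.RoundTripCheck:

// roundTrip serializes a proving key and reads it back, mirroring what the
// marshal tests below verify.
func roundTrip(pk *ProvingKey) error {
	var buf bytes.Buffer
	if _, err := pk.WriteTo(&buf); err != nil {
		return err
	}
	var pk2 ProvingKey
	_, err := pk2.ReadFrom(&buf)
	return err
}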
+ +// Code generated by gnark DO NOT EDIT + +package icicle_bn254 + +import ( + curve "github.com/consensys/gnark-crypto/ecc/bn254" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/iop" + "github.com/consensys/gnark/io" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestProofSerialization(t *testing.T) { + // create a proof + var proof Proof + proof.randomize() + + assert.NoError(t, io.RoundTripCheck(&proof, func() interface{} { return new(Proof) })) +} + +func TestProvingKeySerialization(t *testing.T) { + // random pk + var pk ProvingKey + pk.randomize() + + assert.NoError(t, io.RoundTripCheck(&pk, func() interface{} { return new(ProvingKey) })) +} + +func TestVerifyingKeySerialization(t *testing.T) { + // create a random vk + var vk VerifyingKey + vk.randomize() + + assert.NoError(t, io.RoundTripCheck(&vk, func() interface{} { return new(VerifyingKey) })) +} + +func (pk *ProvingKey) randomize() { + + var vk VerifyingKey + vk.randomize() + pk.Vk = &vk + pk.Domain[0] = *fft.NewDomain(32) + pk.Domain[1] = *fft.NewDomain(4 * 32) + + pk.Kzg.G1 = make([]curve.G1Affine, 32) + pk.KzgLagrange.G1 = make([]curve.G1Affine, 32) + for i := range pk.Kzg.G1 { + pk.Kzg.G1[i] = randomG1Point() + pk.KzgLagrange.G1[i] = randomG1Point() + } + + n := int(pk.Domain[0].Cardinality) + ql := randomScalars(n) + qr := randomScalars(n) + qm := randomScalars(n) + qo := randomScalars(n) + qk := randomScalars(n) + s1 := randomScalars(n) + s2 := randomScalars(n) + s3 := randomScalars(n) + + canReg := iop.Form{Basis: iop.Canonical, Layout: iop.Regular} + pk.trace.Ql = iop.NewPolynomial(&ql, canReg) + pk.trace.Qr = iop.NewPolynomial(&qr, canReg) + pk.trace.Qm = iop.NewPolynomial(&qm, canReg) + pk.trace.Qo = iop.NewPolynomial(&qo, canReg) + pk.trace.Qk = iop.NewPolynomial(&qk, canReg) + pk.trace.S1 = iop.NewPolynomial(&s1, canReg) + pk.trace.S2 = iop.NewPolynomial(&s2, canReg) + pk.trace.S3 = iop.NewPolynomial(&s3, canReg) + + pk.trace.Qcp = make([]*iop.Polynomial, rand.Intn(4)) //#nosec G404 weak rng is fine here + for i := range pk.trace.Qcp { + qcp := randomScalars(rand.Intn(n / 4)) //#nosec G404 weak rng is fine here + pk.trace.Qcp[i] = iop.NewPolynomial(&qcp, canReg) + } + + pk.trace.S = make([]int64, 3*pk.Domain[0].Cardinality) + pk.trace.S[0] = -12 + pk.trace.S[len(pk.trace.S)-1] = 8888 + +} + +func (vk *VerifyingKey) randomize() { + vk.Size = rand.Uint64() //#nosec G404 weak rng is fine here + vk.SizeInv.SetRandom() + vk.Generator.SetRandom() + vk.NbPublicVariables = rand.Uint64() //#nosec G404 weak rng is fine here + vk.CommitmentConstraintIndexes = []uint64{rand.Uint64()} //#nosec G404 weak rng is fine here + vk.CosetShift.SetRandom() + + vk.S[0] = randomG1Point() + vk.S[1] = randomG1Point() + vk.S[2] = randomG1Point() + + vk.Kzg.G1 = randomG1Point() + vk.Kzg.G2[0] = randomG2Point() + vk.Kzg.G2[1] = randomG2Point() + + vk.Ql = randomG1Point() + vk.Qr = randomG1Point() + vk.Qm = randomG1Point() + vk.Qo = randomG1Point() + vk.Qk = randomG1Point() + vk.Qcp = randomG1Points(rand.Intn(4)) //#nosec G404 weak rng is fine here +} + +func (proof *Proof) randomize() { + proof.LRO[0] = randomG1Point() + proof.LRO[1] = randomG1Point() + proof.LRO[2] = randomG1Point() + proof.Z = randomG1Point() + proof.H[0] = randomG1Point() + proof.H[1] = randomG1Point() + proof.H[2] = randomG1Point() + proof.BatchedProof.H = randomG1Point() + proof.BatchedProof.ClaimedValues = 
randomScalars(2) + proof.ZShiftedOpening.H = randomG1Point() + proof.ZShiftedOpening.ClaimedValue.SetRandom() + proof.Bsb22Commitments = randomG1Points(rand.Intn(4)) //#nosec G404 weak rng is fine here +} + +func randomG2Point() curve.G2Affine { + _, _, _, r := curve.Generators() + r.ScalarMultiplication(&r, big.NewInt(int64(rand.Uint64()))) //#nosec G404 weak rng is fine here + return r +} + +func randomG1Point() curve.G1Affine { + _, _, r, _ := curve.Generators() + r.ScalarMultiplication(&r, big.NewInt(int64(rand.Uint64()))) //#nosec G404 weak rng is fine here + return r +} + +func randomG1Points(n int) []curve.G1Affine { + res := make([]curve.G1Affine, n) + for i := range res { + res[i] = randomG1Point() + } + return res +} + +func randomScalars(n int) []fr.Element { + v := make([]fr.Element, n) + one := fr.One() + for i := 0; i < len(v); i++ { + if i == 0 { + v[i].SetRandom() + } else { + v[i].Add(&v[i-1], &one) + } + } + return v +} diff --git a/backend/plonk/bn254/icicle/prove.go b/backend/plonk/bn254/icicle/prove.go new file mode 100644 index 0000000000..909b664835 --- /dev/null +++ b/backend/plonk/bn254/icicle/prove.go @@ -0,0 +1,1593 @@ +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by gnark DO NOT EDIT + +package icicle_bn254 + +import ( + "context" + "unsafe" + "errors" + "fmt" + "hash" + "math/big" + "math/bits" + "runtime" + "sync" + "time" + + "golang.org/x/sync/errgroup" + + //"github.com/consensys/gnark-crypto/ecc" + + curve "github.com/consensys/gnark-crypto/ecc/bn254" + + "github.com/consensys/gnark-crypto/ecc/bn254/fp" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/iop" + + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/backend/witness" + + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bn254" + "github.com/consensys/gnark/constraint/solver" + "github.com/consensys/gnark/internal/utils" + "github.com/consensys/gnark/logger" + + iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" +) + +const HasIcicle = true + +const ( + id_L int = iota + id_R + id_O + id_Z + id_ZS + id_Ql + id_Qr + id_Qm + id_Qo + id_Qk + id_S1 + id_S2 + id_S3 + id_ID + id_LOne + id_Qci // [ .. , Qc_i, Pi_i, ...] 
+) + +// blinding factors +const ( + id_Bl int = iota + id_Br + id_Bo + id_Bz + nb_blinding_polynomials +) + +// blinding orders (-1 to deactivate) +const ( + order_blinding_L = 1 + order_blinding_R = 1 + order_blinding_O = 1 + order_blinding_Z = 2 +) + +type Proof struct { + + // Commitments to the solution vectors + LRO [3]kzg.Digest + + // Commitment to Z, the permutation polynomial + Z kzg.Digest + + // Commitments to h1, h2, h3 such that h = h1 + Xh2 + X**2h3 is the quotient polynomial + H [3]kzg.Digest + + Bsb22Commitments []kzg.Digest + + // Batch opening proof of h1 + zeta*h2 + zeta**2h3, linearizedPolynomial, l, r, o, s1, s2, qCPrime + BatchedProof kzg.BatchOpeningProof + + // Opening proof of Z at zeta*mu + ZShiftedOpening kzg.OpeningProof +} + +func (pk *ProvingKey) setupDevicePointers() error { + log := logger.Logger().With().Str("position", "start").Logger() + log.Info().Msg("setupDevicePointers") + + start := time.Now() + + if pk.deviceInfo != nil { + return nil + } + + // TODO is [0] the correct part of the array + pk.deviceInfo = &deviceInfo{} + n := int(pk.Domain[0].Cardinality) + sizeBytes := n * fr.Bytes + + /************************* Start Domain Device Setup ***************************/ + copyCosetInvDone := make(chan unsafe.Pointer, 1) + copyCosetDone := make(chan unsafe.Pointer, 1) + copyDenDone := make(chan unsafe.Pointer, 1) + + /************************* CosetTableInv ***************************/ + go iciclegnark.CopyToDevice(pk.Domain[0].CosetTableInv, sizeBytes, copyCosetInvDone) + + /************************* CosetTable ***************************/ + go iciclegnark.CopyToDevice(pk.Domain[0].CosetTable, sizeBytes, copyCosetDone) + + /************************* Den ***************************/ + var denI, oneI fr.Element + oneI.SetOne() + denI.Exp(pk.Domain[0].FrMultiplicativeGen, big.NewInt(int64(pk.Domain[0].Cardinality))) + denI.Sub(&denI, &oneI).Inverse(&denI) + + log2SizeFloor := bits.Len(uint(n)) - 1 + denIcicleArr := []fr.Element{denI} + for i := 0; i < log2SizeFloor; i++ { + denIcicleArr = append(denIcicleArr, denIcicleArr...) 
+ } + pow2Remainder := n - 1<> 1 + + gateConstraint := func(u ...fr.Element) fr.Element { + + var ic, tmp fr.Element + + ic.Mul(&u[id_Ql], &u[id_L]) + tmp.Mul(&u[id_Qr], &u[id_R]) + ic.Add(&ic, &tmp) + tmp.Mul(&u[id_Qm], &u[id_L]).Mul(&tmp, &u[id_R]) + ic.Add(&ic, &tmp) + tmp.Mul(&u[id_Qo], &u[id_O]) + ic.Add(&ic, &tmp).Add(&ic, &u[id_Qk]) + for i := 0; i < nbBsbGates; i++ { + tmp.Mul(&u[id_Qci+2*i], &u[id_Qci+2*i+1]) + ic.Add(&ic, &tmp) + } + + return ic + } + + var cs, css fr.Element + cs.Set(&s.pk.Domain[1].FrMultiplicativeGen) + css.Square(&cs) + + orderingConstraint := func(u ...fr.Element) fr.Element { + gamma := s.gamma + + var a, b, c, r, l fr.Element + + a.Add(&gamma, &u[id_L]).Add(&a, &u[id_ID]) + b.Mul(&u[id_ID], &cs).Add(&b, &u[id_R]).Add(&b, &gamma) + c.Mul(&u[id_ID], &css).Add(&c, &u[id_O]).Add(&c, &gamma) + r.Mul(&a, &b).Mul(&r, &c).Mul(&r, &u[id_Z]) + + a.Add(&u[id_S1], &u[id_L]).Add(&a, &gamma) + b.Add(&u[id_S2], &u[id_R]).Add(&b, &gamma) + c.Add(&u[id_S3], &u[id_O]).Add(&c, &gamma) + l.Mul(&a, &b).Mul(&l, &c).Mul(&l, &u[id_ZS]) + + l.Sub(&l, &r) + + return l + } + + ratioLocalConstraint := func(u ...fr.Element) fr.Element { + + var res fr.Element + res.SetOne() + res.Sub(&u[id_Z], &res).Mul(&res, &u[id_LOne]) + + return res + } + + rho := int(s.pk.Domain[1].Cardinality / n) + shifters := make([]fr.Element, rho) + shifters[0].Set(&s.pk.Domain[1].FrMultiplicativeGen) + for i := 1; i < rho; i++ { + shifters[i].Set(&s.pk.Domain[1].Generator) + } + + // stores the current coset shifter + var coset fr.Element + coset.SetOne() + + var tmp, one fr.Element + one.SetOne() + bn := big.NewInt(int64(n)) + + // wait for init go routine + <-s.chNumeratorInit + + cosetTable := s.pk.Domain[0].CosetTable + twiddles := s.pk.Domain[1].Twiddles[0][:n] + + // init the result polynomial & buffer + cres := s.cres + buf := make([]fr.Element, n) + var wgBuf sync.WaitGroup + + allConstraints := func(i int, u ...fr.Element) fr.Element { + // scale S1, S2, S3 by β + u[id_S1].Mul(&u[id_S1], &s.beta) + u[id_S2].Mul(&u[id_S2], &s.beta) + u[id_S3].Mul(&u[id_S3], &s.beta) + + // blind L, R, O, Z, ZS + var y fr.Element + y = s.bp[id_Bl].Evaluate(s.twiddles0[i]) + u[id_L].Add(&u[id_L], &y) + y = s.bp[id_Br].Evaluate(s.twiddles0[i]) + u[id_R].Add(&u[id_R], &y) + y = s.bp[id_Bo].Evaluate(s.twiddles0[i]) + u[id_O].Add(&u[id_O], &y) + y = s.bp[id_Bz].Evaluate(s.twiddles0[i]) + u[id_Z].Add(&u[id_Z], &y) + + // ZS is shifted by 1; need to get correct twiddle + y = s.bp[id_Bz].Evaluate(s.twiddles0[(i+1)%int(n)]) + u[id_ZS].Add(&u[id_ZS], &y) + + a := gateConstraint(u...) + b := orderingConstraint(u...) + c := ratioLocalConstraint(u...) 
+ c.Mul(&c, &s.alpha).Add(&c, &b).Mul(&c, &s.alpha).Add(&c, &a) + return c + } + + // select the correct scaling vector to scale by shifter[i] + selectScalingVector := func(i int, l iop.Layout) []fr.Element { + var w []fr.Element + if i == 0 { + if l == iop.Regular { + w = cosetTable + } else { + w = s.cosetTableRev + } + } else { + if l == iop.Regular { + w = twiddles + } else { + w = s.twiddlesRev + } + } + return w + } + + // pre-computed to compute the bit reverse index + // of the result polynomial + m := uint64(s.pk.Domain[1].Cardinality) + mm := uint64(64 - bits.TrailingZeros64(m)) + + for i := 0; i < rho; i++ { + + coset.Mul(&coset, &shifters[i]) + tmp.Exp(coset, bn).Sub(&tmp, &one) + + // bl <- bl *( (s*ωⁱ)ⁿ-1 )s + for _, q := range s.bp { + cq := q.Coefficients() + acc := tmp + for j := 0; j < len(cq); j++ { + cq[j].Mul(&cq[j], &acc) + acc.Mul(&acc, &shifters[i]) + } + } + + //batchApply(s.x, func(p *iop.Polynomial) { + // // ON Device + // n := p.Size() + // sizeBytes := p.Size() * fr.Bytes + + // copyADone := make(chan unsafe.Pointer, 1) + // go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) + // a_device := <-copyADone + + // // scale by shifter[i] + // w := selectScalingVector(i, p.Layout) + // + // copyWDone := make(chan unsafe.Pointer, 1) + // go iciclegnark.CopyToDevice(w, sizeBytes, copyWDone) + // w_device := <-copyWDone + + // // Initialize channels + // computeInttNttDone := make(chan error, 1) + // computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { + // a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) + + // iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) + // iciclegnark.MontConvOnDevice(a_intt_d, n, true) + // iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, s.pk.deviceInfo.DomainDevice.CosetTable, n, n, sizeBytes, true) + // + // computeInttNttDone <- nil + // iciclegnark.FreeDevicePointer(a_intt_d) + // } + // // Run computeInttNttOnDevice on device + // go computeInttNttOnDevice(w_device, a_device) + // _ = <-computeInttNttDone + + // res := iciclegnark.CopyScalarsToHost(a_device, n, sizeBytes) + // p = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) + // fmt.Print("GPU", p.Coefficients()[0], "\n") + + // go func() { + // iciclegnark.FreeDevicePointer(a_device) + // iciclegnark.FreeDevicePointer(w_device) + // }() + //}) + + // we do **a lot** of FFT here, but on the small domain. + // note that for all the polynomials in the proving key + // (Ql, Qr, Qm, Qo, S1, S2, S3, Qcp, Qc) and ID, LOne + // we could pre-compute theses rho*2 FFTs and store them + // at the cost of a huge memory footprint. 
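To make the per-iteration transform explicit, here is an editorial sketch of what the loop below does to a single polynomial: evaluating p on the shifted coset s·H amounts to scaling the j-th canonical coefficient by sʲ and FFT-ing back to Lagrange form. The production code uses the precomputed cosetTable/twiddles as the scaling vector instead of recomputing powers of s, and parallelizes the scaling loop:

// evalOnShiftedCoset is an illustrative helper, not part of the patch:
// p(s·ωʲ) = Σ_i p_i·sⁱ·ωⁱʲ, i.e. the FFT of the scaled coefficients (p_i·sⁱ).
func evalOnShiftedCoset(p *iop.Polynomial, s fr.Element, d *fft.Domain) *iop.Polynomial {
	p.ToCanonical(d).ToRegular()
	cp := p.Coefficients()
	var acc fr.Element
	acc.SetOne()
	for j := range cp {
		cp[j].Mul(&cp[j], &acc) // p_j ← p_j · sʲ
		acc.Mul(&acc, &s)
	}
	return p.ToLagrange(d).ToRegular()
}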
+ batchApply(s.x, func(p *iop.Polynomial) { + nbTasks := calculateNbTasks(len(s.x)-1) * 2 + // shift polynomials to be in the correct coset + p.ToCanonical(&s.pk.Domain[0], nbTasks) + //fmt.Print("CPU",p.Coefficients()[0], "\n") + + // scale by shifter[i] + w := selectScalingVector(i, p.Layout) + + cp := p.Coefficients() + utils.Parallelize(len(cp), func(start, end int) { + for j := start; j < end; j++ { + cp[j].Mul(&cp[j], &w[j]) + } + }, nbTasks) + + // fft in the correct coset + p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + }) + + wgBuf.Wait() + if _, err := iop.Evaluate( + allConstraints, + buf, + iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}, + s.x..., + ); err != nil { + return nil, err + } + wgBuf.Add(1) + go func(i int) { + for j := 0; j < int(n); j++ { + // we build the polynomial in bit reverse order + cres[bits.Reverse64(uint64(rho*j+i))>>mm] = buf[j] + } + wgBuf.Done() + }(i) + + tmp.Inverse(&tmp) + // bl <- bl *( (s*ωⁱ)ⁿ-1 )s + for _, q := range s.bp { + cq := q.Coefficients() + for j := 0; j < len(cq); j++ { + cq[j].Mul(&cq[j], &tmp) + } + } + } + + // scale everything back + go func() { + for i := id_ZS; i < len(s.x); i++ { + s.x[i] = nil + } + + var cs fr.Element + cs.Set(&shifters[0]) + for i := 1; i < len(shifters); i++ { + cs.Mul(&cs, &shifters[i]) + } + cs.Inverse(&cs) + + batchApply(s.x[:id_ZS], func(p *iop.Polynomial) { + p.ToCanonical(&s.pk.Domain[0], 8).ToRegular() + scalePowers(p, cs) + }) + + for _, q := range s.bp { + scalePowers(q, cs) + } + + close(s.chRestoreLRO) + }() + + // ensure all the goroutines are done + wgBuf.Wait() + + res := iop.NewPolynomial(&cres, iop.Form{Basis: iop.LagrangeCoset, Layout: iop.BitReverse}) + + return res, nil + +} + +func calculateNbTasks(n int) int { + nbAvailableCPU := runtime.NumCPU() - n + if nbAvailableCPU < 0 { + nbAvailableCPU = 1 + } + nbTasks := 1 + (nbAvailableCPU / n) + return nbTasks +} + +// batchApply executes fn on all polynomials in x except x[id_ZS] in parallel. +func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial)) { + var wg sync.WaitGroup + for i := 0; i < len(x); i++ { + if i == id_ZS { + continue + } + wg.Add(1) + go func(i int) { + fn(x[i]) + wg.Done() + }(i) + } + wg.Wait() +} + +// p <- +// p is supposed to be in canonical form +func scalePowers(p *iop.Polynomial, w fr.Element) { + var acc fr.Element + acc.SetOne() + cp := p.Coefficients() + for i := 0; i < p.Size(); i++ { + cp[i].Mul(&cp[i], &acc) + acc.Mul(&acc, &w) + } +} + +func evaluateBlinded(p, bp *iop.Polynomial, zeta fr.Element) fr.Element { + // Get the size of the polynomial + n := big.NewInt(int64(p.Size())) + + var pEvaluatedAtZeta fr.Element + + // Evaluate the polynomial and blinded polynomial at zeta + chP := make(chan struct{}, 1) + go func() { + pEvaluatedAtZeta = p.Evaluate(zeta) + close(chP) + }() + + bpEvaluatedAtZeta := bp.Evaluate(zeta) + + // Multiply the evaluated blinded polynomial by tempElement + var t fr.Element + one := fr.One() + t.Exp(zeta, n).Sub(&t, &one) + bpEvaluatedAtZeta.Mul(&bpEvaluatedAtZeta, &t) + + // Add the evaluated polynomial and the evaluated blinded polynomial + <-chP + pEvaluatedAtZeta.Add(&pEvaluatedAtZeta, &bpEvaluatedAtZeta) + + // Return the result + return pEvaluatedAtZeta +} + +// /!\ modifies p's underlying array of coefficients, in particular the size changes +func getBlindedCoefficients(p, bp *iop.Polynomial) []fr.Element { + cp := p.Coefficients() + cbp := bp.Coefficients() + cp = append(cp, cbp...) 
+ for i := 0; i < len(cbp); i++ { + cp[i].Sub(&cp[i], &cbp[i]) + } + return cp +} + +func deviceCommitBlindingFactor(n int, b *iop.Polynomial, pk *ProvingKey) curve.G1Affine { + // scalars + cp := b.Coefficients() + np := b.Size() + + // get slice of points from unsafe pointer + resPtr := unsafe.Pointer(uintptr(unsafe.Pointer(pk.deviceInfo.G1Device.G1)) + uintptr(n)*unsafe.Sizeof(curve.G1Affine{})) + + // Initialize channels + copyCpDone := make(chan unsafe.Pointer, 1) + cpDeviceData := make(chan iciclegnark.OnDeviceData, 1) + + // Start asynchronous routine + go func() { + + // tbd mul * 2 + sizeBytesScalars := np * fr.Bytes + + // Perform copy operation + iciclegnark.CopyToDevice(cp, sizeBytesScalars, copyCpDone) + + // Receive result once copy operation is done + cpDevice := <-copyCpDone + + // Create OnDeviceData + cpDeviceValue := iciclegnark.OnDeviceData{ + P: cpDevice, + Size: sizeBytesScalars, + } + + // Send OnDeviceData to respective channel + cpDeviceData <- cpDeviceValue + + // Close channels + close(copyCpDone) + close(cpDeviceData) + }() + + // Wait for copy operation to finish + cpDeviceValue := <-cpDeviceData + + var wg sync.WaitGroup + + // Calculate(lo) commitment on Device + wg.Add(1) + tmpChan := make(chan curve.G1Affine, 1) + go func() { + defer wg.Done() + tmpVal, _, err := iciclegnark.MsmOnDevice(cpDeviceValue.P, pk.deviceInfo.G1Device.G1, np, true) + if err != nil { + fmt.Print("Error", err) + } + var tmp curve.G1Affine + tmp.FromJacobian(&tmpVal) + tmpChan <- tmp + }() + wg.Wait() + + tmpAffinePoint := <-tmpChan + + // Calculate(hi) commitment on Device + wg.Add(1) + resChan := make(chan curve.G1Affine, 1) + go func() { + defer wg.Done() + + resVal, _, err := iciclegnark.MsmOnDevice(cpDeviceValue.P, resPtr, np, true) + if err != nil { + fmt.Print("Error", err) + } + var res curve.G1Affine + res.FromJacobian(&resVal) + + resChan <- res + }() + wg.Wait() + resAffinePoint := <-resChan + + // Sub(lo, hi) to get the final commitment + resAffinePoint.Sub(&resAffinePoint, &tmpAffinePoint) + + // Free device memory + go func() { + iciclegnark.FreeDevicePointer(unsafe.Pointer(&cpDeviceValue)) + }() + + return resAffinePoint +} + +// return a random polynomial of degree n, if n==-1 cancel the blinding +func getRandomPolynomial(n int) *iop.Polynomial { + var a []fr.Element + if n == -1 { + a := make([]fr.Element, 1) + a[0].SetZero() + } else { + a = make([]fr.Element, n+1) + for i := 0; i <= n; i++ { + a[i].SetRandom() + } + } + res := iop.NewPolynomial(&a, iop.Form{ + Basis: iop.Canonical, Layout: iop.Regular}) + return res +} + +func coefficients(p []*iop.Polynomial) [][]fr.Element { + res := make([][]fr.Element, len(p)) + for i, pI := range p { + res[i] = pI.Coefficients() + } + return res +} + +func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) error { + g := new(errgroup.Group) + + g.Go(func() (err error) { + proof.H[0], err = kzgDeviceCommit(h1, pk.deviceInfo.G1Device.G1) + return + }) + + g.Go(func() (err error) { + proof.H[1], err = kzgDeviceCommit(h2, pk.deviceInfo.G1Device.G1) + return + }) + + g.Go(func() (err error) { + proof.H[2], err = kzgDeviceCommit(h3, pk.deviceInfo.G1Device.G1) + return + }) + + return g.Wait() +} + +// divideByXMinusOne +// The input must be in LagrangeCoset. +// The result is in Canonical Regular. 
(in place using a) +func divideByXMinusOne(a *iop.Polynomial, domains [2]*fft.Domain) (*iop.Polynomial, error) { + + // check that the basis is LagrangeCoset + if a.Basis != iop.LagrangeCoset || a.Layout != iop.BitReverse { + return nil, errors.New("invalid form") + } + + // prepare the evaluations of x^n-1 on the big domain's coset + xnMinusOneInverseLagrangeCoset := evaluateXnMinusOneDomainBigCoset(domains) + rho := int(domains[1].Cardinality / domains[0].Cardinality) + + r := a.Coefficients() + n := uint64(len(r)) + nn := uint64(64 - bits.TrailingZeros64(n)) + + utils.Parallelize(len(r), func(start, end int) { + for i := start; i < end; i++ { + iRev := bits.Reverse64(uint64(i)) >> nn + r[i].Mul(&r[i], &xnMinusOneInverseLagrangeCoset[int(iRev)%rho]) + } + }) + + // since a is in bit reverse order, ToRegular shouldn't do anything + a.ToCanonical(domains[1]).ToRegular() + + return a, nil + +} + +// evaluateXnMinusOneDomainBigCoset evaluates Xᵐ-1 on DomainBig coset +func evaluateXnMinusOneDomainBigCoset(domains [2]*fft.Domain) []fr.Element { + + rho := domains[1].Cardinality / domains[0].Cardinality + + res := make([]fr.Element, rho) + + expo := big.NewInt(int64(domains[0].Cardinality)) + res[0].Exp(domains[1].FrMultiplicativeGen, expo) + + var t fr.Element + t.Exp(domains[1].Generator, big.NewInt(int64(domains[0].Cardinality))) + + one := fr.One() + + for i := 1; i < int(rho); i++ { + res[i].Mul(&res[i-1], &t) + res[i-1].Sub(&res[i-1], &one) + } + res[len(res)-1].Sub(&res[len(res)-1], &one) + + res = fr.BatchInvert(res) + + return res +} + +// computeLinearizedPolynomial computes the linearized polynomial in canonical basis. +// The purpose is to commit and open all in one ql, qr, qm, qo, qk. +// * lZeta, rZeta, oZeta are the evaluation of l, r, o at zeta +// * z is the permutation polynomial, zu is Z(μX), the shifted version of Z +// * pk is the proving key: the linearized polynomial is a linear combination of ql, qr, qm, qo, qk. 
+// +// The Linearized polynomial is: +// +// α²*L₁(ζ)*Z(X) +// + α*( (l(ζ)+β*s1(ζ)+γ)*(r(ζ)+β*s2(ζ)+γ)*Z(μζ)*s3(X) - Z(X)*(l(ζ)+β*id1(ζ)+γ)*(r(ζ)+β*id2(ζ)+γ)*(o(ζ)+β*id3(ζ)+γ)) +// + l(ζ)*Ql(X) + l(ζ)r(ζ)*Qm(X) + r(ζ)*Qr(X) + o(ζ)*Qo(X) + Qk(X) +func computeLinearizedPolynomial(lZeta, rZeta, oZeta, alpha, beta, gamma, zeta, zu fr.Element, qcpZeta, blindedZCanonical []fr.Element, pi2Canonical [][]fr.Element, pk *ProvingKey) []fr.Element { + + // first part: individual constraints + var rl fr.Element + rl.Mul(&rZeta, &lZeta) + + // second part: + // Z(μζ)(l(ζ)+β*s1(ζ)+γ)*(r(ζ)+β*s2(ζ)+γ)*β*s3(X)-Z(X)(l(ζ)+β*id1(ζ)+γ)*(r(ζ)+β*id2(ζ)+γ)*(o(ζ)+β*id3(ζ)+γ) + var s1, s2 fr.Element + chS1 := make(chan struct{}, 1) + go func() { + s1 = pk.trace.S1.Evaluate(zeta) // s1(ζ) + s1.Mul(&s1, &beta).Add(&s1, &lZeta).Add(&s1, &gamma) // (l(ζ)+β*s1(ζ)+γ) + close(chS1) + }() + // ps2 := iop.NewPolynomial(&pk.S2Canonical, iop.Form{Basis: iop.Canonical, Layout: iop.Regular}) + tmp := pk.trace.S2.Evaluate(zeta) // s2(ζ) + tmp.Mul(&tmp, &beta).Add(&tmp, &rZeta).Add(&tmp, &gamma) // (r(ζ)+β*s2(ζ)+γ) + <-chS1 + s1.Mul(&s1, &tmp).Mul(&s1, &zu).Mul(&s1, &beta) // (l(ζ)+β*s1(β)+γ)*(r(ζ)+β*s2(β)+γ)*β*Z(μζ) + + var uzeta, uuzeta fr.Element + uzeta.Mul(&zeta, &pk.Vk.CosetShift) + uuzeta.Mul(&uzeta, &pk.Vk.CosetShift) + + s2.Mul(&beta, &zeta).Add(&s2, &lZeta).Add(&s2, &gamma) // (l(ζ)+β*ζ+γ) + tmp.Mul(&beta, &uzeta).Add(&tmp, &rZeta).Add(&tmp, &gamma) // (r(ζ)+β*u*ζ+γ) + s2.Mul(&s2, &tmp) // (l(ζ)+β*ζ+γ)*(r(ζ)+β*u*ζ+γ) + tmp.Mul(&beta, &uuzeta).Add(&tmp, &oZeta).Add(&tmp, &gamma) // (o(ζ)+β*u²*ζ+γ) + s2.Mul(&s2, &tmp) // (l(ζ)+β*ζ+γ)*(r(ζ)+β*u*ζ+γ)*(o(ζ)+β*u²*ζ+γ) + s2.Neg(&s2) // -(l(ζ)+β*ζ+γ)*(r(ζ)+β*u*ζ+γ)*(o(ζ)+β*u²*ζ+γ) + + // third part L₁(ζ)*α²*Z + var lagrangeZeta, one, den, frNbElmt fr.Element + one.SetOne() + nbElmt := int64(pk.Domain[0].Cardinality) + lagrangeZeta.Set(&zeta). + Exp(lagrangeZeta, big.NewInt(nbElmt)). + Sub(&lagrangeZeta, &one) + frNbElmt.SetUint64(uint64(nbElmt)) + den.Sub(&zeta, &one). + Inverse(&den) + lagrangeZeta.Mul(&lagrangeZeta, &den). // L₁ = (ζⁿ⁻¹)/(ζ-1) + Mul(&lagrangeZeta, &alpha). + Mul(&lagrangeZeta, &alpha). 
+ Mul(&lagrangeZeta, &pk.Domain[0].CardinalityInv) // (1/n)*α²*L₁(ζ) + + s3canonical := pk.trace.S3.Coefficients() + + utils.Parallelize(len(blindedZCanonical), func(start, end int) { + + cql := pk.trace.Ql.Coefficients() + cqr := pk.trace.Qr.Coefficients() + cqm := pk.trace.Qm.Coefficients() + cqo := pk.trace.Qo.Coefficients() + cqk := pk.trace.Qk.Coefficients() + + var t, t0, t1 fr.Element + + for i := start; i < end; i++ { + + t.Mul(&blindedZCanonical[i], &s2) // -Z(X)*(l(ζ)+β*ζ+γ)*(r(ζ)+β*u*ζ+γ)*(o(ζ)+β*u²*ζ+γ) + + if i < len(s3canonical) { + + t0.Mul(&s3canonical[i], &s1) // (l(ζ)+β*s1(ζ)+γ)*(r(ζ)+β*s2(ζ)+γ)*Z(μζ)*β*s3(X) + + t.Add(&t, &t0) + } + + t.Mul(&t, &alpha) // α*( (l(ζ)+β*s1(ζ)+γ)*(r(ζ)+β*s2(ζ)+γ)*Z(μζ)*s3(X) - Z(X)*(l(ζ)+β*ζ+γ)*(r(ζ)+β*u*ζ+γ)*(o(ζ)+β*u²*ζ+γ)) + + if i < len(cqm) { + + t1.Mul(&cqm[i], &rl) // linPol = linPol + l(ζ)r(ζ)*Qm(X) + + t0.Mul(&cql[i], &lZeta) + t0.Add(&t0, &t1) + + t.Add(&t, &t0) // linPol = linPol + l(ζ)*Ql(X) + + t0.Mul(&cqr[i], &rZeta) + t.Add(&t, &t0) // linPol = linPol + r(ζ)*Qr(X) + + t0.Mul(&cqo[i], &oZeta) + t0.Add(&t0, &cqk[i]) + + t.Add(&t, &t0) // linPol = linPol + o(ζ)*Qo(X) + Qk(X) + + for j := range qcpZeta { + t0.Mul(&pi2Canonical[j][i], &qcpZeta[j]) + t.Add(&t, &t0) + } + } + + t0.Mul(&blindedZCanonical[i], &lagrangeZeta) + blindedZCanonical[i].Add(&t, &t0) // finish the computation + } + }) + return blindedZCanonical +} + +var errContextDone = errors.New("context done") diff --git a/backend/plonk/bn254/icicle/setup.go b/backend/plonk/bn254/icicle/setup.go new file mode 100644 index 0000000000..c46ba58794 --- /dev/null +++ b/backend/plonk/bn254/icicle/setup.go @@ -0,0 +1,410 @@ +package icicle_bn254 + +import ( + "errors" + "fmt" + "unsafe" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/iop" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark/backend/plonk/internal" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bn254" +) + +// VerifyingKey stores the data needed to verify a proof: +// * The commitment scheme +// * Commitments of ql prepended with as many ones as there are public inputs +// * Commitments of qr, qm, qo, qk prepended with as many zeroes as there are public inputs +// * Commitments to S1, S2, S3 +type VerifyingKey struct { + // Size circuit + Size uint64 + SizeInv fr.Element + Generator fr.Element + NbPublicVariables uint64 + + // Commitment scheme that is used for an instantiation of PLONK + Kzg kzg.VerifyingKey + + // cosetShift generator of the coset on the small domain + CosetShift fr.Element + + // S commitments to S1, S2, S3 + S [3]kzg.Digest + + // Commitments to ql, qr, qm, qo, qcp prepended with as many zeroes (ones for l) as there are public inputs. + // In particular Qk is not complete. + Ql, Qr, Qm, Qo, Qk kzg.Digest + Qcp []kzg.Digest + + CommitmentConstraintIndexes []uint64 +} + +// Trace stores a plonk trace as columns +type Trace struct { + // Constants describing a plonk circuit. The first entries + // of LQk (whose index correspond to the public inputs) are set to 0, and are to be + // completed by the prover. At those indices i (so from 0 to nb_public_variables), LQl[i]=-1 + // so the first nb_public_variables constraints look like this: + // -1*Wire[i] + 0* + 0 . It is zero when the constant coefficient is replaced by Wire[i]. 
+ Ql, Qr, Qm, Qo, Qk *iop.Polynomial + Qcp []*iop.Polynomial + + // Polynomials representing the splitted permutation. The full permutation's support is 3*N where N=nb wires. + // The set of interpolation is of size N, so to represent the permutation S we let S acts on the + // set A=(, u*, u^{2}*) of size 3*N, where u is outside (its use is to shift the set ). + // We obtain a permutation of A, A'. We split A' in 3 (A'_{1}, A'_{2}, A'_{3}), and S1, S2, S3 are + // respectively the interpolation of A'_{1}, A'_{2}, A'_{3} on . + S1, S2, S3 *iop.Polynomial + + // S full permutation, i -> S[i] + S []int64 +} + +// ProvingKey stores the data needed to generate a proof: +// * the commitment scheme +// * ql, prepended with as many ones as they are public inputs +// * qr, qm, qo prepended with as many zeroes as there are public inputs. +// * qk, prepended with as many zeroes as public inputs, to be completed by the prover +// with the list of public inputs. +// * sigma_1, sigma_2, sigma_3 in both basis +// * the copy constraint permutation +type ProvingKey struct { + // stores ql, qr, qm, qo, qk (-> to be completed by the prover) + // and s1, s2, s3. They are set in canonical basis before generating the proof, they will be used + // for computing the opening proofs (hence the canonical form). The canonical version + // of qk incomplete is used in the linearisation polynomial. + // The polynomials in trace are in canonical basis. + trace Trace + + Kzg, KzgLagrange kzg.ProvingKey + + // Verifying Key is embedded into the proving key (needed by Prove) + Vk *VerifyingKey + + // Domains used for the FFTs. + // Domain[0] = small Domain + // Domain[1] = big Domain + Domain [2]fft.Domain + + deviceInfo *deviceInfo +} + +type deviceInfo struct { + G1Device struct { + G1 unsafe.Pointer + G1Lagrange unsafe.Pointer + } + DomainDevice struct { + Twiddles, TwiddlesInv unsafe.Pointer + CosetTable, CosetTableInv unsafe.Pointer + } + DenDevice unsafe.Pointer + InfinityPointIndicesK []int +} + +// TODO modify the signature to receive the SRS in Lagrange form (optional argument ?) +func Setup(spr *cs.SparseR1CS, kzgSrs kzg.SRS) (*ProvingKey, *VerifyingKey, error) { + + var pk ProvingKey + var vk VerifyingKey + pk.Vk = &vk + vk.CommitmentConstraintIndexes = internal.IntSliceToUint64Slice(spr.CommitmentInfo.CommitmentIndexes()) + + pk.deviceInfo = nil + + // step 0: set the fft domains + pk.initDomains(spr) + if pk.Domain[0].Cardinality < 2 { + return nil, nil, fmt.Errorf("circuit has only %d constraints; unsupported by the current implementation", spr.GetNbConstraints()) + } + + // step 1: set the verifying key + pk.Vk.CosetShift.Set(&pk.Domain[0].FrMultiplicativeGen) + vk.Size = pk.Domain[0].Cardinality + vk.SizeInv.SetUint64(vk.Size).Inverse(&vk.SizeInv) + vk.Generator.Set(&pk.Domain[0].Generator) + vk.NbPublicVariables = uint64(len(spr.Public)) + if len(kzgSrs.Pk.G1) < int(vk.Size)+3 { // + 3 for the kzg.Open of blinded poly + return nil, nil, errors.New("kzg srs is too small") + } + pk.Kzg.G1 = kzgSrs.Pk.G1[:int(vk.Size)+3] + var err error + pk.KzgLagrange.G1, err = kzg.ToLagrangeG1(kzgSrs.Pk.G1[:int(vk.Size)]) + if err != nil { + return nil, nil, err + } + vk.Kzg = kzgSrs.Vk + + // step 2: ql, qr, qm, qo, qk, qcp in Lagrange Basis + BuildTrace(spr, &pk.trace) + + // step 3: build the permutation and build the polynomials S1, S2, S3 to encode the permutation. 
+ // Note: at this stage, the permutation takes in account the placeholders + nbVariables := spr.NbInternalVariables + len(spr.Public) + len(spr.Secret) + buildPermutation(spr, &pk.trace, nbVariables) + s := computePermutationPolynomials(&pk.trace, &pk.Domain[0]) + pk.trace.S1 = s[0] + pk.trace.S2 = s[1] + pk.trace.S3 = s[2] + + // step 4: commit to s1, s2, s3, ql, qr, qm, qo, and (the incomplete version of) qk. + // All the above polynomials are expressed in canonical basis afterwards. This is why + // we save lqk before, because the prover needs to complete it in Lagrange form, and + // then express it on the Lagrange coset basis. + if err = commitTrace(&pk.trace, &pk); err != nil { + return nil, nil, err + } + + return &pk, &vk, nil +} + +// NbPublicWitness returns the expected public witness size (number of field elements) +func (vk *VerifyingKey) NbPublicWitness() int { + return int(vk.NbPublicVariables) +} + +// VerifyingKey returns pk.Vk +func (pk *ProvingKey) VerifyingKey() interface{} { + return pk.Vk +} + +// BuildTrace fills the constant columns ql, qr, qm, qo, qk from the sparser1cs. +// Size is the size of the system that is nb_constraints+nb_public_variables +func BuildTrace(spr *cs.SparseR1CS, pt *Trace) { + + nbConstraints := spr.GetNbConstraints() + sizeSystem := uint64(nbConstraints + len(spr.Public)) + size := ecc.NextPowerOfTwo(sizeSystem) + commitmentInfo := spr.CommitmentInfo.(constraint.PlonkCommitments) + + ql := make([]fr.Element, size) + qr := make([]fr.Element, size) + qm := make([]fr.Element, size) + qo := make([]fr.Element, size) + qk := make([]fr.Element, size) + qcp := make([][]fr.Element, len(commitmentInfo)) + + for i := 0; i < len(spr.Public); i++ { // placeholders (-PUB_INPUT_i + qk_i = 0) TODO should return error if size is inconsistent + ql[i].SetOne().Neg(&ql[i]) + qr[i].SetZero() + qm[i].SetZero() + qo[i].SetZero() + qk[i].SetZero() // → to be completed by the prover + } + offset := len(spr.Public) + + j := 0 + it := spr.GetSparseR1CIterator() + for c := it.Next(); c != nil; c = it.Next() { + ql[offset+j].Set(&spr.Coefficients[c.QL]) + qr[offset+j].Set(&spr.Coefficients[c.QR]) + qm[offset+j].Set(&spr.Coefficients[c.QM]) + qo[offset+j].Set(&spr.Coefficients[c.QO]) + qk[offset+j].Set(&spr.Coefficients[c.QC]) + j++ + } + + lagReg := iop.Form{Basis: iop.Lagrange, Layout: iop.Regular} + + pt.Ql = iop.NewPolynomial(&ql, lagReg) + pt.Qr = iop.NewPolynomial(&qr, lagReg) + pt.Qm = iop.NewPolynomial(&qm, lagReg) + pt.Qo = iop.NewPolynomial(&qo, lagReg) + pt.Qk = iop.NewPolynomial(&qk, lagReg) + pt.Qcp = make([]*iop.Polynomial, len(qcp)) + + for i := range commitmentInfo { + qcp[i] = make([]fr.Element, size) + for _, committed := range commitmentInfo[i].Committed { + qcp[i][offset+committed].SetOne() + } + pt.Qcp[i] = iop.NewPolynomial(&qcp[i], lagReg) + } +} + +// commitTrace commits to every polynomial in the trace, and put +// the commitments int the verifying key. 
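An editorial sketch of the per-column pattern the function below applies to ql, qr, qm, qo, qk, s1, s2, s3 and each qcp (illustrative only; the patch inlines it for every column and stores the resulting digests in the verifying key):

// commitColumn interpolates a Lagrange-form trace column into canonical
// coefficients and KZG-commits it with the G1 part of the SRS.
func commitColumn(p *iop.Polynomial, pk *ProvingKey) (kzg.Digest, error) {
	p.ToCanonical(&pk.Domain[0]).ToRegular()
	return kzg.Commit(p.Coefficients(), pk.Kzg)
}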
+func commitTrace(trace *Trace, pk *ProvingKey) error { + + trace.Ql.ToCanonical(&pk.Domain[0]).ToRegular() + trace.Qr.ToCanonical(&pk.Domain[0]).ToRegular() + trace.Qm.ToCanonical(&pk.Domain[0]).ToRegular() + trace.Qo.ToCanonical(&pk.Domain[0]).ToRegular() + trace.Qk.ToCanonical(&pk.Domain[0]).ToRegular() // -> qk is not complete + trace.S1.ToCanonical(&pk.Domain[0]).ToRegular() + trace.S2.ToCanonical(&pk.Domain[0]).ToRegular() + trace.S3.ToCanonical(&pk.Domain[0]).ToRegular() + + var err error + pk.Vk.Qcp = make([]kzg.Digest, len(trace.Qcp)) + for i := range trace.Qcp { + trace.Qcp[i].ToCanonical(&pk.Domain[0]).ToRegular() + if pk.Vk.Qcp[i], err = kzg.Commit(pk.trace.Qcp[i].Coefficients(), pk.Kzg); err != nil { + return err + } + } + if pk.Vk.Ql, err = kzg.Commit(pk.trace.Ql.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.Qr, err = kzg.Commit(pk.trace.Qr.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.Qm, err = kzg.Commit(pk.trace.Qm.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.Qo, err = kzg.Commit(pk.trace.Qo.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.Qk, err = kzg.Commit(pk.trace.Qk.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.S[0], err = kzg.Commit(pk.trace.S1.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.S[1], err = kzg.Commit(pk.trace.S2.Coefficients(), pk.Kzg); err != nil { + return err + } + if pk.Vk.S[2], err = kzg.Commit(pk.trace.S3.Coefficients(), pk.Kzg); err != nil { + return err + } + return nil +} + +func (pk *ProvingKey) initDomains(spr *cs.SparseR1CS) { + + nbConstraints := spr.GetNbConstraints() + sizeSystem := uint64(nbConstraints + len(spr.Public)) // len(spr.Public) is for the placeholder constraints + pk.Domain[0] = *fft.NewDomain(sizeSystem) + + // h, the quotient polynomial is of degree 3(n+1)+2, so it's in a 3(n+2) dim vector space, + // the domain is the next power of 2 superior to 3(n+2). 4*domainNum is enough in all cases + // except when n<6. + if sizeSystem < 6 { + pk.Domain[1] = *fft.NewDomain(8 * sizeSystem) + } else { + pk.Domain[1] = *fft.NewDomain(4 * sizeSystem) + } + +} + +// buildPermutation builds the Permutation associated with a circuit. +// +// The permutation s is composed of cycles of maximum length such that +// +// s. (l∥r∥o) = (l∥r∥o) +// +// , where l∥r∥o is the concatenation of the indices of l, r, o in +// ql.l+qr.r+qm.l.r+qo.O+k = 0. 
+// +// The permutation is encoded as a slice s of size 3*size(l), where the +// i-th entry of l∥r∥o is sent to the s[i]-th entry, so it acts on a tab +// like this: for i in tab: tab[i] = tab[permutation[i]] +func buildPermutation(spr *cs.SparseR1CS, pt *Trace, nbVariables int) { + + // nbVariables := spr.NbInternalVariables + len(spr.Public) + len(spr.Secret) + sizeSolution := len(pt.Ql.Coefficients()) + sizePermutation := 3 * sizeSolution + + // init permutation + permutation := make([]int64, sizePermutation) + for i := 0; i < len(permutation); i++ { + permutation[i] = -1 + } + + // init LRO position -> variable_ID + lro := make([]int, sizePermutation) // position -> variable_ID + for i := 0; i < len(spr.Public); i++ { + lro[i] = i // IDs of LRO associated to placeholders (only L needs to be taken care of) + } + + offset := len(spr.Public) + + j := 0 + it := spr.GetSparseR1CIterator() + for c := it.Next(); c != nil; c = it.Next() { + lro[offset+j] = int(c.XA) + lro[sizeSolution+offset+j] = int(c.XB) + lro[2*sizeSolution+offset+j] = int(c.XC) + + j++ + } + + // init cycle: + // map ID -> last position the ID was seen + cycle := make([]int64, nbVariables) + for i := 0; i < len(cycle); i++ { + cycle[i] = -1 + } + + for i := 0; i < len(lro); i++ { + if cycle[lro[i]] != -1 { + // if != -1, it means we already encountered this value + // so we need to set the corresponding permutation index. + permutation[i] = cycle[lro[i]] + } + cycle[lro[i]] = int64(i) + } + + // complete the Permutation by filling the first IDs encountered + for i := 0; i < sizePermutation; i++ { + if permutation[i] == -1 { + permutation[i] = cycle[lro[i]] + } + } + + pt.S = permutation +} + +// computePermutationPolynomials computes the LDE (Lagrange basis) of the permutation. +// We let the permutation act on || u || u^{2}, split the result in 3 parts, +// and interpolate each of the 3 parts on . 
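A worked micro-example of this encoding (editorial, with n = 2 and small-domain generator ω): the identity support built by getSupportPermutation below is (1, ω, u, u·ω, u², u²·ω), where u is FrMultiplicativeGen. If the same witness variable feeds the left wire of row 0 and the right wire of row 1, buildPermutation links the two slots into a cycle, giving S[0] = 3 and S[3] = 0; computePermutationPolynomials then sets s1(ω⁰) = id[S[0]] = u·ω and s2(ω¹) = id[S[3]] = 1, and interpolates s1, s2, s3 over the small domain.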
+func computePermutationPolynomials(pt *Trace, domain *fft.Domain) [3]*iop.Polynomial { + + nbElmts := int(domain.Cardinality) + + var res [3]*iop.Polynomial + + // Lagrange form of ID + evaluationIDSmallDomain := getSupportPermutation(domain) + + // Lagrange form of S1, S2, S3 + s1Canonical := make([]fr.Element, nbElmts) + s2Canonical := make([]fr.Element, nbElmts) + s3Canonical := make([]fr.Element, nbElmts) + for i := 0; i < nbElmts; i++ { + s1Canonical[i].Set(&evaluationIDSmallDomain[pt.S[i]]) + s2Canonical[i].Set(&evaluationIDSmallDomain[pt.S[nbElmts+i]]) + s3Canonical[i].Set(&evaluationIDSmallDomain[pt.S[2*nbElmts+i]]) + } + + lagReg := iop.Form{Basis: iop.Lagrange, Layout: iop.Regular} + res[0] = iop.NewPolynomial(&s1Canonical, lagReg) + res[1] = iop.NewPolynomial(&s2Canonical, lagReg) + res[2] = iop.NewPolynomial(&s3Canonical, lagReg) + + return res +} + +// getSupportPermutation returns the support on which the permutation acts, it is +// || u || u^{2} +func getSupportPermutation(domain *fft.Domain) []fr.Element { + + res := make([]fr.Element, 3*domain.Cardinality) + + res[0].SetOne() + res[domain.Cardinality].Set(&domain.FrMultiplicativeGen) + res[2*domain.Cardinality].Square(&domain.FrMultiplicativeGen) + + for i := uint64(1); i < domain.Cardinality; i++ { + res[i].Mul(&res[i-1], &domain.Generator) + res[domain.Cardinality+i].Mul(&res[domain.Cardinality+i-1], &domain.Generator) + res[2*domain.Cardinality+i].Mul(&res[2*domain.Cardinality+i-1], &domain.Generator) + } + + return res +} diff --git a/backend/plonk/bn254/icicle/solidity.go b/backend/plonk/bn254/icicle/solidity.go new file mode 100644 index 0000000000..22b7f36454 --- /dev/null +++ b/backend/plonk/bn254/icicle/solidity.go @@ -0,0 +1,1420 @@ +package icicle_bn254 + +const tmplSolidityVerifier = `// SPDX-License-Identifier: Apache-2.0 + +// Copyright 2023 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
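Editorial aside, not part of the generated contract: the {{ ... }} actions in the template below are Go text/template placeholders filled from the VerifyingKey at export time (G2_SRS_*/G1_SRS_* from vk.Kzg, the VK_* commitment constants from the circuit's digests, VK_NB_CUSTOM_GATES and VK_INDEX_COMMIT_API* from CommitmentConstraintIndexes). A hypothetical rendering sketch; the real exporter and its helper functions (fpstr, frstr, inc, hex, add, mul) live elsewhere in this package and are not shown in this hunk:

// exportSolidityTemplate renders the verifier template with text/template.
// funcs must supply the helpers used by the template; the "io" and
// "text/template" imports are assumed.
func exportSolidityTemplate(vk *VerifyingKey, w io.Writer, funcs template.FuncMap) error {
	t, err := template.New("PlonkVerifier").Funcs(funcs).Parse(tmplSolidityVerifier)
	if err != nil {
		return err
	}
	return t.Execute(w, vk)
}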
+ +// Code generated by gnark DO NOT EDIT + +pragma solidity ^0.8.19; + +contract PlonkVerifier { + + uint256 private constant R_MOD = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + uint256 private constant P_MOD = 21888242871839275222246405745257275088696311157297823662689037894645226208583; + {{ range $index, $element := .Kzg.G2 }} + uint256 private constant G2_SRS_{{ $index }}_X_0 = {{ (fpstr $element.X.A1) }}; + uint256 private constant G2_SRS_{{ $index }}_X_1 = {{ (fpstr $element.X.A0) }}; + uint256 private constant G2_SRS_{{ $index }}_Y_0 = {{ (fpstr $element.Y.A1) }}; + uint256 private constant G2_SRS_{{ $index }}_Y_1 = {{ (fpstr $element.Y.A0) }}; + {{ end }} + uint256 private constant G1_SRS_X = {{ fpstr .Kzg.G1.X }}; + uint256 private constant G1_SRS_Y = {{ fpstr .Kzg.G1.Y }}; + + // ----------------------- vk --------------------- + uint256 private constant VK_NB_PUBLIC_INPUTS = {{ .NbPublicVariables }}; + uint256 private constant VK_DOMAIN_SIZE = {{ .Size }}; + uint256 private constant VK_INV_DOMAIN_SIZE = {{ (frstr .SizeInv) }}; + uint256 private constant VK_OMEGA = {{ (frstr .Generator) }}; + uint256 private constant VK_QL_COM_X = {{ (fpstr .Ql.X) }}; + uint256 private constant VK_QL_COM_Y = {{ (fpstr .Ql.Y) }}; + uint256 private constant VK_QR_COM_X = {{ (fpstr .Qr.X) }}; + uint256 private constant VK_QR_COM_Y = {{ (fpstr .Qr.Y) }}; + uint256 private constant VK_QM_COM_X = {{ (fpstr .Qm.X) }}; + uint256 private constant VK_QM_COM_Y = {{ (fpstr .Qm.Y) }}; + uint256 private constant VK_QO_COM_X = {{ (fpstr .Qo.X) }}; + uint256 private constant VK_QO_COM_Y = {{ (fpstr .Qo.Y) }}; + uint256 private constant VK_QK_COM_X = {{ (fpstr .Qk.X) }}; + uint256 private constant VK_QK_COM_Y = {{ (fpstr .Qk.Y) }}; + {{ range $index, $element := .S }} + uint256 private constant VK_S{{ inc $index }}_COM_X = {{ (fpstr $element.X) }}; + uint256 private constant VK_S{{ inc $index }}_COM_Y = {{ (fpstr $element.Y) }}; + {{ end }} + uint256 private constant VK_COSET_SHIFT = 5; + + {{ range $index, $element := .Qcp}} + uint256 private constant VK_QCP_{{ $index }}_X = {{ (fpstr $element.X) }}; + uint256 private constant VK_QCP_{{ $index }}_Y = {{ (fpstr $element.Y) }}; + {{ end }} + + {{ range $index, $element := .CommitmentConstraintIndexes -}} + uint256 private constant VK_INDEX_COMMIT_API{{ $index }} = {{ $element }}; + {{ end -}} + uint256 private constant VK_NB_CUSTOM_GATES = {{ len .CommitmentConstraintIndexes }}; + + // ------------------------------------------------ + + // offset proof + uint256 private constant PROOF_L_COM_X = 0x00; + uint256 private constant PROOF_L_COM_Y = 0x20; + uint256 private constant PROOF_R_COM_X = 0x40; + uint256 private constant PROOF_R_COM_Y = 0x60; + uint256 private constant PROOF_O_COM_X = 0x80; + uint256 private constant PROOF_O_COM_Y = 0xa0; + + // h = h_0 + x^{n+2}h_1 + x^{2(n+2)}h_2 + uint256 private constant PROOF_H_0_X = 0xc0; + uint256 private constant PROOF_H_0_Y = 0xe0; + uint256 private constant PROOF_H_1_X = 0x100; + uint256 private constant PROOF_H_1_Y = 0x120; + uint256 private constant PROOF_H_2_X = 0x140; + uint256 private constant PROOF_H_2_Y = 0x160; + + // wire values at zeta + uint256 private constant PROOF_L_AT_ZETA = 0x180; + uint256 private constant PROOF_R_AT_ZETA = 0x1a0; + uint256 private constant PROOF_O_AT_ZETA = 0x1c0; + + //uint256[STATE_WIDTH-1] permutation_polynomials_at_zeta; // Sσ1(zeta),Sσ2(zeta) + uint256 private constant PROOF_S1_AT_ZETA = 0x1e0; // Sσ1(zeta) + uint256 private constant 
PROOF_S2_AT_ZETA = 0x200; // Sσ2(zeta) + + //Bn254.G1Point grand_product_commitment; // [z(x)] + uint256 private constant PROOF_GRAND_PRODUCT_COMMITMENT_X = 0x220; + uint256 private constant PROOF_GRAND_PRODUCT_COMMITMENT_Y = 0x240; + + uint256 private constant PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA = 0x260; // z(w*zeta) + uint256 private constant PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA = 0x280; // t(zeta) + uint256 private constant PROOF_LINEARISED_POLYNOMIAL_AT_ZETA = 0x2a0; // r(zeta) + + // Folded proof for the opening of H, linearised poly, l, r, o, s_1, s_2, qcp + uint256 private constant PROOF_BATCH_OPENING_AT_ZETA_X = 0x2c0; // [Wzeta] + uint256 private constant PROOF_BATCH_OPENING_AT_ZETA_Y = 0x2e0; + + uint256 private constant PROOF_OPENING_AT_ZETA_OMEGA_X = 0x300; + uint256 private constant PROOF_OPENING_AT_ZETA_OMEGA_Y = 0x320; + + uint256 private constant PROOF_OPENING_QCP_AT_ZETA = 0x340; + uint256 private constant PROOF_COMMITMENTS_WIRES_CUSTOM_GATES = {{ hex (add 832 (mul (len .CommitmentConstraintIndexes) 32 ) )}}; + + // -> next part of proof is + // [ openings_selector_commits || commitments_wires_commit_api] + + // -------- offset state + + // challenges to check the claimed quotient + uint256 private constant STATE_ALPHA = 0x00; + uint256 private constant STATE_BETA = 0x20; + uint256 private constant STATE_GAMMA = 0x40; + uint256 private constant STATE_ZETA = 0x60; + + // reusable value + uint256 private constant STATE_ALPHA_SQUARE_LAGRANGE_0 = 0x80; + + // commitment to H + uint256 private constant STATE_FOLDED_H_X = 0xa0; + uint256 private constant STATE_FOLDED_H_Y = 0xc0; + + // commitment to the linearised polynomial + uint256 private constant STATE_LINEARISED_POLYNOMIAL_X = 0xe0; + uint256 private constant STATE_LINEARISED_POLYNOMIAL_Y = 0x100; + + // Folded proof for the opening of H, linearised poly, l, r, o, s_1, s_2, qcp + uint256 private constant STATE_FOLDED_CLAIMED_VALUES = 0x120; + + // folded digests of H, linearised poly, l, r, o, s_1, s_2, qcp + uint256 private constant STATE_FOLDED_DIGESTS_X = 0x140; + uint256 private constant STATE_FOLDED_DIGESTS_Y = 0x160; + + uint256 private constant STATE_PI = 0x180; + + uint256 private constant STATE_ZETA_POWER_N_MINUS_ONE = 0x1a0; + + uint256 private constant STATE_GAMMA_KZG = 0x1c0; + + uint256 private constant STATE_SUCCESS = 0x1e0; + uint256 private constant STATE_CHECK_VAR = 0x200; // /!\ this slot is used for debugging only + + uint256 private constant STATE_LAST_MEM = 0x220; + + // -------- errors + uint256 private constant ERROR_STRING_ID = 0x08c379a000000000000000000000000000000000000000000000000000000000; // selector for function Error(string) + + {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} + // -------- utils (for hash_fr) + uint256 private constant HASH_FR_BB = 340282366920938463463374607431768211456; // 2**128 + uint256 private constant HASH_FR_ZERO_UINT256 = 0; + + uint8 private constant HASH_FR_LEN_IN_BYTES = 48; + uint8 private constant HASH_FR_SIZE_DOMAIN = 11; + uint8 private constant HASH_FR_ONE = 1; + uint8 private constant HASH_FR_TWO = 2; + {{ end }} + + /// Verify a Plonk proof. + /// Reverts if the proof or the public inputs are malformed. 
+ /// @param proof serialised plonk proof (using gnark's MarshalSolidity) + /// @param public_inputs (must be reduced) + /// @return success true if the proof passes false otherwise + function Verify(bytes calldata proof, uint256[] calldata public_inputs) + public view returns(bool success) { + + assembly { + + let mem := mload(0x40) + let freeMem := add(mem, STATE_LAST_MEM) + + // sanity checks + check_number_of_public_inputs(public_inputs.length) + check_inputs_size(public_inputs.length, public_inputs.offset) + check_proof_size(proof.length) + check_proof_openings_size(proof.offset) + + // compute the challenges + let prev_challenge_non_reduced + prev_challenge_non_reduced := derive_gamma(proof.offset, public_inputs.length, public_inputs.offset) + prev_challenge_non_reduced := derive_beta(prev_challenge_non_reduced) + prev_challenge_non_reduced := derive_alpha(proof.offset, prev_challenge_non_reduced) + derive_zeta(proof.offset, prev_challenge_non_reduced) + + // evaluation of Z=Xⁿ-1 at ζ, we save this value + let zeta := mload(add(mem, STATE_ZETA)) + let zeta_power_n_minus_one := addmod(pow(zeta, VK_DOMAIN_SIZE, freeMem), sub(R_MOD, 1), R_MOD) + mstore(add(mem, STATE_ZETA_POWER_N_MINUS_ONE), zeta_power_n_minus_one) + + // public inputs contribution + let l_pi := sum_pi_wo_api_commit(public_inputs.offset, public_inputs.length, freeMem) + {{ if (gt (len .CommitmentConstraintIndexes) 0 ) -}} + let l_wocommit := sum_pi_commit(proof.offset, public_inputs.length, freeMem) + l_pi := addmod(l_wocommit, l_pi, R_MOD) + {{ end -}} + mstore(add(mem, STATE_PI), l_pi) + + compute_alpha_square_lagrange_0() + verify_quotient_poly_eval_at_zeta(proof.offset) + fold_h(proof.offset) + compute_commitment_linearised_polynomial(proof.offset) + compute_gamma_kzg(proof.offset) + fold_state(proof.offset) + batch_verify_multi_points(proof.offset) + + success := mload(add(mem, STATE_SUCCESS)) + + // Beginning errors ------------------------------------------------- + + function error_nb_public_inputs() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x1d) + mstore(add(ptError, 0x44), "wrong number of public inputs") + revert(ptError, 0x64) + } + + /// Called when an operation on Bn254 fails + /// @dev for instance when calling EcMul on a point not on Bn254. + function error_ec_op() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x12) + mstore(add(ptError, 0x44), "error ec operation") + revert(ptError, 0x64) + } + + /// Called when one of the public inputs is not reduced. + function error_inputs_size() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x18) + mstore(add(ptError, 0x44), "inputs are bigger than r") + revert(ptError, 0x64) + } + + /// Called when the size proof is not as expected + /// @dev to avoid overflow attack for instance + function error_proof_size() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x10) + mstore(add(ptError, 0x44), "wrong proof size") + revert(ptError, 0x64) + } + + /// Called when one the openings is bigger than r + /// The openings are the claimed evalutions of a polynomial + /// in a Kzg proof. 
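+ /// Requiring canonical (reduced) encodings prevents a malleable proof in which the same field element is resubmitted as value + k*r.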
+ function error_proof_openings_size() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x16) + mstore(add(ptError, 0x44), "openings bigger than r") + revert(ptError, 0x64) + } + + function error_verify() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0xc) + mstore(add(ptError, 0x44), "error verify") + revert(ptError, 0x64) + } + + function error_random_generation() { + let ptError := mload(0x40) + mstore(ptError, ERROR_STRING_ID) // selector for function Error(string) + mstore(add(ptError, 0x4), 0x20) + mstore(add(ptError, 0x24), 0x14) + mstore(add(ptError, 0x44), "error random gen kzg") + revert(ptError, 0x64) + } + // end errors ------------------------------------------------- + + // Beginning checks ------------------------------------------------- + + /// @param s actual number of public inputs + function check_number_of_public_inputs(s) { + if iszero(eq(s, VK_NB_PUBLIC_INPUTS)) { + error_nb_public_inputs() + } + } + + /// Checks that the public inputs are < R_MOD. + /// @param s number of public inputs + /// @param p pointer to the public inputs array + function check_inputs_size(s, p) { + let input_checks := 1 + for {let i} lt(i, s) {i:=add(i,1)} + { + input_checks := and(input_checks,lt(calldataload(p), R_MOD)) + p := add(p, 0x20) + } + if iszero(input_checks) { + error_inputs_size() + } + } + + /// Checks if the proof is of the correct size + /// @param actual_proof_size size of the proof (not the expected size) + function check_proof_size(actual_proof_size) { + let expected_proof_size := add(0x340, mul(VK_NB_CUSTOM_GATES,0x60)) + if iszero(eq(actual_proof_size, expected_proof_size)) { + error_proof_size() + } + } + + /// Checks if the multiple openings of the polynomials are < R_MOD. 
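+ /// The openings checked are: the linearised polynomial, the quotient, l, r, o, s1, s2 and the custom-gate selectors at ζ, plus the grand product Z at ωζ.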
+ /// @param aproof pointer to the beginning of the proof + /// @dev the 'a' prepending proof is to have a local name + function check_proof_openings_size(aproof) { + + let openings_check := 1 + + // linearised polynomial at zeta + let p := add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // quotient polynomial at zeta + p := add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_L_AT_ZETA + p := add(aproof, PROOF_L_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_R_AT_ZETA + p := add(aproof, PROOF_R_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_O_AT_ZETA + p := add(aproof, PROOF_O_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_S1_AT_ZETA + p := add(aproof, PROOF_S1_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_S2_AT_ZETA + p := add(aproof, PROOF_S2_AT_ZETA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA + p := add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA) + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + + // PROOF_OPENING_QCP_AT_ZETA + + p := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + for {let i:=0} lt(i, VK_NB_CUSTOM_GATES) {i:=add(i,1)} + { + openings_check := and(openings_check, lt(calldataload(p), R_MOD)) + p := add(p, 0x20) + } + + if iszero(openings_check) { + error_proof_openings_size() + } + + } + // end checks ------------------------------------------------- + + // Beginning challenges ------------------------------------------------- + + /// Derive gamma as Sha256() + /// @param aproof pointer to the proof + /// @param nb_pi number of public inputs + /// @param pi pointer to the array of public inputs + /// @return the challenge gamma, not reduced + /// @notice The transcript is the concatenation (in this order) of: + /// * the word "gamma" in ascii, equal to [0x67,0x61,0x6d, 0x6d, 0x61] and encoded as a uint256. + /// * the commitments to the permutation polynomials S1, S2, S3, where we concatenate the coordinates of those points + /// * the commitments of Ql, Qr, Qm, Qo, Qk + /// * the public inputs + /// * the commitments of the wires related to the custom gates (commitments_wires_commit_api) + /// * commitments to L, R, O (proof__com_) + /// The data described above is written starting at mPtr. "gamma" lies on 5 bytes, + /// and is encoded as a uint256 number n. In basis b = 256, the number looks like this + /// [0 0 0 .. 0x67 0x61 0x6d, 0x6d, 0x61]. 
The first non zero entry is at position 27=0x1b + /// Gamma reduced (the actual challenge) is stored at add(state, state_gamma) + function derive_gamma(aproof, nb_pi, pi)->gamma_not_reduced { + + let state := mload(0x40) + let mPtr := add(state, STATE_LAST_MEM) + + // gamma + // gamma in ascii is [0x67,0x61,0x6d, 0x6d, 0x61] + // (same for alpha, beta, zeta) + mstore(mPtr, 0x67616d6d61) // "gamma" + + mstore(add(mPtr, 0x20), VK_S1_COM_X) + mstore(add(mPtr, 0x40), VK_S1_COM_Y) + mstore(add(mPtr, 0x60), VK_S2_COM_X) + mstore(add(mPtr, 0x80), VK_S2_COM_Y) + mstore(add(mPtr, 0xa0), VK_S3_COM_X) + mstore(add(mPtr, 0xc0), VK_S3_COM_Y) + mstore(add(mPtr, 0xe0), VK_QL_COM_X) + mstore(add(mPtr, 0x100), VK_QL_COM_Y) + mstore(add(mPtr, 0x120), VK_QR_COM_X) + mstore(add(mPtr, 0x140), VK_QR_COM_Y) + mstore(add(mPtr, 0x160), VK_QM_COM_X) + mstore(add(mPtr, 0x180), VK_QM_COM_Y) + mstore(add(mPtr, 0x1a0), VK_QO_COM_X) + mstore(add(mPtr, 0x1c0), VK_QO_COM_Y) + mstore(add(mPtr, 0x1e0), VK_QK_COM_X) + mstore(add(mPtr, 0x200), VK_QK_COM_Y) + {{ range $index, $element := .CommitmentConstraintIndexes}} + mstore(add(mPtr, {{ hex (add 544 (mul $index 64)) }}), VK_QCP_{{ $index }}_X) + mstore(add(mPtr, {{ hex (add 576 (mul $index 64)) }}), VK_QCP_{{ $index }}_Y) + {{ end }} + // public inputs + let _mPtr := add(mPtr, {{ hex (add (mul (len .CommitmentConstraintIndexes) 64) 544) }}) + let size_pi_in_bytes := mul(nb_pi, 0x20) + calldatacopy(_mPtr, pi, size_pi_in_bytes) + _mPtr := add(_mPtr, size_pi_in_bytes) + + // commitments to l, r, o + let size_commitments_lro_in_bytes := 0xc0 + calldatacopy(_mPtr, aproof, size_commitments_lro_in_bytes) + _mPtr := add(_mPtr, size_commitments_lro_in_bytes) + + // total size is : + // sizegamma(=0x5) + 11*64(=0x2c0) + // + nb_public_inputs*0x20 + // + nb_custom gates*0x40 + let size := add(0x2c5, size_pi_in_bytes) + {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} + size := add(size, mul(VK_NB_CUSTOM_GATES, 0x40)) + {{ end -}} + let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1b), size, mPtr, 0x20) //0x1b -> 000.."gamma" + if iszero(l_success) { + error_verify() + } + gamma_not_reduced := mload(mPtr) + mstore(add(state, STATE_GAMMA), mod(gamma_not_reduced, R_MOD)) + } + + /// derive beta as Sha256 + /// @param gamma_not_reduced the previous challenge (gamma) not reduced + /// @return beta_not_reduced the next challenge, beta, not reduced + /// @notice the transcript consists of the previous challenge only. 
+ /// The reduced version of beta is stored at add(state, state_beta) + function derive_beta(gamma_not_reduced)->beta_not_reduced{ + + let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + + // beta + mstore(mPtr, 0x62657461) // "beta" + mstore(add(mPtr, 0x20), gamma_not_reduced) + let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1c), 0x24, mPtr, 0x20) //0x1c -> 000.."beta" + if iszero(l_success) { + error_verify() + } + beta_not_reduced := mload(mPtr) + mstore(add(state, STATE_BETA), mod(beta_not_reduced, R_MOD)) + } + + /// derive alpha as sha256 + /// @param aproof pointer to the proof object + /// @param beta_not_reduced the previous challenge (beta) not reduced + /// @return alpha_not_reduced the next challenge, alpha, not reduced + /// @notice the transcript consists of the previous challenge (beta) + /// not reduced, the commitments to the wires associated to the QCP_i, + /// and the commitment to the grand product polynomial + function derive_alpha(aproof, beta_not_reduced)->alpha_not_reduced { + + let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + let full_size := 0x65 // size("alpha")(=0x5) + 0x20 (previous challenge) + 0x40 ([Z]) + + // alpha + mstore(mPtr, 0x616C706861) // "alpha" + let _mPtr := add(mPtr, 0x20) + mstore(_mPtr, beta_not_reduced) + _mPtr := add(_mPtr, 0x20) + {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} + // Bsb22Commitments + let proof_bsb_commitments := add(aproof, PROOF_COMMITMENTS_WIRES_CUSTOM_GATES) + let size_bsb_commitments := mul(0x40, VK_NB_CUSTOM_GATES) + calldatacopy(_mPtr, proof_bsb_commitments, size_bsb_commitments) + _mPtr := add(_mPtr, size_bsb_commitments) + full_size := add(full_size, size_bsb_commitments) + {{ end }} + // [Z], the commitment to the grand product polynomial + calldatacopy(_mPtr, add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X), 0x40) + let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1b), full_size, mPtr, 0x20) + if iszero(l_success) { + error_verify() + } + + alpha_not_reduced := mload(mPtr) + mstore(add(state, STATE_ALPHA), mod(alpha_not_reduced, R_MOD)) + } + + /// derive zeta as sha256 + /// @param aproof pointer to the proof object + /// @param alpha_not_reduced the previous challenge (alpha) not reduced + /// The transcript consists of the previous challenge and the commitment to + /// the quotient polynomial h.
+ function derive_zeta(aproof, alpha_not_reduced) { + + let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + + // zeta + mstore(mPtr, 0x7a657461) // "zeta" + mstore(add(mPtr, 0x20), alpha_not_reduced) + calldatacopy(add(mPtr, 0x40), add(aproof, PROOF_H_0_X), 0xc0) + let l_success := staticcall(gas(), 0x2, add(mPtr, 0x1c), 0xe4, mPtr, 0x20) + if iszero(l_success) { + error_verify() + } + let zeta_not_reduced := mload(mPtr) + mstore(add(state, STATE_ZETA), mod(zeta_not_reduced, R_MOD)) + } + // END challenges ------------------------------------------------- + + // BEGINNING compute_pi ------------------------------------------------- + + /// sum_pi_wo_api_commit computes the public inputs contributions, + /// except for the public inputs coming from the custom gate + /// @param ins pointer to the public inputs + /// @param n number of public inputs + /// @param mPtr free memory + /// @return pi_wo_commit public inputs contribution (except the public inputs coming from the custom gate) + function sum_pi_wo_api_commit(ins, n, mPtr)->pi_wo_commit { + + let state := mload(0x40) + let z := mload(add(state, STATE_ZETA)) + let zpnmo := mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)) + + let li := mPtr + batch_compute_lagranges_at_z(z, zpnmo, n, li) + + let tmp := 0 + for {let i:=0} lt(i,n) {i:=add(i,1)} + { + tmp := mulmod(mload(li), calldataload(ins), R_MOD) + pi_wo_commit := addmod(pi_wo_commit, tmp, R_MOD) + li := add(li, 0x20) + ins := add(ins, 0x20) + } + + } + + /// batch_compute_lagranges_at_z computes [L_0(z), .., L_{n-1}(z)] + /// @param z point at which the Lagranges are evaluated + /// @param zpnmo ζⁿ-1 + /// @param n number of public inputs (number of Lagranges to compute) + /// @param mPtr pointer to which the results are stored + function batch_compute_lagranges_at_z(z, zpnmo, n, mPtr) { + + let zn := mulmod(zpnmo, VK_INV_DOMAIN_SIZE, R_MOD) // 1/n * (ζⁿ - 1) + + let _w := 1 + let _mPtr := mPtr + for {let i:=0} lt(i,n) {i:=add(i,1)} + { + mstore(_mPtr, addmod(z,sub(R_MOD, _w), R_MOD)) + _w := mulmod(_w, VK_OMEGA, R_MOD) + _mPtr := add(_mPtr, 0x20) + } + batch_invert(mPtr, n, _mPtr) + _mPtr := mPtr + _w := 1 + for {let i:=0} lt(i,n) {i:=add(i,1)} + { + mstore(_mPtr, mulmod(mulmod(mload(_mPtr), zn , R_MOD), _w, R_MOD)) + _mPtr := add(_mPtr, 0x20) + _w := mulmod(_w, VK_OMEGA, R_MOD) + } + } + + /// @notice Montgomery trick for batch inversion mod R_MOD + /// @param ins pointer to the data to batch invert + /// @param number of elements to batch invert + /// @param mPtr free memory + function batch_invert(ins, nb_ins, mPtr) { + mstore(mPtr, 1) + let offset := 0 + for {let i:=0} lt(i, nb_ins) {i:=add(i,1)} + { + let prev := mload(add(mPtr, offset)) + let cur := mload(add(ins, offset)) + cur := mulmod(prev, cur, R_MOD) + offset := add(offset, 0x20) + mstore(add(mPtr, offset), cur) + } + ins := add(ins, sub(offset, 0x20)) + mPtr := add(mPtr, offset) + let inv := pow(mload(mPtr), sub(R_MOD,2), add(mPtr, 0x20)) + for {let i:=0} lt(i, nb_ins) {i:=add(i,1)} + { + mPtr := sub(mPtr, 0x20) + let tmp := mload(ins) + let cur := mulmod(inv, mload(mPtr), R_MOD) + mstore(ins, cur) + inv := mulmod(inv, tmp, R_MOD) + ins := sub(ins, 0x20) + } + } + + {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} + /// Public inputs (the ones coming from the custom gate) contribution + /// @param aproof pointer to the proof + /// @param nb_public_inputs number of public inputs + /// @param mPtr pointer to free memory + /// @return pi_commit custom gate public inputs contribution + function 
sum_pi_commit(aproof, nb_public_inputs, mPtr)->pi_commit { + + let state := mload(0x40) + let z := mload(add(state, STATE_ZETA)) + let zpnmo := mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)) + + let p := add(aproof, PROOF_COMMITMENTS_WIRES_CUSTOM_GATES) + + let h_fr, ith_lagrange + + {{ range $index, $element := .CommitmentConstraintIndexes}} + h_fr := hash_fr(calldataload(p), calldataload(add(p, 0x20)), mPtr) + ith_lagrange := compute_ith_lagrange_at_z(z, zpnmo, add(nb_public_inputs, VK_INDEX_COMMIT_API{{ $index }}), mPtr) + pi_commit := addmod(pi_commit, mulmod(h_fr, ith_lagrange, R_MOD), R_MOD) + p := add(p, 0x40) + {{ end }} + + } + + /// Computes L_i(zeta) = ωⁱ/n * (ζⁿ-1)/(ζ-ωⁱ) where: + /// @param z zeta + /// @param zpmno ζⁿ-1 + /// @param i i-th lagrange + /// @param mPtr free memory + /// @return res = ωⁱ/n * (ζⁿ-1)/(ζ-ωⁱ) + function compute_ith_lagrange_at_z(z, zpnmo, i, mPtr)->res { + + let w := pow(VK_OMEGA, i, mPtr) // w**i + i := addmod(z, sub(R_MOD, w), R_MOD) // z-w**i + w := mulmod(w, VK_INV_DOMAIN_SIZE, R_MOD) // w**i/n + i := pow(i, sub(R_MOD,2), mPtr) // (z-w**i)**-1 + w := mulmod(w, i, R_MOD) // w**i/n*(z-w)**-1 + res := mulmod(w, zpnmo, R_MOD) + + } + + /// @dev https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-06#section-5.2 + /// @param x x coordinate of a point on Bn254(𝔽_p) + /// @param y y coordinate of a point on Bn254(𝔽_p) + /// @param mPtr free memory + /// @return res an element mod R_MOD + function hash_fr(x, y, mPtr)->res { + + // [0x00, .. , 0x00 || x, y, || 0, 48, 0, dst, HASH_FR_SIZE_DOMAIN] + // <- 64 bytes -> <-64b -> <- 1 bytes each -> + + // [0x00, .., 0x00] 64 bytes of zero + mstore(mPtr, HASH_FR_ZERO_UINT256) + mstore(add(mPtr, 0x20), HASH_FR_ZERO_UINT256) + + // msg = x || y , both on 32 bytes + mstore(add(mPtr, 0x40), x) + mstore(add(mPtr, 0x60), y) + + // 0 || 48 || 0 all on 1 byte + mstore8(add(mPtr, 0x80), 0) + mstore8(add(mPtr, 0x81), HASH_FR_LEN_IN_BYTES) + mstore8(add(mPtr, 0x82), 0) + + // "BSB22-Plonk" = [42, 53, 42, 32, 32, 2d, 50, 6c, 6f, 6e, 6b,] + mstore8(add(mPtr, 0x83), 0x42) + mstore8(add(mPtr, 0x84), 0x53) + mstore8(add(mPtr, 0x85), 0x42) + mstore8(add(mPtr, 0x86), 0x32) + mstore8(add(mPtr, 0x87), 0x32) + mstore8(add(mPtr, 0x88), 0x2d) + mstore8(add(mPtr, 0x89), 0x50) + mstore8(add(mPtr, 0x8a), 0x6c) + mstore8(add(mPtr, 0x8b), 0x6f) + mstore8(add(mPtr, 0x8c), 0x6e) + mstore8(add(mPtr, 0x8d), 0x6b) + + // size domain + mstore8(add(mPtr, 0x8e), HASH_FR_SIZE_DOMAIN) + + let l_success := staticcall(gas(), 0x2, mPtr, 0x8f, mPtr, 0x20) + if iszero(l_success) { + error_verify() + } + + let b0 := mload(mPtr) + + // [b0 || one || dst || HASH_FR_SIZE_DOMAIN] + // <-64bytes -> <- 1 byte each -> + mstore8(add(mPtr, 0x20), HASH_FR_ONE) // 1 + + mstore8(add(mPtr, 0x21), 0x42) // dst + mstore8(add(mPtr, 0x22), 0x53) + mstore8(add(mPtr, 0x23), 0x42) + mstore8(add(mPtr, 0x24), 0x32) + mstore8(add(mPtr, 0x25), 0x32) + mstore8(add(mPtr, 0x26), 0x2d) + mstore8(add(mPtr, 0x27), 0x50) + mstore8(add(mPtr, 0x28), 0x6c) + mstore8(add(mPtr, 0x29), 0x6f) + mstore8(add(mPtr, 0x2a), 0x6e) + mstore8(add(mPtr, 0x2b), 0x6b) + + mstore8(add(mPtr, 0x2c), HASH_FR_SIZE_DOMAIN) // size domain + l_success := staticcall(gas(), 0x2, mPtr, 0x2d, mPtr, 0x20) + if iszero(l_success) { + error_verify() + } + + // b1 is located at mPtr. 
We store b2 at add(mPtr, 0x20) + + // [b0^b1 || two || dst || HASH_FR_SIZE_DOMAIN] + // <-64bytes -> <- 1 byte each -> + mstore(add(mPtr, 0x20), xor(mload(mPtr), b0)) + mstore8(add(mPtr, 0x40), HASH_FR_TWO) + + mstore8(add(mPtr, 0x41), 0x42) // dst + mstore8(add(mPtr, 0x42), 0x53) + mstore8(add(mPtr, 0x43), 0x42) + mstore8(add(mPtr, 0x44), 0x32) + mstore8(add(mPtr, 0x45), 0x32) + mstore8(add(mPtr, 0x46), 0x2d) + mstore8(add(mPtr, 0x47), 0x50) + mstore8(add(mPtr, 0x48), 0x6c) + mstore8(add(mPtr, 0x49), 0x6f) + mstore8(add(mPtr, 0x4a), 0x6e) + mstore8(add(mPtr, 0x4b), 0x6b) + + mstore8(add(mPtr, 0x4c), HASH_FR_SIZE_DOMAIN) // size domain + + let offset := add(mPtr, 0x20) + l_success := staticcall(gas(), 0x2, offset, 0x2d, offset, 0x20) + if iszero(l_success) { + error_verify() + } + + // at this point we have mPtr = [ b1 || b2] where b1 is on 32byes and b2 in 16bytes. + // we interpret it as a big integer mod r in big endian (similar to regular decimal notation) + // the result is then 2**(8*16)*mPtr[32:] + mPtr[32:48] + res := mulmod(mload(mPtr), HASH_FR_BB, R_MOD) // <- res = 2**128 * mPtr[:32] + let b1 := shr(128, mload(add(mPtr, 0x20))) // b1 <- [0, 0, .., 0 || b2[:16] ] + res := addmod(res, b1, R_MOD) + + } + {{ end }} + // END compute_pi ------------------------------------------------- + + /// @notice compute α² * 1/n * (ζ{n}-1)/(ζ - 1) where + /// * α = challenge derived in derive_gamma_beta_alpha_zeta + /// * n = vk_domain_size + /// * ω = vk_omega (generator of the multiplicative cyclic group of order n in (ℤ/rℤ)*) + /// * ζ = zeta (challenge derived with Fiat Shamir) + function compute_alpha_square_lagrange_0() { + let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + + let res := mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)) + let den := addmod(mload(add(state, STATE_ZETA)), sub(R_MOD, 1), R_MOD) + den := pow(den, sub(R_MOD, 2), mPtr) + den := mulmod(den, VK_INV_DOMAIN_SIZE, R_MOD) + res := mulmod(den, res, R_MOD) + + let l_alpha := mload(add(state, STATE_ALPHA)) + res := mulmod(res, l_alpha, R_MOD) + res := mulmod(res, l_alpha, R_MOD) + mstore(add(state, STATE_ALPHA_SQUARE_LAGRANGE_0), res) + } + + /// @notice follows alg. p.13 of https://eprint.iacr.org/2019/953.pdf + /// with t₁ = t₂ = 1, and the proofs are ([digest] + [quotient] +purported evaluation): + /// * [state_folded_state_digests], [proof_batch_opening_at_zeta_x], state_folded_evals + /// * [proof_grand_product_commitment], [proof_opening_at_zeta_omega_x], [proof_grand_product_at_zeta_omega] + /// @param aproof pointer to the proof + function batch_verify_multi_points(aproof) { + let state := mload(0x40) + let mPtr := add(state, STATE_LAST_MEM) + + // derive a random number. As there is no random generator, we + // do an FS like challenge derivation, depending on both digests and + // ζ to ensure that the prover cannot control the random numger. + // Note: adding the other point ζω is not needed, as ω is known beforehand. 
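+ // The 0x140-byte sha256 input is: folded digests (x,y) || batch opening proof at ζ (x,y) || [Z] (x,y) || opening proof at ωζ (x,y) || ζ || γ_kzg.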
+ mstore(mPtr, mload(add(state, STATE_FOLDED_DIGESTS_X))) + mstore(add(mPtr, 0x20), mload(add(state, STATE_FOLDED_DIGESTS_Y))) + mstore(add(mPtr, 0x40), calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_X))) + mstore(add(mPtr, 0x60), calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_Y))) + mstore(add(mPtr, 0x80), calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X))) + mstore(add(mPtr, 0xa0), calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_Y))) + mstore(add(mPtr, 0xc0), calldataload(add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_X))) + mstore(add(mPtr, 0xe0), calldataload(add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_Y))) + mstore(add(mPtr, 0x100), mload(add(state, STATE_ZETA))) + mstore(add(mPtr, 0x120), mload(add(state, STATE_GAMMA_KZG))) + let random := staticcall(gas(), 0x2, mPtr, 0x140, mPtr, 0x20) + if iszero(random){ + error_random_generation() + } + random := mod(mload(mPtr), R_MOD) // use the same variable as we are one variable away from getting stack-too-deep error... + + let folded_quotients := mPtr + mPtr := add(folded_quotients, 0x40) + mstore(folded_quotients, calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_X))) + mstore(add(folded_quotients, 0x20), calldataload(add(aproof, PROOF_BATCH_OPENING_AT_ZETA_Y))) + point_acc_mul_calldata(folded_quotients, add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_X), random, mPtr) + + let folded_digests := add(state, STATE_FOLDED_DIGESTS_X) + point_acc_mul_calldata(folded_digests, add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X), random, mPtr) + + let folded_evals := add(state, STATE_FOLDED_CLAIMED_VALUES) + fr_acc_mul_calldata(folded_evals, add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA), random) + + let folded_evals_commit := mPtr + mPtr := add(folded_evals_commit, 0x40) + mstore(folded_evals_commit, G1_SRS_X) + mstore(add(folded_evals_commit, 0x20), G1_SRS_Y) + mstore(add(folded_evals_commit, 0x40), mload(folded_evals)) + let check_staticcall := staticcall(gas(), 7, folded_evals_commit, 0x60, folded_evals_commit, 0x40) + if iszero(check_staticcall) { + error_verify() + } + + let folded_evals_commit_y := add(folded_evals_commit, 0x20) + mstore(folded_evals_commit_y, sub(P_MOD, mload(folded_evals_commit_y))) + point_add(folded_digests, folded_digests, folded_evals_commit, mPtr) + + let folded_points_quotients := mPtr + mPtr := add(mPtr, 0x40) + point_mul_calldata( + folded_points_quotients, + add(aproof, PROOF_BATCH_OPENING_AT_ZETA_X), + mload(add(state, STATE_ZETA)), + mPtr + ) + let zeta_omega := mulmod(mload(add(state, STATE_ZETA)), VK_OMEGA, R_MOD) + random := mulmod(random, zeta_omega, R_MOD) + point_acc_mul_calldata(folded_points_quotients, add(aproof, PROOF_OPENING_AT_ZETA_OMEGA_X), random, mPtr) + + point_add(folded_digests, folded_digests, folded_points_quotients, mPtr) + + let folded_quotients_y := add(folded_quotients, 0x20) + mstore(folded_quotients_y, sub(P_MOD, mload(folded_quotients_y))) + + mstore(mPtr, mload(folded_digests)) + mstore(add(mPtr, 0x20), mload(add(folded_digests, 0x20))) + mstore(add(mPtr, 0x40), G2_SRS_0_X_0) // the 4 lines are the canonical G2 point on BN254 + mstore(add(mPtr, 0x60), G2_SRS_0_X_1) + mstore(add(mPtr, 0x80), G2_SRS_0_Y_0) + mstore(add(mPtr, 0xa0), G2_SRS_0_Y_1) + mstore(add(mPtr, 0xc0), mload(folded_quotients)) + mstore(add(mPtr, 0xe0), mload(add(folded_quotients, 0x20))) + mstore(add(mPtr, 0x100), G2_SRS_1_X_0) + mstore(add(mPtr, 0x120), G2_SRS_1_X_1) + mstore(add(mPtr, 0x140), G2_SRS_1_Y_0) + mstore(add(mPtr, 0x160), G2_SRS_1_Y_1) + check_pairing_kzg(mPtr) + } + + /// @notice check_pairing_kzg 
checks the result of the final pairing product of the batched + /// kzg verification. The purpose of this function is to avoid exhausting the stack + /// in the function batch_verify_multi_points. + /// @param mPtr pointer storing the tuple of pairs + function check_pairing_kzg(mPtr) { + let state := mload(0x40) + + // TODO test the staticcall using the method from audit_4-5 + let l_success := staticcall(gas(), 8, mPtr, 0x180, 0x00, 0x20) + let res_pairing := mload(0x00) + let s_success := mload(add(state, STATE_SUCCESS)) + res_pairing := and(and(res_pairing, l_success), s_success) + mstore(add(state, STATE_SUCCESS), res_pairing) + } + + /// @notice Fold the opening proofs at ζ: + /// * at state+state_folded_digest we store: [H] + γ[Linearised_polynomial]+γ²[L] + γ³[R] + γ⁴[O] + γ⁵[S₁] +γ⁶[S₂] + ∑ᵢγ⁶⁺ⁱ[Pi_{i}] + /// * at state+state_folded_claimed_values we store: H(ζ) + γLinearised_polynomial(ζ)+γ²L(ζ) + γ³R(ζ)+ γ⁴O(ζ) + γ⁵S₁(ζ) +γ⁶S₂(ζ) + ∑ᵢγ⁶⁺ⁱPi_{i}(ζ) + /// @param aproof pointer to the proof + /// acc_gamma stores the γⁱ + function fold_state(aproof) { + + let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + let mPtr20 := add(mPtr, 0x20) + let mPtr40 := add(mPtr, 0x40) + + let l_gamma_kzg := mload(add(state, STATE_GAMMA_KZG)) + let acc_gamma := l_gamma_kzg + let state_folded_digests := add(state, STATE_FOLDED_DIGESTS_X) + + mstore(add(state, STATE_FOLDED_DIGESTS_X), mload(add(state, STATE_FOLDED_H_X))) + mstore(add(state, STATE_FOLDED_DIGESTS_Y), mload(add(state, STATE_FOLDED_H_Y))) + mstore(add(state, STATE_FOLDED_CLAIMED_VALUES), calldataload(add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA))) + + point_acc_mul(state_folded_digests, add(state, STATE_LINEARISED_POLYNOMIAL_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA), acc_gamma) + + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + point_acc_mul_calldata(add(state, STATE_FOLDED_DIGESTS_X), add(aproof, PROOF_L_COM_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_L_AT_ZETA), acc_gamma) + + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + point_acc_mul_calldata(state_folded_digests, add(aproof, PROOF_R_COM_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_R_AT_ZETA), acc_gamma) + + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + point_acc_mul_calldata(state_folded_digests, add(aproof, PROOF_O_COM_X), acc_gamma, mPtr) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_O_AT_ZETA), acc_gamma) + + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + mstore(mPtr, VK_S1_COM_X) + mstore(mPtr20, VK_S1_COM_Y) + point_acc_mul(state_folded_digests, mPtr, acc_gamma, mPtr40) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_S1_AT_ZETA), acc_gamma) + + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + mstore(mPtr, VK_S2_COM_X) + mstore(mPtr20, VK_S2_COM_Y) + point_acc_mul(state_folded_digests, mPtr, acc_gamma, mPtr40) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), add(aproof, PROOF_S2_AT_ZETA), acc_gamma) + + {{- if (gt (len .CommitmentConstraintIndexes) 0 ) }} + let poscaz := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + {{ end -}} + + {{ range $index, $element := .CommitmentConstraintIndexes }} + acc_gamma := mulmod(acc_gamma, l_gamma_kzg, R_MOD) + mstore(mPtr, VK_QCP_{{ $index }}_X) + mstore(mPtr20, VK_QCP_{{ $index }}_Y) + 
point_acc_mul(state_folded_digests, mPtr, acc_gamma, mPtr40) + fr_acc_mul_calldata(add(state, STATE_FOLDED_CLAIMED_VALUES), poscaz, acc_gamma) + poscaz := add(poscaz, 0x20) + {{ end }} + + } + + /// @notice generate the challenge (using Fiat Shamir) to fold the opening proofs + /// at ζ. + /// The process for deriving γ is the same as in derive_gamma but this time the inputs are + /// in this order (the [] means it's a commitment): + /// * ζ + /// * [H] ( = H₁ + ζᵐ⁺²*H₂ + ζ²⁽ᵐ⁺²⁾*H₃ ) + /// * [Linearised polynomial] + /// * [L], [R], [O] + /// * [S₁] [S₂] + /// * [Pi_{i}] (wires associated to custom gates) + /// Then there are the purported evaluations of the previous committed polynomials: + /// * H(ζ) + /// * Linearised_polynomial(ζ) + /// * L(ζ), R(ζ), O(ζ), S₁(ζ), S₂(ζ) + /// * Pi_{i}(ζ) + /// * Z(ζω) + /// @param aproof pointer to the proof + function compute_gamma_kzg(aproof) { + + let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + mstore(mPtr, 0x67616d6d61) // "gamma" + mstore(add(mPtr, 0x20), mload(add(state, STATE_ZETA))) + mstore(add(mPtr,0x40), mload(add(state, STATE_FOLDED_H_X))) + mstore(add(mPtr,0x60), mload(add(state, STATE_FOLDED_H_Y))) + mstore(add(mPtr,0x80), mload(add(state, STATE_LINEARISED_POLYNOMIAL_X))) + mstore(add(mPtr,0xa0), mload(add(state, STATE_LINEARISED_POLYNOMIAL_Y))) + calldatacopy(add(mPtr, 0xc0), add(aproof, PROOF_L_COM_X), 0xc0) + mstore(add(mPtr,0x180), VK_S1_COM_X) + mstore(add(mPtr,0x1a0), VK_S1_COM_Y) + mstore(add(mPtr,0x1c0), VK_S2_COM_X) + mstore(add(mPtr,0x1e0), VK_S2_COM_Y) + + let offset := 0x200 + {{ range $index, $element := .CommitmentConstraintIndexes }} + mstore(add(mPtr,offset), VK_QCP_{{ $index }}_X) + mstore(add(mPtr,add(offset, 0x20)), VK_QCP_{{ $index }}_Y) + offset := add(offset, 0x40) + {{ end }} + + mstore(add(mPtr, offset), calldataload(add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x20)), calldataload(add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x40)), calldataload(add(aproof, PROOF_L_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x60)), calldataload(add(aproof, PROOF_R_AT_ZETA))) + mstore(add(mPtr, add(offset, 0x80)), calldataload(add(aproof, PROOF_O_AT_ZETA))) + mstore(add(mPtr, add(offset, 0xa0)), calldataload(add(aproof, PROOF_S1_AT_ZETA))) + mstore(add(mPtr, add(offset, 0xc0)), calldataload(add(aproof, PROOF_S2_AT_ZETA))) + + let _mPtr := add(mPtr, add(offset, 0xe0)) + {{ if (gt (len .CommitmentConstraintIndexes) 0 )}} + let _poscaz := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + for {let i:=0} lt(i, VK_NB_CUSTOM_GATES) {i:=add(i,1)} + { + mstore(_mPtr, calldataload(_poscaz)) + _poscaz := add(_poscaz, 0x20) + _mPtr := add(_mPtr, 0x20) + } + {{ end }} + + mstore(_mPtr, calldataload(add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA))) + + let start_input := 0x1b // 00.."gamma" + let size_input := add(0x17, mul(VK_NB_CUSTOM_GATES,3)) // number of 32bytes elmts = 0x17 (zeta+2*7+7 for the digests+openings) + 2*VK_NB_CUSTOM_GATES (for the commitments of the selectors) + VK_NB_CUSTOM_GATES (for the openings of the selectors) + size_input := add(0x5, mul(size_input, 0x20)) // size in bytes: 15*32 bytes + 5 bytes for gamma + let check_staticcall := staticcall(gas(), 0x2, add(mPtr,start_input), size_input, add(state, STATE_GAMMA_KZG), 0x20) + if iszero(check_staticcall) { + error_verify() + } + mstore(add(state, STATE_GAMMA_KZG), mod(mload(add(state, STATE_GAMMA_KZG)), R_MOD)) + } + + function compute_commitment_linearised_polynomial_ec(aproof, s1, s2) { 
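+ // Accumulates [linearised polynomial] = L(ζ)[Qₗ] + R(ζ)[Qᵣ] + L(ζ)R(ζ)[Qₘ] + O(ζ)[Qₒ] + [Qₖ] + Σᵢqc'ᵢ(ζ)[BsbCommitmentᵢ] + s1[S₃] + s2[Z] at state + STATE_LINEARISED_POLYNOMIAL_X; s1 and s2 are the scalar factors computed by the caller.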
+ let state := mload(0x40) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + + mstore(mPtr, VK_QL_COM_X) + mstore(add(mPtr, 0x20), VK_QL_COM_Y) + point_mul( + add(state, STATE_LINEARISED_POLYNOMIAL_X), + mPtr, + calldataload(add(aproof, PROOF_L_AT_ZETA)), + add(mPtr, 0x40) + ) + + mstore(mPtr, VK_QR_COM_X) + mstore(add(mPtr, 0x20), VK_QR_COM_Y) + point_acc_mul( + add(state, STATE_LINEARISED_POLYNOMIAL_X), + mPtr, + calldataload(add(aproof, PROOF_R_AT_ZETA)), + add(mPtr, 0x40) + ) + + let rl := mulmod(calldataload(add(aproof, PROOF_L_AT_ZETA)), calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD) + mstore(mPtr, VK_QM_COM_X) + mstore(add(mPtr, 0x20), VK_QM_COM_Y) + point_acc_mul(add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, rl, add(mPtr, 0x40)) + + mstore(mPtr, VK_QO_COM_X) + mstore(add(mPtr, 0x20), VK_QO_COM_Y) + point_acc_mul( + add(state, STATE_LINEARISED_POLYNOMIAL_X), + mPtr, + calldataload(add(aproof, PROOF_O_AT_ZETA)), + add(mPtr, 0x40) + ) + + mstore(mPtr, VK_QK_COM_X) + mstore(add(mPtr, 0x20), VK_QK_COM_Y) + point_add( + add(state, STATE_LINEARISED_POLYNOMIAL_X), + add(state, STATE_LINEARISED_POLYNOMIAL_X), + mPtr, + add(mPtr, 0x40) + ) + + let commits_api_at_zeta := add(aproof, PROOF_OPENING_QCP_AT_ZETA) + let commits_api := add(aproof, PROOF_COMMITMENTS_WIRES_CUSTOM_GATES) + for { + let i := 0 + } lt(i, VK_NB_CUSTOM_GATES) { + i := add(i, 1) + } { + mstore(mPtr, calldataload(commits_api)) + mstore(add(mPtr, 0x20), calldataload(add(commits_api, 0x20))) + point_acc_mul( + add(state, STATE_LINEARISED_POLYNOMIAL_X), + mPtr, + calldataload(commits_api_at_zeta), + add(mPtr, 0x40) + ) + commits_api_at_zeta := add(commits_api_at_zeta, 0x20) + commits_api := add(commits_api, 0x40) + } + + mstore(mPtr, VK_S3_COM_X) + mstore(add(mPtr, 0x20), VK_S3_COM_Y) + point_acc_mul(add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, s1, add(mPtr, 0x40)) + + mstore(mPtr, calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_X))) + mstore(add(mPtr, 0x20), calldataload(add(aproof, PROOF_GRAND_PRODUCT_COMMITMENT_Y))) + point_acc_mul(add(state, STATE_LINEARISED_POLYNOMIAL_X), mPtr, s2, add(mPtr, 0x40)) + } + + /// @notice Compute the commitment to the linearized polynomial equal to + /// L(ζ)[Qₗ]+r(ζ)[Qᵣ]+R(ζ)L(ζ)[Qₘ]+O(ζ)[Qₒ]+[Qₖ]+Σᵢqc'ᵢ(ζ)[BsbCommitmentᵢ] + + /// α*( Z(μζ)(L(ζ)+β*S₁(ζ)+γ)*(R(ζ)+β*S₂(ζ)+γ)[S₃]-[Z](L(ζ)+β*id_{1}(ζ)+γ)*(R(ζ)+β*id_{2(ζ)+γ)*(O(ζ)+β*id_{3}(ζ)+γ) ) + + /// α²*L₁(ζ)[Z] + /// where + /// * id_1 = id, id_2 = vk_coset_shift*id, id_3 = vk_coset_shift^{2}*id + /// * the [] means that it's a commitment (i.e. 
a point on Bn254(F_p)) + /// @param aproof pointer to the proof + function compute_commitment_linearised_polynomial(aproof) { + let state := mload(0x40) + let l_beta := mload(add(state, STATE_BETA)) + let l_gamma := mload(add(state, STATE_GAMMA)) + let l_zeta := mload(add(state, STATE_ZETA)) + let l_alpha := mload(add(state, STATE_ALPHA)) + + let u := mulmod(calldataload(add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA)), l_beta, R_MOD) + let v := mulmod(l_beta, calldataload(add(aproof, PROOF_S1_AT_ZETA)), R_MOD) + v := addmod(v, calldataload(add(aproof, PROOF_L_AT_ZETA)), R_MOD) + v := addmod(v, l_gamma, R_MOD) + + let w := mulmod(l_beta, calldataload(add(aproof, PROOF_S2_AT_ZETA)), R_MOD) + w := addmod(w, calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD) + w := addmod(w, l_gamma, R_MOD) + + let s1 := mulmod(u, v, R_MOD) + s1 := mulmod(s1, w, R_MOD) + s1 := mulmod(s1, l_alpha, R_MOD) + + let coset_square := mulmod(VK_COSET_SHIFT, VK_COSET_SHIFT, R_MOD) + let betazeta := mulmod(l_beta, l_zeta, R_MOD) + u := addmod(betazeta, calldataload(add(aproof, PROOF_L_AT_ZETA)), R_MOD) + u := addmod(u, l_gamma, R_MOD) + + v := mulmod(betazeta, VK_COSET_SHIFT, R_MOD) + v := addmod(v, calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD) + v := addmod(v, l_gamma, R_MOD) + + w := mulmod(betazeta, coset_square, R_MOD) + w := addmod(w, calldataload(add(aproof, PROOF_O_AT_ZETA)), R_MOD) + w := addmod(w, l_gamma, R_MOD) + + let s2 := mulmod(u, v, R_MOD) + s2 := mulmod(s2, w, R_MOD) + s2 := sub(R_MOD, s2) + s2 := mulmod(s2, l_alpha, R_MOD) + s2 := addmod(s2, mload(add(state, STATE_ALPHA_SQUARE_LAGRANGE_0)), R_MOD) + + // at this stage: + // * s₁ = α*Z(μζ)(l(ζ)+β*s₁(ζ)+γ)*(r(ζ)+β*s₂(ζ)+γ)*β + // * s₂ = -α*(l(ζ)+β*ζ+γ)*(r(ζ)+β*u*ζ+γ)*(o(ζ)+β*u²*ζ+γ) + α²*L₁(ζ) + + compute_commitment_linearised_polynomial_ec(aproof, s1, s2) + } + + /// @notice compute H₁ + ζᵐ⁺²*H₂ + ζ²⁽ᵐ⁺²⁾*H₃ and store the result at + /// state + state_folded_h + /// @param aproof pointer to the proof + function fold_h(aproof) { + let state := mload(0x40) + let n_plus_two := add(VK_DOMAIN_SIZE, 2) + let mPtr := add(mload(0x40), STATE_LAST_MEM) + let zeta_power_n_plus_two := pow(mload(add(state, STATE_ZETA)), n_plus_two, mPtr) + point_mul_calldata(add(state, STATE_FOLDED_H_X), add(aproof, PROOF_H_2_X), zeta_power_n_plus_two, mPtr) + point_add_calldata(add(state, STATE_FOLDED_H_X), add(state, STATE_FOLDED_H_X), add(aproof, PROOF_H_1_X), mPtr) + point_mul(add(state, STATE_FOLDED_H_X), add(state, STATE_FOLDED_H_X), zeta_power_n_plus_two, mPtr) + point_add_calldata(add(state, STATE_FOLDED_H_X), add(state, STATE_FOLDED_H_X), add(aproof, PROOF_H_0_X), mPtr) + } + + /// @notice check that + /// L(ζ)Qₗ(ζ)+r(ζ)Qᵣ(ζ)+R(ζ)L(ζ)Qₘ(ζ)+O(ζ)Qₒ(ζ)+Qₖ(ζ)+Σᵢqc'ᵢ(ζ)BsbCommitmentᵢ(ζ) + + /// α*( Z(μζ)(l(ζ)+β*s₁(ζ)+γ)*(r(ζ)+β*s₂(ζ)+γ)*β*s₃(X)-Z(X)(l(ζ)+β*id_1(ζ)+γ)*(r(ζ)+β*id_2(ζ)+γ)*(o(ζ)+β*id_3(ζ)+γ) ) ) + /// + α²*L₁(ζ) = + /// (ζⁿ-1)H(ζ) + /// @param aproof pointer to the proof + function verify_quotient_poly_eval_at_zeta(aproof) { + let state := mload(0x40) + + // (l(ζ)+β*s1(ζ)+γ) + let s1 := add(mload(0x40), STATE_LAST_MEM) + mstore(s1, mulmod(calldataload(add(aproof, PROOF_S1_AT_ZETA)), mload(add(state, STATE_BETA)), R_MOD)) + mstore(s1, addmod(mload(s1), mload(add(state, STATE_GAMMA)), R_MOD)) + mstore(s1, addmod(mload(s1), calldataload(add(aproof, PROOF_L_AT_ZETA)), R_MOD)) + + // (r(ζ)+β*s2(ζ)+γ) + let s2 := add(s1, 0x20) + mstore(s2, mulmod(calldataload(add(aproof, PROOF_S2_AT_ZETA)), mload(add(state, STATE_BETA)), R_MOD)) + mstore(s2, addmod(mload(s2), 
mload(add(state, STATE_GAMMA)), R_MOD)) + mstore(s2, addmod(mload(s2), calldataload(add(aproof, PROOF_R_AT_ZETA)), R_MOD)) + // _s2 := mload(s2) + + // (o(ζ)+γ) + let o := add(s1, 0x40) + mstore(o, addmod(calldataload(add(aproof, PROOF_O_AT_ZETA)), mload(add(state, STATE_GAMMA)), R_MOD)) + + // α*(Z(μζ))*(l(ζ)+β*s1(ζ)+γ)*(r(ζ)+β*s2(ζ)+γ)*(o(ζ)+γ) + mstore(s1, mulmod(mload(s1), mload(s2), R_MOD)) + mstore(s1, mulmod(mload(s1), mload(o), R_MOD)) + mstore(s1, mulmod(mload(s1), mload(add(state, STATE_ALPHA)), R_MOD)) + mstore(s1, mulmod(mload(s1), calldataload(add(aproof, PROOF_GRAND_PRODUCT_AT_ZETA_OMEGA)), R_MOD)) + + let computed_quotient := add(s1, 0x60) + + // linearizedpolynomial + pi(zeta) + mstore(computed_quotient,addmod(calldataload(add(aproof, PROOF_LINEARISED_POLYNOMIAL_AT_ZETA)), mload(add(state, STATE_PI)), R_MOD)) + mstore(computed_quotient, addmod(mload(computed_quotient), mload(s1), R_MOD)) + mstore(computed_quotient,addmod(mload(computed_quotient), sub(R_MOD, mload(add(state, STATE_ALPHA_SQUARE_LAGRANGE_0))), R_MOD)) + mstore(s2,mulmod(calldataload(add(aproof, PROOF_QUOTIENT_POLYNOMIAL_AT_ZETA)),mload(add(state, STATE_ZETA_POWER_N_MINUS_ONE)),R_MOD)) + + mstore(add(state, STATE_SUCCESS), eq(mload(computed_quotient), mload(s2))) + } + + // BEGINNING utils math functions ------------------------------------------------- + + /// @param dst pointer storing the result + /// @param p pointer to the first point + /// @param q pointer to the second point + /// @param mPtr pointer to free memory + function point_add(dst, p, q, mPtr) { + let state := mload(0x40) + mstore(mPtr, mload(p)) + mstore(add(mPtr, 0x20), mload(add(p, 0x20))) + mstore(add(mPtr, 0x40), mload(q)) + mstore(add(mPtr, 0x60), mload(add(q, 0x20))) + let l_success := staticcall(gas(),6,mPtr,0x80,dst,0x40) + if iszero(l_success) { + error_ec_op() + } + } + + /// @param dst pointer storing the result + /// @param p pointer to the first point (calldata) + /// @param q pointer to the second point (calladata) + /// @param mPtr pointer to free memory + function point_add_calldata(dst, p, q, mPtr) { + let state := mload(0x40) + mstore(mPtr, mload(p)) + mstore(add(mPtr, 0x20), mload(add(p, 0x20))) + mstore(add(mPtr, 0x40), calldataload(q)) + mstore(add(mPtr, 0x60), calldataload(add(q, 0x20))) + let l_success := staticcall(gas(), 6, mPtr, 0x80, dst, 0x40) + if iszero(l_success) { + error_ec_op() + } + } + + /// @parma dst pointer storing the result + /// @param src pointer to a point on Bn254(𝔽_p) + /// @param s scalar + /// @param mPtr free memory + function point_mul(dst,src,s, mPtr) { + let state := mload(0x40) + mstore(mPtr,mload(src)) + mstore(add(mPtr,0x20),mload(add(src,0x20))) + mstore(add(mPtr,0x40),s) + let l_success := staticcall(gas(),7,mPtr,0x60,dst,0x40) + if iszero(l_success) { + error_ec_op() + } + } + + /// @parma dst pointer storing the result + /// @param src pointer to a point on Bn254(𝔽_p) on calldata + /// @param s scalar + /// @param mPtr free memory + function point_mul_calldata(dst, src, s, mPtr) { + let state := mload(0x40) + mstore(mPtr, calldataload(src)) + mstore(add(mPtr, 0x20), calldataload(add(src, 0x20))) + mstore(add(mPtr, 0x40), s) + let l_success := staticcall(gas(), 7, mPtr, 0x60, dst, 0x40) + if iszero(l_success) { + error_ec_op() + } + } + + /// @notice dst <- dst + [s]src (Elliptic curve) + /// @param dst pointer accumulator point storing the result + /// @param src pointer to the point to multiply and add + /// @param s scalar + /// @param mPtr free memory + function 
point_acc_mul(dst,src,s, mPtr) { + let state := mload(0x40) + mstore(mPtr,mload(src)) + mstore(add(mPtr,0x20),mload(add(src,0x20))) + mstore(add(mPtr,0x40),s) + let l_success := staticcall(gas(),7,mPtr,0x60,mPtr,0x40) + mstore(add(mPtr,0x40),mload(dst)) + mstore(add(mPtr,0x60),mload(add(dst,0x20))) + l_success := and(l_success, staticcall(gas(),6,mPtr,0x80,dst, 0x40)) + if iszero(l_success) { + error_ec_op() + } + } + + /// @notice dst <- dst + [s]src (Elliptic curve) + /// @param dst pointer accumulator point storing the result + /// @param src pointer to the point to multiply and add (on calldata) + /// @param s scalar + /// @mPtr free memory + function point_acc_mul_calldata(dst, src, s, mPtr) { + let state := mload(0x40) + mstore(mPtr, calldataload(src)) + mstore(add(mPtr, 0x20), calldataload(add(src, 0x20))) + mstore(add(mPtr, 0x40), s) + let l_success := staticcall(gas(), 7, mPtr, 0x60, mPtr, 0x40) + mstore(add(mPtr, 0x40), mload(dst)) + mstore(add(mPtr, 0x60), mload(add(dst, 0x20))) + l_success := and(l_success, staticcall(gas(), 6, mPtr, 0x80, dst, 0x40)) + if iszero(l_success) { + error_ec_op() + } + } + + /// @notice dst <- dst + src*s (Fr) dst,src are addresses, s is a value + /// @param dst pointer storing the result + /// @param src pointer to the scalar to multiply and add (on calldata) + /// @param s scalar + function fr_acc_mul_calldata(dst, src, s) { + let tmp := mulmod(calldataload(src), s, R_MOD) + mstore(dst, addmod(mload(dst), tmp, R_MOD)) + } + + /// @param x element to exponentiate + /// @param e exponent + /// @param mPtr free memory + /// @return res x ** e mod r + function pow(x, e, mPtr)->res { + mstore(mPtr, 0x20) + mstore(add(mPtr, 0x20), 0x20) + mstore(add(mPtr, 0x40), 0x20) + mstore(add(mPtr, 0x60), x) + mstore(add(mPtr, 0x80), e) + mstore(add(mPtr, 0xa0), R_MOD) + let check_staticcall := staticcall(gas(),0x05,mPtr,0xc0,mPtr,0x20) + if eq(check_staticcall, 0) { + error_verify() + } + res := mload(mPtr) + } + } + } +} +` + +// MarshalSolidity converts a proof to a byte array that can be used in a +// Solidity contract. +func (proof *Proof) MarshalSolidity() []byte { + + res := make([]byte, 0, 1024) + + // uint256 l_com_x; + // uint256 l_com_y; + // uint256 r_com_x; + // uint256 r_com_y; + // uint256 o_com_x; + // uint256 o_com_y; + var tmp64 [64]byte + for i := 0; i < 3; i++ { + tmp64 = proof.LRO[i].RawBytes() + res = append(res, tmp64[:]...) + } + + // uint256 h_0_x; + // uint256 h_0_y; + // uint256 h_1_x; + // uint256 h_1_y; + // uint256 h_2_x; + // uint256 h_2_y; + for i := 0; i < 3; i++ { + tmp64 = proof.H[i].RawBytes() + res = append(res, tmp64[:]...) + } + var tmp32 [32]byte + + // uint256 l_at_zeta; + // uint256 r_at_zeta; + // uint256 o_at_zeta; + // uint256 s1_at_zeta; + // uint256 s2_at_zeta; + for i := 2; i < 7; i++ { + tmp32 = proof.BatchedProof.ClaimedValues[i].Bytes() + res = append(res, tmp32[:]...) + } + + // uint256 grand_product_commitment_x; + // uint256 grand_product_commitment_y; + tmp64 = proof.Z.RawBytes() + res = append(res, tmp64[:]...) + + // uint256 grand_product_at_zeta_omega; + tmp32 = proof.ZShiftedOpening.ClaimedValue.Bytes() + res = append(res, tmp32[:]...) + + // uint256 quotient_polynomial_at_zeta; + // uint256 linearization_polynomial_at_zeta; + tmp32 = proof.BatchedProof.ClaimedValues[0].Bytes() + res = append(res, tmp32[:]...) + tmp32 = proof.BatchedProof.ClaimedValues[1].Bytes() + res = append(res, tmp32[:]...) 
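+ // The fixed part of the encoding totals 0x340 bytes; each BSB22 commitment adds another 0x60 (a 32-byte opening and a 64-byte commitment), matching check_proof_size in the Solidity template.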
+ + // uint256 opening_at_zeta_proof_x; + // uint256 opening_at_zeta_proof_y; + tmp64 = proof.BatchedProof.H.RawBytes() + res = append(res, tmp64[:]...) + + // uint256 opening_at_zeta_omega_proof_x; + // uint256 opening_at_zeta_omega_proof_y; + tmp64 = proof.ZShiftedOpening.H.RawBytes() + res = append(res, tmp64[:]...) + + // uint256[] selector_commit_api_at_zeta; + // uint256[] wire_committed_commitments; + if len(proof.Bsb22Commitments) > 0 { + for i := 0; i < len(proof.Bsb22Commitments); i++ { + tmp32 = proof.BatchedProof.ClaimedValues[7+i].Bytes() + res = append(res, tmp32[:]...) + } + + for _, bc := range proof.Bsb22Commitments { + tmp64 = bc.RawBytes() + res = append(res, tmp64[:]...) + } + } + + return res +} diff --git a/backend/plonk/bn254/icicle/unmarshal.go b/backend/plonk/bn254/icicle/unmarshal.go new file mode 100644 index 0000000000..9edd6a4730 --- /dev/null +++ b/backend/plonk/bn254/icicle/unmarshal.go @@ -0,0 +1,88 @@ +package icicle_bn254 + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" +) + +func UnmarshalSolidity(s []byte, nbCommits int) Proof { + + var proof Proof + offset := 0 + point_size := 64 + fr_size := 32 + proof.BatchedProof.ClaimedValues = make([]fr.Element, 7+nbCommits) + proof.Bsb22Commitments = make([]bn254.G1Affine, nbCommits) + + // uint256 l_com_x; + // uint256 l_com_y; + // uint256 r_com_x; + // uint256 r_com_y; + // uint256 o_com_x; + // uint256 o_com_y; + for i := 0; i < 3; i++ { + proof.LRO[i].Unmarshal(s[offset : offset+point_size]) + offset += point_size + } + + // uint256 h_0_x; + // uint256 h_0_y; + // uint256 h_1_x; + // uint256 h_1_y; + // uint256 h_2_x; + // uint256 h_2_y; + for i := 0; i < 3; i++ { + proof.H[i].Unmarshal(s[offset : offset+point_size]) + offset += point_size + } + + // uint256 l_at_zeta; + // uint256 r_at_zeta; + // uint256 o_at_zeta; + // uint256 s1_at_zeta; + // uint256 s2_at_zeta; + for i := 2; i < 7; i++ { + proof.BatchedProof.ClaimedValues[i].SetBytes(s[offset : offset+fr_size]) + offset += fr_size + } + + // uint256 grand_product_commitment_x; + // uint256 grand_product_commitment_y; + proof.Z.Unmarshal(s[offset : offset+point_size]) + offset += point_size + + // uint256 grand_product_at_zeta_omega; + proof.ZShiftedOpening.ClaimedValue.SetBytes(s[offset : offset+fr_size]) + offset += fr_size + + // uint256 quotient_polynomial_at_zeta; + // uint256 linearization_polynomial_at_zeta; + proof.BatchedProof.ClaimedValues[0].SetBytes(s[offset : offset+fr_size]) + offset += fr_size + proof.BatchedProof.ClaimedValues[1].SetBytes(s[offset : offset+fr_size]) + offset += fr_size + + // uint256 opening_at_zeta_proof_x; + // uint256 opening_at_zeta_proof_y; + proof.BatchedProof.H.Unmarshal(s[offset : offset+point_size]) + offset += point_size + + // uint256 opening_at_zeta_omega_proof_x; + // uint256 opening_at_zeta_omega_proof_y; + proof.ZShiftedOpening.H.Unmarshal(s[offset : offset+point_size]) + offset += point_size + + // uint256[] selector_commit_api_at_zeta; + // uint256[] wire_committed_commitments; + for i := 0; i < nbCommits; i++ { + proof.BatchedProof.ClaimedValues[7+i].SetBytes(s[offset : offset+fr_size]) + offset += fr_size + } + + for i := 0; i < nbCommits; i++ { + proof.Bsb22Commitments[i].Unmarshal(s[offset : offset+point_size]) + offset += point_size + } + + return proof +} diff --git a/backend/plonk/bn254/icicle/verify.go b/backend/plonk/bn254/icicle/verify.go new file mode 100644 index 0000000000..1b7902282e --- /dev/null +++ 
b/backend/plonk/bn254/icicle/verify.go @@ -0,0 +1,401 @@ +// Copyright 2020 ConsenSys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by gnark DO NOT EDIT + +package icicle_bn254 + +import ( + "errors" + "fmt" + "io" + "math/big" + "text/template" + "time" + + "github.com/consensys/gnark-crypto/ecc" + + curve "github.com/consensys/gnark-crypto/ecc/bn254" + + "github.com/consensys/gnark-crypto/ecc/bn254/fp" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" + + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" + "github.com/consensys/gnark/backend" + "github.com/consensys/gnark/logger" +) + +var ( + errWrongClaimedQuotient = errors.New("claimed quotient is not as expected") +) + +func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...backend.VerifierOption) error { + log := logger.Logger().With().Str("curve", "bn254").Str("backend", "plonk").Logger() + start := time.Now() + cfg, err := backend.NewVerifierConfig(opts...) + if err != nil { + return fmt.Errorf("create backend config: %w", err) + } + + if len(proof.Bsb22Commitments) != len(vk.Qcp) { + return errors.New("BSB22 Commitment number mismatch") + } + + // transcript to derive the challenge + fs := fiatshamir.NewTranscript(cfg.ChallengeHash, "gamma", "beta", "alpha", "zeta") + + // The first challenge is derived using the public data: the commitments to the permutation, + // the coefficients of the circuit, and the public inputs. + // derive gamma from the Comm(blinded cl), Comm(blinded cr), Comm(blinded co) + if err := bindPublicData(&fs, "gamma", vk, publicWitness); err != nil { + return err + } + gamma, err := deriveRandomness(&fs, "gamma", &proof.LRO[0], &proof.LRO[1], &proof.LRO[2]) + if err != nil { + return err + } + + // derive beta from Comm(l), Comm(r), Comm(o) + beta, err := deriveRandomness(&fs, "beta") + if err != nil { + return err + } + + // derive alpha from Comm(l), Comm(r), Comm(o), Com(Z), Bsb22Commitments + alphaDeps := make([]*curve.G1Affine, len(proof.Bsb22Commitments)+1) + for i := range proof.Bsb22Commitments { + alphaDeps[i] = &proof.Bsb22Commitments[i] + } + alphaDeps[len(alphaDeps)-1] = &proof.Z + alpha, err := deriveRandomness(&fs, "alpha", alphaDeps...) 
+ if err != nil { + return err + } + + // derive zeta, the point of evaluation + zeta, err := deriveRandomness(&fs, "zeta", &proof.H[0], &proof.H[1], &proof.H[2]) + if err != nil { + return err + } + + // evaluation of Z=Xⁿ⁻¹ at ζ + var zetaPowerM, zzeta fr.Element + var bExpo big.Int + one := fr.One() + bExpo.SetUint64(vk.Size) + zetaPowerM.Exp(zeta, &bExpo) + zzeta.Sub(&zetaPowerM, &one) + + // compute PI = ∑_{i Date: Sun, 4 Feb 2024 12:45:48 -0700 Subject: [PATCH 30/58] add logging --- backend/plonk/bn254/icicle/prove.go | 49 +++++++++++++++++++++++++++-- 1 file changed, 46 insertions(+), 3 deletions(-) diff --git a/backend/plonk/bn254/icicle/prove.go b/backend/plonk/bn254/icicle/prove.go index 909b664835..e9c7d15f63 100644 --- a/backend/plonk/bn254/icicle/prove.go +++ b/backend/plonk/bn254/icicle/prove.go @@ -488,6 +488,9 @@ func (s *instance) solveConstraints() error { } func (s *instance) completeQk() error { + log := logger.Logger() + start := time.Now() + qk := s.pk.trace.Qk.Clone().ToLagrange(&s.pk.Domain[0]).ToRegular() qkCoeffs := qk.Coefficients() @@ -512,6 +515,7 @@ func (s *instance) completeQk() error { s.x[id_Qk] = qk close(s.chQk) + log.Debug().Dur("took", time.Since(start)).Msg("Complete Qk:") return nil } @@ -583,6 +587,8 @@ func (s *instance) deriveGammaAndBeta() error { // and add the contribution of a blinding polynomial b (small degree) // /!\ The polynomial p is supposed to be in Lagrange form. func (s *instance) commitToPolyAndBlinding(p, b *iop.Polynomial) (commit curve.G1Affine, err error) { + log := logger.Logger() + start := time.Now() commit, err = kzgDeviceCommit(p.Coefficients(), s.pk.deviceInfo.G1Device.G1Lagrange) @@ -592,6 +598,7 @@ func (s *instance) commitToPolyAndBlinding(p, b *iop.Polynomial) (commit curve.G commit.Add(&commit, &cb) + log.Debug().Dur("took", time.Since(start)).Int("size", p.Size()).Msg("MSM (commitToPolyAndBlinding):") return } @@ -612,6 +619,9 @@ func (s *instance) deriveZeta() (err error) { // evaluateConstraints computes H func (s *instance) evaluateConstraints() (err error) { + log := logger.Logger() + start := time.Now() + // clone polys from the proving key. 
s.x[id_Ql] = s.pk.trace.Ql.Clone() s.x[id_Qr] = s.pk.trace.Qr.Clone() @@ -688,10 +698,12 @@ func (s *instance) evaluateConstraints() (err error) { close(s.chH) + log.Debug().Dur("took", time.Since(start)).Msg("evaluateConstraints:") return nil } func (s *instance) buildRatioCopyConstraint() (err error) { + log := logger.Logger() // wait for gamma and beta to be derived (or ctx.Done()) select { case <-s.ctx.Done(): @@ -701,6 +713,7 @@ func (s *instance) buildRatioCopyConstraint() (err error) { // TODO @gbotrel having iop.BuildRatioCopyConstraint return something // with capacity = len() + 4 would avoid extra alloc / copy during openZ + start := time.Now() s.x[id_Z], err = iop.BuildRatioCopyConstraint( []*iop.Polynomial{ s.x[id_L], @@ -716,9 +729,11 @@ func (s *instance) buildRatioCopyConstraint() (err error) { if err != nil { return err } + log.Debug().Dur("took", time.Since(start)).Msg("FFT (BuildRatioCopyConstraint):") // commit to the blinded version of z s.proof.Z, err = s.commitToPolyAndBlinding(s.x[id_Z], s.bp[id_Bz]) + log.Debug().Dur("took", time.Since(start)).Msg("BuildRatioCopyConstraints:") close(s.chZ) @@ -727,6 +742,8 @@ func (s *instance) buildRatioCopyConstraint() (err error) { // open Z (blinded) at ωζ func (s *instance) openZ() (err error) { + log := logger.Logger() + // wait for H to be committed and zeta to be derived (or ctx.Done()) select { case <-s.ctx.Done(): @@ -737,10 +754,12 @@ func (s *instance) openZ() (err error) { zetaShifted.Mul(&s.zeta, &s.pk.Vk.Generator) s.blindedZ = getBlindedCoefficients(s.x[id_Z], s.bp[id_Bz]) // open z at zeta + start := time.Now() s.proof.ZShiftedOpening, err = kzgDeviceOpen(s.blindedZ, zetaShifted, s.pk) if err != nil { return err } + log.Debug().Dur("took", time.Since(start)).Msg("MSM (Open Z)") close(s.chZOpening) return nil } @@ -799,6 +818,7 @@ func (s *instance) foldH() error { } func (s *instance) computeLinearizedPolynomial() error { + log := logger.Logger() // wait for H to be committed and zeta to be derived (or ctx.Done()) select { @@ -860,15 +880,18 @@ func (s *instance) computeLinearizedPolynomial() error { ) var err error + timeCommit := time.Now() s.linearizedPolynomialDigest, err = kzg.Commit(s.linearizedPolynomial, s.pk.Kzg, runtime.NumCPU()*2) if err != nil { return err } + log.Debug().Dur("took", time.Since(timeCommit)).Int("size", len(s.linearizedPolynomial)).Msg("MSM (linearizedPolynomial):") close(s.chLinearizedPolynomial) return nil } func (s *instance) batchOpening() error { + log := logger.Logger() polysQcp := coefficients(s.pk.trace.Qcp) polysToOpen := make([][]fr.Element, 7+len(polysQcp)) copy(polysToOpen[7:], polysQcp) @@ -914,6 +937,7 @@ func (s *instance) batchOpening() error { digestsToOpen[6] = s.pk.Vk.S[1] var err error + start := time.Now() s.proof.BatchedProof, err = kzg.BatchOpenSinglePoint( polysToOpen, digestsToOpen, @@ -922,6 +946,7 @@ func (s *instance) batchOpening() error { s.pk.Kzg, s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) + log.Debug().Dur("took", time.Since(start)).Msg("MSM (batchOpeningSinglePoint):") return err } @@ -929,6 +954,9 @@ func (s *instance) batchOpening() error { // evaluate the full set of constraints, all polynomials in x are back in // canonical regular form at the end func (s *instance) computeNumerator() (*iop.Polynomial, error) { + log := logger.Logger() + start := time.Now() + n := s.pk.Domain[0].Cardinality nbBsbGates := (len(s.x) - id_Qci + 1) >> 1 @@ -1077,7 +1105,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { acc.Mul(&acc, &shifters[i]) 
} } - + batchTime := time.Now() //batchApply(s.x, func(p *iop.Polynomial) { // // ON Device // n := p.Size() @@ -1144,6 +1172,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // fft in the correct coset p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() }) + log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (batchApply)") wgBuf.Wait() if _, err := iop.Evaluate( @@ -1175,6 +1204,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // scale everything back go func() { + batchTime := time.Now() for i := id_ZS; i < len(s.x); i++ { s.x[i] = nil } @@ -1195,6 +1225,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { scalePowers(q, cs) } + log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (Scale back batchApply):") close(s.chRestoreLRO) }() @@ -1202,7 +1233,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { wgBuf.Wait() res := iop.NewPolynomial(&cres, iop.Form{Basis: iop.LagrangeCoset, Layout: iop.BitReverse}) - + log.Debug().Dur("took", time.Since(start)).Msg("computeNumerator") return res, nil } @@ -1399,23 +1430,33 @@ func coefficients(p []*iop.Polynomial) [][]fr.Element { } func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) error { + log := logger.Logger() + start := time.Now() + g := new(errgroup.Group) g.Go(func() (err error) { + start := time.Now() proof.H[0], err = kzgDeviceCommit(h1, pk.deviceInfo.G1Device.G1) + log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) g.Go(func() (err error) { + start := time.Now() proof.H[1], err = kzgDeviceCommit(h2, pk.deviceInfo.G1Device.G1) + log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) g.Go(func() (err error) { + start := time.Now() proof.H[2], err = kzgDeviceCommit(h3, pk.deviceInfo.G1Device.G1) + log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) + log.Debug().Dur("took", time.Since(start)).Msg("commitToQuotient") return g.Wait() } @@ -1423,7 +1464,8 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err // The input must be in LagrangeCoset. // The result is in Canonical Regular. (in place using a) func divideByXMinusOne(a *iop.Polynomial, domains [2]*fft.Domain) (*iop.Polynomial, error) { - + log := logger.Logger() + start := time.Now() // check that the basis is LagrangeCoset if a.Basis != iop.LagrangeCoset || a.Layout != iop.BitReverse { return nil, errors.New("invalid form") @@ -1447,6 +1489,7 @@ func divideByXMinusOne(a *iop.Polynomial, domains [2]*fft.Domain) (*iop.Polynomi // since a is in bit reverse order, ToRegular shouldn't do anything a.ToCanonical(domains[1]).ToRegular() + log.Debug().Dur("took", time.Since(start)).Msg("FFT (divideByXMinusOne):") return a, nil } From e6b8c96d774b2e00ec0f7ae706cc1f3bb91c7a61 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 5 Feb 2024 07:27:18 -0700 Subject: [PATCH 31/58] impliment second batch apply (works!) 
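This moves the batchApply passes in computeNumerator onto the device path: each
polynomial is copied to the GPU, interpolated (INTT), multiplied by the shift or
scale-back vector, converted to Montgomery form and evaluated (NTT), then compared
coefficient-by-coefficient against the CPU result. The batchApply callback now also
receives the polynomial's slot index (pn) so the device result can be written back
into the matching entry of s.x. A minimal standalone sketch of the new callback
shape (Poly stands in for *iop.Polynomial and idZS for the id_ZS slot; illustrative
only, not the prover code itself):

    package main

    import "fmt"

    // Poly is a stand-in for *iop.Polynomial in this sketch.
    type Poly struct{ name string }

    const idZS = 3 // stand-in for id_ZS, the slot batchApply skips

    // batchApply hands fn the slot index as well as the polynomial; the
    // goroutine/WaitGroup version is disabled for now, so the device calls
    // run one at a time.
    func batchApply(x []*Poly, fn func(p *Poly, pn int)) {
        for i := 0; i < len(x); i++ {
            if i == idZS {
                continue
            }
            fn(x[i], i)
        }
    }

    func main() {
        x := []*Poly{{"L"}, {"R"}, {"O"}, {"ZS"}}
        batchApply(x, func(p *Poly, pn int) { fmt.Println("poly", pn, p.name) })
    }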
--- backend/plonk/bn254/icicle/prove.go | 206 +++++++++++++++++++--------- 1 file changed, 138 insertions(+), 68 deletions(-) diff --git a/backend/plonk/bn254/icicle/prove.go b/backend/plonk/bn254/icicle/prove.go index e9c7d15f63..9d5ce8ebee 100644 --- a/backend/plonk/bn254/icicle/prove.go +++ b/backend/plonk/bn254/icicle/prove.go @@ -753,6 +753,7 @@ func (s *instance) openZ() (err error) { var zetaShifted fr.Element zetaShifted.Mul(&s.zeta, &s.pk.Vk.Generator) s.blindedZ = getBlindedCoefficients(s.x[id_Z], s.bp[id_Bz]) + // open z at zeta start := time.Now() s.proof.ZShiftedOpening, err = kzgDeviceOpen(s.blindedZ, zetaShifted, s.pk) @@ -760,6 +761,7 @@ func (s *instance) openZ() (err error) { return err } log.Debug().Dur("took", time.Since(start)).Msg("MSM (Open Z)") + close(s.chZOpening) return nil } @@ -881,7 +883,7 @@ func (s *instance) computeLinearizedPolynomial() error { var err error timeCommit := time.Now() - s.linearizedPolynomialDigest, err = kzg.Commit(s.linearizedPolynomial, s.pk.Kzg, runtime.NumCPU()*2) + s.linearizedPolynomialDigest, err = kzgDeviceCommit(s.linearizedPolynomial, s.pk.deviceInfo.G1Device.G1, runtime.NumCPU()*2) if err != nil { return err } @@ -1068,7 +1070,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } // select the correct scaling vector to scale by shifter[i] - selectScalingVector := func(i int, l iop.Layout) []fr.Element { + selectScalingVector := func(i int, l iop.Layout, np int) []fr.Element { var w []fr.Element if i == 0 { if l == iop.Regular { @@ -1106,72 +1108,98 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } } batchTime := time.Now() - //batchApply(s.x, func(p *iop.Polynomial) { - // // ON Device - // n := p.Size() - // sizeBytes := p.Size() * fr.Bytes + batchApply(s.x, func(p *iop.Polynomial, pn int) { + // ON Device + n := p.Size() + sizeBytes := p.Size() * fr.Bytes - // copyADone := make(chan unsafe.Pointer, 1) - // go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) - // a_device := <-copyADone + copyADone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) + a_device := <-copyADone - // // scale by shifter[i] - // w := selectScalingVector(i, p.Layout) - // - // copyWDone := make(chan unsafe.Pointer, 1) - // go iciclegnark.CopyToDevice(w, sizeBytes, copyWDone) - // w_device := <-copyWDone - - // // Initialize channels - // computeInttNttDone := make(chan error, 1) - // computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { - // a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) - - // iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) - // iciclegnark.MontConvOnDevice(a_intt_d, n, true) - // iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, s.pk.deviceInfo.DomainDevice.CosetTable, n, n, sizeBytes, true) - // - // computeInttNttDone <- nil - // iciclegnark.FreeDevicePointer(a_intt_d) - // } - // // Run computeInttNttOnDevice on device - // go computeInttNttOnDevice(w_device, a_device) - // _ = <-computeInttNttDone - - // res := iciclegnark.CopyScalarsToHost(a_device, n, sizeBytes) - // p = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) - // fmt.Print("GPU", p.Coefficients()[0], "\n") - - // go func() { - // iciclegnark.FreeDevicePointer(a_device) - // iciclegnark.FreeDevicePointer(w_device) - // }() - //}) + // scale by shifter[i] + w := selectScalingVector(i, p.Layout, pn) + + 
copyWDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(w, sizeBytes, copyWDone) + w_device := <-copyWDone + + // Initialize channels + computeInttNttDone := make(chan error, 1) + computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { + + a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) + + iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) + iciclegnark.MontConvOnDevice(a_intt_d, n, true) + iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, s.pk.deviceInfo.DomainDevice.CosetTable, n, n, sizeBytes, true) + + res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes) + + nbTasks := calculateNbTasks(len(s.x)-1) * 2 + p.ToCanonical(&s.pk.Domain[0], nbTasks).ToRegular() + + cp := p.Coefficients() + utils.Parallelize(len(cp), func(start, end int) { + for j := start; j < end; j++ { + cp[j].Mul(&cp[j], &w[j]) + } + }, nbTasks) + + isEqual := 0 + notEqual := 0 + for j := 0; j < len(res); j++ { + if res[j] != cp[j] { + notEqual++ + } else { + isEqual++ + } + } + //fmt.Print("GPU == CPU for poly:", pn, " isEqual: ", isEqual, " notEqual: ", notEqual, "\n") + + p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + + computeInttNttDone <- nil + iciclegnark.FreeDevicePointer(a_intt_d) + } + // Run computeInttNttOnDevice on device + go computeInttNttOnDevice(w_device, a_device) + _ = <-computeInttNttDone + + //p = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) + //fmt.Print("GPU", p.Coefficients()[0], "\n") + + go func() { + iciclegnark.FreeDevicePointer(a_device) + iciclegnark.FreeDevicePointer(w_device) + }() + }) // we do **a lot** of FFT here, but on the small domain. // note that for all the polynomials in the proving key // (Ql, Qr, Qm, Qo, S1, S2, S3, Qcp, Qc) and ID, LOne // we could pre-compute theses rho*2 FFTs and store them // at the cost of a huge memory footprint. 
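 // Sketch only: the GPU/CPU cross-check a few lines above boils down to counting,
 // for each polynomial, how many coefficients of the device result agree with the
 // CPU reference. fr here is the bn254 scalar field already imported by this file.
 countMismatches := func(gpu, cpu []fr.Element) (equal, notEqual int) {
 	for j := range gpu {
 		if gpu[j] != cpu[j] {
 			notEqual++
 		} else {
 			equal++
 		}
 	}
 	return
 }
 _ = countMismatches // illustrative helper, not used by the prover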
- batchApply(s.x, func(p *iop.Polynomial) { - nbTasks := calculateNbTasks(len(s.x)-1) * 2 - // shift polynomials to be in the correct coset - p.ToCanonical(&s.pk.Domain[0], nbTasks) - //fmt.Print("CPU",p.Coefficients()[0], "\n") + //batchApply(s.x, func(p *iop.Polynomial, pn int) { + // nbTasks := calculateNbTasks(len(s.x)-1) * 2 + // // shift polynomials to be in the correct coset + // p.ToCanonical(&s.pk.Domain[0], nbTasks) - // scale by shifter[i] - w := selectScalingVector(i, p.Layout) + // // scale by shifter[i] + // w := selectScalingVector(i, p.Layout) - cp := p.Coefficients() - utils.Parallelize(len(cp), func(start, end int) { - for j := start; j < end; j++ { - cp[j].Mul(&cp[j], &w[j]) - } - }, nbTasks) + // cp := p.Coefficients() + // utils.Parallelize(len(cp), func(start, end int) { + // for j := start; j < end; j++ { + // cp[j].Mul(&cp[j], &w[j]) + // } + // }, nbTasks) + // //fmt.Print("CPU", pn, cp[0], "\n") - // fft in the correct coset - p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() - }) + // // fft in the correct coset + // p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + //}) + //os.Exit(0) log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (batchApply)") wgBuf.Wait() @@ -1216,9 +1244,48 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } cs.Inverse(&cs) - batchApply(s.x[:id_ZS], func(p *iop.Polynomial) { - p.ToCanonical(&s.pk.Domain[0], 8).ToRegular() - scalePowers(p, cs) + batchApply(s.x[:id_ZS], func(p *iop.Polynomial, pn int) { + // ON Device + n := p.Size() + sizeBytes := p.Size() * fr.Bytes + + copyADone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) + a_device := <-copyADone + + // HACK do a better way + var acc fr.Element + acc.SetOne() + accList := make([]fr.Element, p.Size()) + for j := 0; j < p.Size(); j++ { + accList[j].Set(&acc) + acc.Mul(&acc, &cs) + } + + copyWDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(accList, sizeBytes, copyWDone) + w_device := <-copyWDone + + // Initialize channels + computeInttNttDone := make(chan error, 1) + computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { + + a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) + + iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) + iciclegnark.MontConvOnDevice(a_intt_d, n, true) + + res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes) + + s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular}) + + computeInttNttDone <- nil + iciclegnark.FreeDevicePointer(a_intt_d) + } + // Run computeInttNttOnDevice on device + go computeInttNttOnDevice(w_device, a_device) + _ = <-computeInttNttDone + }) for _, q := range s.bp { @@ -1248,19 +1315,19 @@ func calculateNbTasks(n int) int { } // batchApply executes fn on all polynomials in x except x[id_ZS] in parallel. 
-func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial)) { - var wg sync.WaitGroup +func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) { + //var wg sync.WaitGroup for i := 0; i < len(x); i++ { if i == id_ZS { continue } - wg.Add(1) - go func(i int) { - fn(x[i]) - wg.Done() - }(i) + //wg.Add(1) + //go func(i int) { + fn(x[i], i) + // wg.Done() + // }(i) } - wg.Wait() + // wg.Wait() } // p <- @@ -1441,6 +1508,7 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) + g.Wait() g.Go(func() (err error) { start := time.Now() @@ -1448,6 +1516,7 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) + g.Wait() g.Go(func() (err error) { start := time.Now() @@ -1455,6 +1524,7 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) + g.Wait() log.Debug().Dur("took", time.Since(start)).Msg("commitToQuotient") return g.Wait() From 8ddcc5b2459f6c1008706945f5022355edcebd05 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 5 Feb 2024 13:56:32 -0700 Subject: [PATCH 32/58] batch apply ntt works --- backend/plonk/bn254/icicle/prove.go | 60 +++++++++++------------------ 1 file changed, 22 insertions(+), 38 deletions(-) diff --git a/backend/plonk/bn254/icicle/prove.go b/backend/plonk/bn254/icicle/prove.go index 9d5ce8ebee..aa1e581cac 100644 --- a/backend/plonk/bn254/icicle/prove.go +++ b/backend/plonk/bn254/icicle/prove.go @@ -1109,6 +1109,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } batchTime := time.Now() batchApply(s.x, func(p *iop.Polynomial, pn int) { + nbTasks := calculateNbTasks(len(s.x)-1) * 2 // ON Device n := p.Size() sizeBytes := p.Size() * fr.Bytes @@ -1127,14 +1128,13 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // Initialize channels computeInttNttDone := make(chan error, 1) computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { - a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) - iciclegnark.MontConvOnDevice(a_intt_d, n, true) - iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, s.pk.deviceInfo.DomainDevice.CosetTable, n, n, sizeBytes, true) + iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, nil, n, n, sizeBytes, false) + iciclegnark.MontConvOnDevice(devicePointer, n, true) - res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes) + res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) nbTasks := calculateNbTasks(len(s.x)-1) * 2 p.ToCanonical(&s.pk.Domain[0], nbTasks).ToRegular() @@ -1146,6 +1146,9 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } }, nbTasks) + p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + + cp = p.Coefficients() isEqual := 0 notEqual := 0 for j := 0; j < len(res); j++ { @@ -1155,51 +1158,34 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { isEqual++ } } - //fmt.Print("GPU == CPU for poly:", pn, " isEqual: ", isEqual, " notEqual: ", notEqual, "\n") + fmt.Print("GPU == CPU for poly:", pn, " isEqual: ", isEqual, " notEqual: ", notEqual, 
"\n") - p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) computeInttNttDone <- nil iciclegnark.FreeDevicePointer(a_intt_d) } // Run computeInttNttOnDevice on device - go computeInttNttOnDevice(w_device, a_device) - _ = <-computeInttNttDone + if p.Basis == iop.Lagrange { + go computeInttNttOnDevice(w_device, a_device) + _ = <-computeInttNttDone + + } else { + cp := p.Coefficients() + utils.Parallelize(len(cp), func(start, end int) { + for j := start; j < end; j++ { + cp[j].Mul(&cp[j], &w[j]) + } + }, nbTasks) - //p = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) - //fmt.Print("GPU", p.Coefficients()[0], "\n") + p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + } go func() { iciclegnark.FreeDevicePointer(a_device) iciclegnark.FreeDevicePointer(w_device) }() }) - - // we do **a lot** of FFT here, but on the small domain. - // note that for all the polynomials in the proving key - // (Ql, Qr, Qm, Qo, S1, S2, S3, Qcp, Qc) and ID, LOne - // we could pre-compute theses rho*2 FFTs and store them - // at the cost of a huge memory footprint. - //batchApply(s.x, func(p *iop.Polynomial, pn int) { - // nbTasks := calculateNbTasks(len(s.x)-1) * 2 - // // shift polynomials to be in the correct coset - // p.ToCanonical(&s.pk.Domain[0], nbTasks) - - // // scale by shifter[i] - // w := selectScalingVector(i, p.Layout) - - // cp := p.Coefficients() - // utils.Parallelize(len(cp), func(start, end int) { - // for j := start; j < end; j++ { - // cp[j].Mul(&cp[j], &w[j]) - // } - // }, nbTasks) - // //fmt.Print("CPU", pn, cp[0], "\n") - - // // fft in the correct coset - // p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() - //}) - //os.Exit(0) log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (batchApply)") wgBuf.Wait() @@ -1253,7 +1239,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) a_device := <-copyADone - // HACK do a better way var acc fr.Element acc.SetOne() accList := make([]fr.Element, p.Size()) @@ -1269,7 +1254,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // Initialize channels computeInttNttDone := make(chan error, 1) computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { - a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) From fbfcf8da1f8d62e20e524626f45b1f1363644e23 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Tue, 6 Feb 2024 08:51:28 -0700 Subject: [PATCH 33/58] batch apply fft works --- .../bn254/icicle/{prove.go => icicle.go} | 41 +++++++------------ backend/plonk/plonk.go | 8 +++- 2 files changed, 21 insertions(+), 28 deletions(-) rename backend/plonk/bn254/icicle/{prove.go => icicle.go} (98%) diff --git a/backend/plonk/bn254/icicle/prove.go b/backend/plonk/bn254/icicle/icicle.go similarity index 98% rename from backend/plonk/bn254/icicle/prove.go rename to backend/plonk/bn254/icicle/icicle.go index aa1e581cac..14206b58a6 100644 --- a/backend/plonk/bn254/icicle/prove.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1136,49 +1136,36 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) - nbTasks := calculateNbTasks(len(s.x)-1) * 2 - p.ToCanonical(&s.pk.Domain[0], nbTasks).ToRegular() - cp := p.Coefficients() utils.Parallelize(len(cp), 
func(start, end int) { for j := start; j < end; j++ { - cp[j].Mul(&cp[j], &w[j]) + cp[j].Set(&res[j]) } }, nbTasks) - p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + computeInttNttDone <- nil + iciclegnark.FreeDevicePointer(a_intt_d) + } - cp = p.Coefficients() - isEqual := 0 - notEqual := 0 - for j := 0; j < len(res); j++ { - if res[j] != cp[j] { - notEqual++ - } else { - isEqual++ - } - } - fmt.Print("GPU == CPU for poly:", pn, " isEqual: ", isEqual, " notEqual: ", notEqual, "\n") + computeNttDone := make(chan error, 1) + computeNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { + iciclegnark.VecMulOnDevice(devicePointer, scaleVecPtr, n) + iciclegnark.NttOnDevice(devicePointer, devicePointer, s.pk.deviceInfo.DomainDevice.Twiddles, nil, n, n, sizeBytes, false) + iciclegnark.MontConvOnDevice(devicePointer, n, true) + res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) - computeInttNttDone <- nil - iciclegnark.FreeDevicePointer(a_intt_d) + computeNttDone <- nil } + // Run computeInttNttOnDevice on device if p.Basis == iop.Lagrange { go computeInttNttOnDevice(w_device, a_device) _ = <-computeInttNttDone - } else { - cp := p.Coefficients() - utils.Parallelize(len(cp), func(start, end int) { - for j := start; j < end; j++ { - cp[j].Mul(&cp[j], &w[j]) - } - }, nbTasks) - - p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + go computeNttOnDevice(w_device, a_device) + _ = <- computeNttDone } go func() { diff --git a/backend/plonk/plonk.go b/backend/plonk/plonk.go index 2fd7634273..3c5cceac7b 100644 --- a/backend/plonk/plonk.go +++ b/backend/plonk/plonk.go @@ -172,7 +172,13 @@ func Verify(proof Proof, vk VerifyingKey, publicWitness witness.Witness, opts .. return witness.ErrInvalidWitness } return icicle_bn254.Verify(_proof, vk.(*icicle_bn254.VerifyingKey), w, opts...) - // TODO add plonk_bn254.Verify + + case *plonk_bn254.Proof: + w, ok := publicWitness.Vector().(fr_bn254.Vector) + if !ok { + return witness.ErrInvalidWitness + } + return plonk_bn254.Verify(_proof, vk.(*plonk_bn254.VerifyingKey), w, opts...) 
case *plonk_bls12381.Proof: w, ok := publicWitness.Vector().(fr_bls12381.Vector) From fe085358553f617a6e318af579f0e6c55871e8a9 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Wed, 7 Feb 2024 07:18:31 -0700 Subject: [PATCH 34/58] update to import from kzg lib --- backend/plonk/bn254/icicle/icicle.go | 19 ++++++++++--------- go.mod | 14 ++++++++------ go.sum | 23 +++++++++++------------ 3 files changed, 29 insertions(+), 27 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 14206b58a6..9d9ddc9b2e 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -41,7 +41,7 @@ import ( "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bn254/fr/iop" - "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + kzg "github.com/consensys/gnark-crypto/ecc/bn254/kzg" fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/witness" @@ -590,7 +590,7 @@ func (s *instance) commitToPolyAndBlinding(p, b *iop.Polynomial) (commit curve.G log := logger.Logger() start := time.Now() - commit, err = kzgDeviceCommit(p.Coefficients(), s.pk.deviceInfo.G1Device.G1Lagrange) + commit, err = kzg.OnDeviceCommit(p.Coefficients(), s.pk.deviceInfo.G1Device.G1Lagrange) // we add in the blinding contribution n := int(s.pk.Domain[0].Cardinality) @@ -756,7 +756,7 @@ func (s *instance) openZ() (err error) { // open z at zeta start := time.Now() - s.proof.ZShiftedOpening, err = kzgDeviceOpen(s.blindedZ, zetaShifted, s.pk) + s.proof.ZShiftedOpening, err = kzg.OnDeviceOpen(s.blindedZ, zetaShifted, s.pk.deviceInfo.G1Device.G1) if err != nil { return err } @@ -883,7 +883,7 @@ func (s *instance) computeLinearizedPolynomial() error { var err error timeCommit := time.Now() - s.linearizedPolynomialDigest, err = kzgDeviceCommit(s.linearizedPolynomial, s.pk.deviceInfo.G1Device.G1, runtime.NumCPU()*2) + s.linearizedPolynomialDigest, err = kzg.OnDeviceCommit(s.linearizedPolynomial, s.pk.deviceInfo.G1Device.G1, runtime.NumCPU()*2) if err != nil { return err } @@ -940,12 +940,13 @@ func (s *instance) batchOpening() error { var err error start := time.Now() - s.proof.BatchedProof, err = kzg.BatchOpenSinglePoint( + s.proof.BatchedProof, err = kzg.OnDeviceBatchOpenSinglePoint( polysToOpen, digestsToOpen, s.zeta, s.kzgFoldingHash, s.pk.Kzg, + s.pk.deviceInfo.G1Device.G1, s.proof.ZShiftedOpening.ClaimedValue.Marshal(), ) log.Debug().Dur("took", time.Since(start)).Msg("MSM (batchOpeningSinglePoint):") @@ -1040,7 +1041,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { cres := s.cres buf := make([]fr.Element, n) var wgBuf sync.WaitGroup - + allConstraints := func(i int, u ...fr.Element) fr.Element { // scale S1, S2, S3 by β u[id_S1].Mul(&u[id_S1], &s.beta) @@ -1475,7 +1476,7 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err g.Go(func() (err error) { start := time.Now() - proof.H[0], err = kzgDeviceCommit(h1, pk.deviceInfo.G1Device.G1) + proof.H[0], err = kzg.OnDeviceCommit(h1, pk.deviceInfo.G1Device.G1) log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) @@ -1483,7 +1484,7 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err g.Go(func() (err error) { start := time.Now() - proof.H[1], err = kzgDeviceCommit(h2, pk.deviceInfo.G1Device.G1) + proof.H[1], err = kzg.OnDeviceCommit(h2, pk.deviceInfo.G1Device.G1) 
log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) @@ -1491,7 +1492,7 @@ func commitToQuotient(h1, h2, h3 []fr.Element, proof *Proof, pk *ProvingKey) err g.Go(func() (err error) { start := time.Now() - proof.H[2], err = kzgDeviceCommit(h3, pk.deviceInfo.G1Device.G1) + proof.H[2], err = kzg.OnDeviceCommit(h3, pk.deviceInfo.G1Device.G1) log.Debug().Dur("took", time.Since(start)).Int("size", len(h3)).Msg("MSM (commitToQuotient):") return }) diff --git a/go.mod b/go.mod index 9d65bff128..9d2b7cae87 100644 --- a/go.mod +++ b/go.mod @@ -6,29 +6,31 @@ require ( github.com/bits-and-blooms/bitset v1.8.0 github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.13 - github.com/consensys/gnark-crypto v0.12.2-0.20231023220848-538dff926c15 + github.com/consensys/gnark-crypto v0.12.2-0.20231208203441-d4eab6ddd2af github.com/fxamacker/cbor/v2 v2.5.0 github.com/google/go-cmp v0.5.9 github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b - github.com/ingonyama-zk/iciclegnark v0.1.0 + //github.com/ingonyama-zk/iciclegnark v0.1.0. + github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240131141109-5f8923e3fbd5 github.com/leanovate/gopter v0.2.9 github.com/rs/zerolog v1.30.0 github.com/stretchr/testify v1.8.4 - golang.org/x/crypto v0.12.0 + golang.org/x/crypto v0.17.0 golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 golang.org/x/sync v0.3.0 ) require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71 // indirect + github.com/ingonyama-zk/icicle v0.1.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.19 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/x448/float16 v0.8.4 // indirect - golang.org/x/sys v0.11.0 // indirect + golang.org/x/sys v0.15.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect ) + +replace github.com/consensys/gnark-crypto v0.12.2-0.20231208203441-d4eab6ddd2af => github.com/ingonyama-zk/gnark-crypto v0.0.0-20240207133741-7e078bb23cf3 diff --git a/go.sum b/go.sum index 9b26c86b07..40c777036f 100644 --- a/go.sum +++ b/go.sum @@ -4,8 +4,6 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= -github.com/consensys/gnark-crypto v0.12.2-0.20231023220848-538dff926c15 h1:fu5ienFKWWqrfMPbWnhw4zfIFZW3pzVIbv3KtASymbU= -github.com/consensys/gnark-crypto v0.12.2-0.20231023220848-538dff926c15/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -17,10 +15,12 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b h1:h9U78+dx9a4BKdQkBBos92HalKpaGKHrp+3Uo6yTodo= github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/subcommands v1.2.0/go.mod 
h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= -github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71 h1:YxI1RTPzpFJ3MBmxPl3Bo0F7ume7CmQEC1M9jL6CT94= -github.com/ingonyama-zk/icicle v0.0.0-20230928131117-97f0079e5c71/go.mod h1:kAK8/EoN7fUEmakzgZIYdWy1a2rBnpCaZLqSHwZWxEk= -github.com/ingonyama-zk/iciclegnark v0.1.0 h1:88MkEghzjQBMjrYRJFxZ9oR9CTIpB8NG2zLeCJSvXKQ= -github.com/ingonyama-zk/iciclegnark v0.1.0/go.mod h1:wz6+IpyHKs6UhMMoQpNqz1VY+ddfKqC/gRwR/64W6WU= +github.com/ingonyama-zk/gnark-crypto v0.0.0-20240207133741-7e078bb23cf3 h1:N5oouLARz+PIZD9FWr2U/UvNf6IMIvqch9tDlC4t3c8= +github.com/ingonyama-zk/gnark-crypto v0.0.0-20240207133741-7e078bb23cf3/go.mod h1:Rkbv3haumAdndgfVlSAxACC2p0661ly3oAhTbQBknCY= +github.com/ingonyama-zk/icicle v0.1.0 h1:9zbHaYv8/4g3HWRabBCpeH+64U8GJ99K1qeqE2jO6LM= +github.com/ingonyama-zk/icicle v0.1.0/go.mod h1:kAK8/EoN7fUEmakzgZIYdWy1a2rBnpCaZLqSHwZWxEk= +github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240131141109-5f8923e3fbd5 h1:LZeh9IVCrZ8RiHZy1JuC+kKlTESUENxRrdBHwVBDFZ4= +github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240131141109-5f8923e3fbd5/go.mod h1:g17CDuMfNBiN4hhZ4aA0rGF24Abv5GBFHJqE7aLxaZQ= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= @@ -38,8 +38,7 @@ github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFV github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= -github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.30.0 h1:SymVODrcRsaRaSInD9yQtKbtWqwsfoPcRff/oRXLj4c= github.com/rs/zerolog v1.30.0/go.mod h1:/tk+P47gFdPXq4QYjvCmT5/Gsug2nagsFWBWhAiSi1w= @@ -47,8 +46,8 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= -golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 h1:m64FZMko/V45gv0bNmrNYoDEq8U5YUhetc9cBWKS1TQ= golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63/go.mod h1:0v4NqG35kSWCMzLaMeX+IQrlSnVE/bqGSyC2cz/9Le8= golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= @@ -57,8 +56,8 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= -golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= From 57d9c354425654249fbfc67823d2638bfb159bcc Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Wed, 7 Feb 2024 08:33:39 -0700 Subject: [PATCH 35/58] update for fast ntt(broken) --- backend/plonk/bn254/icicle/icicle.go | 29 +++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 9d9ddc9b2e..351f1ec2b6 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1094,6 +1094,17 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { m := uint64(s.pk.Domain[1].Cardinality) mm := uint64(64 - bits.TrailingZeros64(m)) + var devicePointers []unsafe.Pointer + for i := 0; i < len(s.x); i++ { + sizeBytes := s.x[i].Size() * fr.Bytes + + copyADone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(s.x[i].Coefficients(), sizeBytes, copyADone) + a_device := <-copyADone + + devicePointers = append(devicePointers, a_device) + } + for i := 0; i < rho; i++ { coset.Mul(&coset, &shifters[i]) @@ -1111,13 +1122,14 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { batchTime := time.Now() batchApply(s.x, func(p *iop.Polynomial, pn int) { nbTasks := calculateNbTasks(len(s.x)-1) * 2 - // ON Device + n := p.Size() sizeBytes := p.Size() * fr.Bytes - copyADone := make(chan unsafe.Pointer, 1) - go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) - a_device := <-copyADone + //copyADone := make(chan unsafe.Pointer, 1) + //go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) + //a_device := <-copyADone + a_device := devicePointers[pn] // scale by shifter[i] w := selectScalingVector(i, p.Layout, pn) @@ -1128,15 +1140,19 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // Initialize channels computeInttNttDone := make(chan error, 1) + + // INTT and NTT on device computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false) + // Multiply by shifter[i] iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n) iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, nil, n, n, sizeBytes, false) iciclegnark.MontConvOnDevice(devicePointer, n, true) res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) + // Copy back to host cp := p.Coefficients() utils.Parallelize(len(cp), func(start, end int) { for j := start; j < end; j++ { @@ -1148,19 +1164,22 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { iciclegnark.FreeDevicePointer(a_intt_d) } + // NTT on device computeNttDone := make(chan error, 1) computeNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { + // Multiply by shifter[i] iciclegnark.VecMulOnDevice(devicePointer, scaleVecPtr, n) 
iciclegnark.NttOnDevice(devicePointer, devicePointer, s.pk.deviceInfo.DomainDevice.Twiddles, nil, n, n, sizeBytes, false) iciclegnark.MontConvOnDevice(devicePointer, n, true) + // Copy back to host res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) computeNttDone <- nil } - // Run computeInttNttOnDevice on device + // Run INTT and NTT on device if p.Basis == iop.Lagrange { go computeInttNttOnDevice(w_device, a_device) _ = <-computeInttNttDone From b5f55ffd17aa241eccf08f781730e9bc3c774af8 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Wed, 7 Feb 2024 08:45:26 -0700 Subject: [PATCH 36/58] ntt done --- backend/plonk/bn254/icicle/icicle.go | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 351f1ec2b6..b97dd7be36 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1094,17 +1094,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { m := uint64(s.pk.Domain[1].Cardinality) mm := uint64(64 - bits.TrailingZeros64(m)) - var devicePointers []unsafe.Pointer - for i := 0; i < len(s.x); i++ { - sizeBytes := s.x[i].Size() * fr.Bytes - - copyADone := make(chan unsafe.Pointer, 1) - go iciclegnark.CopyToDevice(s.x[i].Coefficients(), sizeBytes, copyADone) - a_device := <-copyADone - - devicePointers = append(devicePointers, a_device) - } - for i := 0; i < rho; i++ { coset.Mul(&coset, &shifters[i]) @@ -1126,10 +1115,9 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { n := p.Size() sizeBytes := p.Size() * fr.Bytes - //copyADone := make(chan unsafe.Pointer, 1) - //go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) - //a_device := <-copyADone - a_device := devicePointers[pn] + copyADone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) + a_device := <-copyADone // scale by shifter[i] w := selectScalingVector(i, p.Layout, pn) From 83d72ee538984de0237e581298f5f95bfa8fe8e7 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Wed, 7 Feb 2024 11:02:03 -0700 Subject: [PATCH 37/58] move ntt polys to device earlier --- backend/plonk/bn254/icicle/icicle.go | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index b97dd7be36..5e02e502bc 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1094,6 +1094,17 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { m := uint64(s.pk.Domain[1].Cardinality) mm := uint64(64 - bits.TrailingZeros64(m)) + var devicePointers []unsafe.Pointer + for i := 0; i < len(s.x); i++ { + sizeBytes := s.x[i].Size() * fr.Bytes + + copyADone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(s.x[i].Coefficients(), sizeBytes, copyADone) + a_device := <-copyADone + + devicePointers = append(devicePointers, a_device) + } + for i := 0; i < rho; i++ { coset.Mul(&coset, &shifters[i]) @@ -1115,10 +1126,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { n := p.Size() sizeBytes := p.Size() * fr.Bytes - copyADone := make(chan unsafe.Pointer, 1) - go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) - a_device := <-copyADone - // scale by shifter[i] w := selectScalingVector(i, p.Layout, pn) @@ -1148,6 +1155,8 @@ func (s *instance) 
computeNumerator() (*iop.Polynomial, error) { } }, nbTasks) + iciclegnark.MontConvOnDevice(devicePointer, n, false) + computeInttNttDone <- nil iciclegnark.FreeDevicePointer(a_intt_d) } @@ -1164,22 +1173,21 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) + iciclegnark.MontConvOnDevice(devicePointer, n, false) + computeNttDone <- nil } // Run INTT and NTT on device if p.Basis == iop.Lagrange { - go computeInttNttOnDevice(w_device, a_device) + go computeInttNttOnDevice(w_device, devicePointers[pn]) _ = <-computeInttNttDone } else { - go computeNttOnDevice(w_device, a_device) + go computeNttOnDevice(w_device, devicePointers[pn]) _ = <- computeNttDone } - go func() { - iciclegnark.FreeDevicePointer(a_device) - iciclegnark.FreeDevicePointer(w_device) - }() + go iciclegnark.FreeDevicePointer(w_device) }) log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (batchApply)") From 545df18b1e27ae87599770fd1aec538a1f279c8c Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Wed, 7 Feb 2024 11:04:43 -0700 Subject: [PATCH 38/58] refactor scale back fft --- backend/plonk/bn254/icicle/icicle.go | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 5e02e502bc..9e320807b6 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1238,10 +1238,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { n := p.Size() sizeBytes := p.Size() * fr.Bytes - copyADone := make(chan unsafe.Pointer, 1) - go iciclegnark.CopyToDevice(p.Coefficients(), sizeBytes, copyADone) - a_device := <-copyADone - var acc fr.Element acc.SetOne() accList := make([]fr.Element, p.Size()) @@ -1266,11 +1262,13 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular}) + iciclegnark.MontConvOnDevice(a_intt_d, n, false) + computeInttNttDone <- nil iciclegnark.FreeDevicePointer(a_intt_d) } // Run computeInttNttOnDevice on device - go computeInttNttOnDevice(w_device, a_device) + go computeInttNttOnDevice(w_device, devicePointers[pn]) _ = <-computeInttNttDone }) From 421732a184d5ba8289c80d0db2af1af74a7c2f6b Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Wed, 7 Feb 2024 12:21:36 -0700 Subject: [PATCH 39/58] remove kzg.go --- backend/plonk/bn254/icicle/kzg.go | 143 ------------------------------ 1 file changed, 143 deletions(-) delete mode 100644 backend/plonk/bn254/icicle/kzg.go diff --git a/backend/plonk/bn254/icicle/kzg.go b/backend/plonk/bn254/icicle/kzg.go deleted file mode 100644 index 75d9aad88c..0000000000 --- a/backend/plonk/bn254/icicle/kzg.go +++ /dev/null @@ -1,143 +0,0 @@ -package icicle_bn254 - -import ( - "errors" - "fmt" - "sync" - "unsafe" - - "github.com/consensys/gnark-crypto/ecc/bn254" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "github.com/consensys/gnark-crypto/ecc/bn254/kzg" - iciclegnark "github.com/ingonyama-zk/iciclegnark/curves/bn254" -) - -var ( - ErrInvalidPolynomialSize = errors.New("invalid polynomial size (larger than SRS or == 0)") -) - -// Digest commitment of a polynomial. -type Digest = bn254.G1Affine - -// Commit commits to a polynomial using a multi exponentiation with the SRS. -// It is assumed that the polynomial is in canonical form, in Montgomery form. 
-func kzgDeviceCommit(p []fr.Element, G1 unsafe.Pointer, nbTasks ...int) (Digest, error) { - // Size of the polynomial - np := len(p) - - // Size of the polynomial in bytes - sizeBytesScalars := np * fr.Bytes - - // Initialize Scalar channels - copyCpDone := make(chan unsafe.Pointer, 1) - cpDeviceData := make(chan iciclegnark.OnDeviceData, 1) - - // Copy Scalar to device - go func() { - // Perform copy operation - iciclegnark.CopyToDevice(p, sizeBytesScalars, copyCpDone) - - // Receive result once copy operation is done - cpDevice := <-copyCpDone - - // Create OnDeviceData - cpDeviceValue := iciclegnark.OnDeviceData{ - P: cpDevice, - Size: sizeBytesScalars, - } - - // Send OnDeviceData to respective channel - cpDeviceData <- cpDeviceValue - - // Close channels - close(copyCpDone) - close(cpDeviceData) - }() - - // Wait for copy operation to finish - cpDeviceValue := <-cpDeviceData - - // KZG Committment on device - var wg sync.WaitGroup - - // Perform multi exponentiation on device - wg.Add(1) - tmpChan := make(chan bn254.G1Affine, 1) - go func() { - defer wg.Done() - tmp, _, err := iciclegnark.MsmOnDevice(cpDeviceValue.P, G1, np, true) - //fmt.Println("tmp", tmp) - if err != nil { - fmt.Print("error", err) - } - var res bn254.G1Affine - res.FromJacobian(&tmp) - tmpChan <- res - }() - wg.Wait() - - // Receive result once copy operation is done - res := <-tmpChan - - // Free device memory - go func() { - iciclegnark.FreeDevicePointer(unsafe.Pointer(&cpDeviceValue)) - }() - - return res, nil -} - -// Open computes an opening proof of polynomial p at given point. -// fft.Domain Cardinality must be larger than p.Degree() -func kzgDeviceOpen(p []fr.Element, point fr.Element, pk *ProvingKey) (kzg.OpeningProof, error) { - // build the proof - res := kzg.OpeningProof{ - ClaimedValue: eval(p, point), - } - - // compute H - // h reuses memory from _p - _p := make([]fr.Element, len(p)) - copy(_p, p) - h := dividePolyByXminusA(_p, res.ClaimedValue, point) - - // commit to H - hCommit, err := kzgDeviceCommit(h, pk.deviceInfo.G1Device.G1) - if err != nil { - return kzg.OpeningProof{}, err - } - res.H.Set(&hCommit) - - return res, nil -} - -// dividePolyByXminusA computes (f-f(a))/(x-a), in canonical basis, in regular form -// f memory is re-used for the result -func dividePolyByXminusA(f []fr.Element, fa, a fr.Element) []fr.Element { - - // first we compute f-f(a) - f[0].Sub(&f[0], &fa) - - // now we use synthetic division to divide by x-a - var t fr.Element - for i := len(f) - 2; i >= 0; i-- { - t.Mul(&f[i+1], &a) - - f[i].Add(&f[i], &t) - } - - // the result is of degree deg(f)-1 - return f[1:] -} - -// eval returns p(point) where p is interpreted as a polynomial -// ∑_{i= 0; i-- { - res.Mul(&res, &point).Add(&res, &p[i]) - } - return res -} From 21c2f7dcf74c4670592e3466a735417e3c775973 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Thu, 8 Feb 2024 05:30:31 -0700 Subject: [PATCH 40/58] add wait group to batchApply --- backend/plonk/bn254/icicle/icicle.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 9e320807b6..39b6191a61 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1094,6 +1094,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { m := uint64(s.pk.Domain[1].Cardinality) mm := uint64(64 - bits.TrailingZeros64(m)) + // Set up device pointers to polynomial coefficients var devicePointers []unsafe.Pointer for i := 0; i < 
len(s.x); i++ { sizeBytes := s.x[i].Size() * fr.Bytes @@ -1119,6 +1120,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { acc.Mul(&acc, &shifters[i]) } } + batchTime := time.Now() batchApply(s.x, func(p *iop.Polynomial, pn int) { nbTasks := calculateNbTasks(len(s.x)-1) * 2 @@ -1272,7 +1274,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { _ = <-computeInttNttDone }) - for _, q := range s.bp { scalePowers(q, cs) } @@ -1301,18 +1302,18 @@ func calculateNbTasks(n int) int { // batchApply executes fn on all polynomials in x except x[id_ZS] in parallel. func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) { - //var wg sync.WaitGroup + var wg sync.WaitGroup for i := 0; i < len(x); i++ { if i == id_ZS { continue } - //wg.Add(1) - //go func(i int) { - fn(x[i], i) - // wg.Done() - // }(i) + wg.Add(1) + go func(i int) { + fn(x[i], i) + wg.Done() + }(i) } - // wg.Wait() + wg.Wait() } // p <- From 828e6ec82ad324d7591a10a9148df73689f85803 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Sun, 11 Feb 2024 10:12:00 -0700 Subject: [PATCH 41/58] update scaling vector to device pointers --- backend/plonk/bn254/icicle/icicle.go | 52 +++++++++++++++------------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 39b6191a61..52c0f2f75b 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1034,8 +1034,13 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // wait for init go routine <-s.chNumeratorInit - cosetTable := s.pk.Domain[0].CosetTable - twiddles := s.pk.Domain[1].Twiddles[0][:n] + + // Copy the twiddles slice to the device + sizeBytes := len(s.x[0].Coefficients()) * fr.Bytes + + copyTDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(s.pk.Domain[1].Twiddles[0][:n], sizeBytes, copyTDone) + twiddles := <-copyTDone // init the result polynomial & buffer cres := s.cres @@ -1071,19 +1076,19 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } // select the correct scaling vector to scale by shifter[i] - selectScalingVector := func(i int, l iop.Layout, np int) []fr.Element { - var w []fr.Element + selectScalingVector := func(i int, l iop.Layout, np int) unsafe.Pointer { + var w unsafe.Pointer if i == 0 { if l == iop.Regular { - w = cosetTable + w = s.pk.deviceInfo.DomainDevice.CosetTable } else { - w = s.cosetTableRev + w = s.pk.deviceInfo.DomainDevice.CosetTableInv } } else { if l == iop.Regular { w = twiddles } else { - w = s.twiddlesRev + w = s.pk.deviceInfo.DomainDevice.TwiddlesInv } } return w @@ -1129,11 +1134,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { sizeBytes := p.Size() * fr.Bytes // scale by shifter[i] - w := selectScalingVector(i, p.Layout, pn) - - copyWDone := make(chan unsafe.Pointer, 1) - go iciclegnark.CopyToDevice(w, sizeBytes, copyWDone) - w_device := <-copyWDone + w_device := selectScalingVector(i, p.Layout, pn) // Initialize channels computeInttNttDone := make(chan error, 1) @@ -1189,7 +1190,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { _ = <- computeNttDone } - go iciclegnark.FreeDevicePointer(w_device) }) log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (batchApply)") @@ -1235,23 +1235,25 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } cs.Inverse(&cs) + // send the scale back factor to the GPU + var acc fr.Element + acc.SetOne() + accList := make([]fr.Element, s.x[0].Size()) 
+ for j := 0; j < s.x[0].Size(); j++ { + accList[j].Set(&acc) + acc.Mul(&acc, &cs) + } + + sizeBytes := s.x[0].Size() * fr.Bytes + copyWDone := make(chan unsafe.Pointer, 1) + go iciclegnark.CopyToDevice(accList, sizeBytes, copyWDone) + w_device := <-copyWDone + batchApply(s.x[:id_ZS], func(p *iop.Polynomial, pn int) { // ON Device n := p.Size() sizeBytes := p.Size() * fr.Bytes - var acc fr.Element - acc.SetOne() - accList := make([]fr.Element, p.Size()) - for j := 0; j < p.Size(); j++ { - accList[j].Set(&acc) - acc.Mul(&acc, &cs) - } - - copyWDone := make(chan unsafe.Pointer, 1) - go iciclegnark.CopyToDevice(accList, sizeBytes, copyWDone) - w_device := <-copyWDone - // Initialize channels computeInttNttDone := make(chan error, 1) computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) { From a1f7ff5078fef5675276fadf288d88a388599155 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 12 Feb 2024 07:02:15 -0700 Subject: [PATCH 42/58] remove async waitgroup --- backend/plonk/bn254/icicle/icicle.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 52c0f2f75b..479465c040 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1304,18 +1304,18 @@ func calculateNbTasks(n int) int { // batchApply executes fn on all polynomials in x except x[id_ZS] in parallel. func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) { - var wg sync.WaitGroup + // var wg sync.WaitGroup for i := 0; i < len(x); i++ { if i == id_ZS { continue } - wg.Add(1) - go func(i int) { + //wg.Add(1) + //go func(i int) { fn(x[i], i) - wg.Done() - }(i) - } - wg.Wait() + //wg.Done() + //}(i) + } + //wg.Wait() } // p <- From e3122a85d39e3b2bb53d0f6e462dc5b24644b2d7 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 12 Feb 2024 07:09:45 -0700 Subject: [PATCH 43/58] add debug to first batch apply --- backend/plonk/bn254/icicle/icicle.go | 59 +++++++++++++++++++++++++++- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 479465c040..70e01152d6 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1040,7 +1040,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { copyTDone := make(chan unsafe.Pointer, 1) go iciclegnark.CopyToDevice(s.pk.Domain[1].Twiddles[0][:n], sizeBytes, copyTDone) - twiddles := <-copyTDone + twiddlesPtr := <-copyTDone // init the result polynomial & buffer cres := s.cres @@ -1086,7 +1086,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { } } else { if l == iop.Regular { - w = twiddles + w = twiddlesPtr } else { w = s.pk.deviceInfo.DomainDevice.TwiddlesInv } @@ -1094,6 +1094,32 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { return w } + + // CPU Debug + cosetTable := s.pk.Domain[0].CosetTable + twiddles := s.pk.Domain[1].Twiddles[0][:n] + + // select the correct scaling vector to scale by shifter[i] + selectScalingVectorCpu := func(i int, l iop.Layout) []fr.Element { + var w []fr.Element + if i == 0 { + if l == iop.Regular { + w = cosetTable + } else { + w = s.cosetTableRev + } + } else { + if l == iop.Regular { + w = twiddles + } else { + w = s.twiddlesRev + } + } + return w + } + + + // pre-computed to compute the bit reverse index // of the result polynomial m := uint64(s.pk.Domain[1].Cardinality) @@ -1150,6 +1176,35 @@ func (s *instance) 
computeNumerator() (*iop.Polynomial, error) { res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) + // Debug CPU + p.ToCanonical(&s.pk.Domain[0], nbTasks) + + // scale by shifter[i] + w := selectScalingVectorCpu(i, p.Layout) + + cd := p.Coefficients() + utils.Parallelize(len(cd), func(start, end int) { + for j := start; j < end; j++ { + cd[j].Mul(&cd[j], &w[j]) + } + }, nbTasks) + + // fft in the correct coset + p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + + + isEqual := 0 + notEqual := 0 + for j := 0; j < int(n); j++ { + if res[j] != cd[j] { + notEqual++ + } else { + isEqual++ + } + } + fmt.Println("n", pn,"isEqual", isEqual, "notEqual", notEqual) + + // Copy back to host cp := p.Coefficients() utils.Parallelize(len(cp), func(start, end int) { From 1fcc25d38864db098ba3ec6af0718404fb07e4f5 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 12 Feb 2024 07:11:38 -0700 Subject: [PATCH 44/58] add debug to second batch apply --- backend/plonk/bn254/icicle/icicle.go | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 70e01152d6..93cda22ee3 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1192,7 +1192,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { // fft in the correct coset p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() - isEqual := 0 notEqual := 0 for j := 0; j < int(n); j++ { @@ -1231,6 +1230,33 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes) s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular}) + // Debug CPU + p.ToCanonical(&s.pk.Domain[0], nbTasks) + + // scale by shifter[i] + w := selectScalingVectorCpu(i, p.Layout) + + cd := p.Coefficients() + utils.Parallelize(len(cd), func(start, end int) { + for j := start; j < end; j++ { + cd[j].Mul(&cd[j], &w[j]) + } + }, nbTasks) + + // fft in the correct coset + p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular() + + isEqual := 0 + notEqual := 0 + for j := 0; j < int(n); j++ { + if res[j] != cd[j] { + notEqual++ + } else { + isEqual++ + } + } + fmt.Println("n", pn,"isEqual", isEqual, "notEqual", notEqual) + iciclegnark.MontConvOnDevice(devicePointer, n, false) computeNttDone <- nil From 981d9eacddb06463a70a6e8ad82c292524d8d8a3 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 12 Feb 2024 07:13:18 -0700 Subject: [PATCH 45/58] add debug to third batch apply --- backend/plonk/bn254/icicle/icicle.go | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go index 93cda22ee3..6102e44959 100644 --- a/backend/plonk/bn254/icicle/icicle.go +++ b/backend/plonk/bn254/icicle/icicle.go @@ -1349,6 +1349,21 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) { iciclegnark.MontConvOnDevice(a_intt_d, n, false) + + p.ToCanonical(&s.pk.Domain[0], 8).ToRegular() + scalePowers(p, cs) + cd := p.Coefficients() + isEqual := 0 + notEqual := 0 + for j := 0; j < int(n); j++ { + if res[j] != cd[j] { + notEqual++ + } else { + isEqual++ + } + } + fmt.Println("n", pn,"isEqual", isEqual, "notEqual", notEqual) + computeInttNttDone <- nil iciclegnark.FreeDevicePointer(a_intt_d) } From 7a15cd00b8cc29f078b9bbb2ddb9747f985a88d0 Mon Sep 17 00:00:00 2001 From: bigsky77 Date: Mon, 12 Feb 2024 07:23:41 -0700 Subject: [PATCH 46/58] run shifted opening on cpu 
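The shifted opening of Z is computed on the CPU again (kzg.Open with the in-memory
SRS) while the device opening is being validated; the follow-up commit re-enables
the device call and compares the two proofs. A minimal cross-check in that spirit,
assuming kzg.OnDeviceOpen is the device-side opening exposed by the gnark-crypto
fork pinned in go.mod (illustrative sketch, not the prover code):

    package sketch

    import (
        "fmt"
        "unsafe"

        "github.com/consensys/gnark-crypto/ecc/bn254/fr"
        "github.com/consensys/gnark-crypto/ecc/bn254/kzg"
    )

    // openWithCrossCheck opens p at point on the device, recomputes the
    // opening on the CPU, and falls back to the CPU proof on any mismatch.
    func openWithCrossCheck(p []fr.Element, point fr.Element, cpuKey kzg.ProvingKey, g1Device unsafe.Pointer) (kzg.OpeningProof, error) {
        gpu, err := kzg.OnDeviceOpen(p, point, g1Device)
        if err != nil {
            return kzg.OpeningProof{}, err
        }
        cpu, err := kzg.Open(p, point, cpuKey)
        if err != nil {
            return kzg.OpeningProof{}, err
        }
        if gpu != cpu {
            fmt.Println("device and CPU openings disagree, using the CPU proof")
            return cpu, nil
        }
        return gpu, nil
    }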
---
 backend/plonk/bn254/icicle/icicle.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index 6102e44959..a7cd482e7b 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -756,7 +756,8 @@ func (s *instance) openZ() (err error) {
 	// open z at zeta
 	start := time.Now()
-	s.proof.ZShiftedOpening, err = kzg.OnDeviceOpen(s.blindedZ, zetaShifted, s.pk.deviceInfo.G1Device.G1)
+	//s.proof.ZShiftedOpening, err = kzg.OnDeviceOpen(s.blindedZ, zetaShifted, s.pk.deviceInfo.G1Device.G1)
+	s.proof.ZShiftedOpening, err = kzg.Open(s.blindedZ, zetaShifted, s.pk.Kzg)
 	if err != nil {
 		return err
 	}
@@ -1349,7 +1350,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 			iciclegnark.MontConvOnDevice(a_intt_d, n, false)
-			p.ToCanonical(&s.pk.Domain[0], 8).ToRegular()
 			scalePowers(p, cs)
 			cd := p.Coefficients()

From 185cbadf36d60232025c1a7f8a660d593642339c Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 07:35:39 -0700
Subject: [PATCH 47/58] add log to ZShiftOpening

---
 backend/plonk/bn254/icicle/icicle.go | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index a7cd482e7b..d3ae234710 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -756,8 +756,16 @@ func (s *instance) openZ() (err error) {
 	// open z at zeta
 	start := time.Now()
-	//s.proof.ZShiftedOpening, err = kzg.OnDeviceOpen(s.blindedZ, zetaShifted, s.pk.deviceInfo.G1Device.G1)
-	s.proof.ZShiftedOpening, err = kzg.Open(s.blindedZ, zetaShifted, s.pk.Kzg)
+
+	s.proof.ZShiftedOpening, err = kzg.OnDeviceOpen(s.blindedZ, zetaShifted, s.pk.deviceInfo.G1Device.G1)
+	res, err := kzg.Open(s.blindedZ, zetaShifted, s.pk.Kzg)
+
+	if res != s.proof.ZShiftedOpening {
+		fmt.Println("res != s.proof.ZShiftedOpening")
+	} else {
+		fmt.Println("res == s.proof.ZShiftedOpening")
+	}
+
 	if err != nil {
 		return err
 	}
@@ -1119,8 +1127,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		return w
 	}
-
-
 	// pre-computed to compute the bit reverse index
 	// of the result polynomial
 	m := uint64(s.pk.Domain[1].Cardinality)

From 7efe778a01e852457ff4669482b64f1c99d65f85 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 07:42:42 -0700
Subject: [PATCH 48/58] debug batch open

---
 backend/plonk/bn254/icicle/icicle.go | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index d3ae234710..7b348bfa6e 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -958,6 +958,22 @@ func (s *instance) batchOpening() error {
 		s.pk.deviceInfo.G1Device.G1,
 		s.proof.ZShiftedOpening.ClaimedValue.Marshal(),
 	)
+
+	res, err := kzg.BatchOpenSinglePoint(
+		polysToOpen,
+		digestsToOpen,
+		s.zeta,
+		s.kzgFoldingHash,
+		s.pk.Kzg,
+		s.proof.ZShiftedOpening.ClaimedValue.Marshal(),
+	)
+
+	if res.H != s.proof.BatchedProof.H {
+		fmt.Println("res != s.proof.BatchedProof")
+	} else {
+		fmt.Println("res == s.proof.BatchedProof")
+	}
+
 	log.Debug().Dur("took", time.Since(start)).Msg("MSM (batchOpeningSinglePoint):")

 	return err

From bd486f60acd18ce5bb297d490eaa2cab56e501ad Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 07:45:05 -0700
Subject: [PATCH 49/58] add wait to LRO commitment

---
 backend/plonk/bn254/icicle/icicle.go | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index 7b348bfa6e..aa7ce666d7 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -533,16 +533,19 @@ func (s *instance) commitToLRO() error {
 		s.proof.LRO[0], err = s.commitToPolyAndBlinding(s.x[id_L], s.bp[id_Bl])
 		return
 	})
+	g.Wait()

 	g.Go(func() (err error) {
 		s.proof.LRO[1], err = s.commitToPolyAndBlinding(s.x[id_R], s.bp[id_Br])
 		return
 	})
+	g.Wait()

 	g.Go(func() (err error) {
 		s.proof.LRO[2], err = s.commitToPolyAndBlinding(s.x[id_O], s.bp[id_Bo])
 		return
 	})
+	g.Wait()

 	return g.Wait()
 }
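Note on [PATCH 49/58]: calling g.Wait() immediately after each g.Go effectively serializes the three LRO commitments, so at most one device MSM is in flight at a time. A minimal sketch of an equivalent, explicitly sequential form — illustrative only, assuming commitToPolyAndBlinding keeps the signature used above (polynomial plus blinding polynomial, returning a KZG digest and an error):

	// Sketch, not part of the patch series: commit to l, r, o one at a time
	// so that only one MSM is issued to the GPU at any moment.
	func (s *instance) commitToLROSequential() error {
		var err error
		if s.proof.LRO[0], err = s.commitToPolyAndBlinding(s.x[id_L], s.bp[id_Bl]); err != nil {
			return err
		}
		if s.proof.LRO[1], err = s.commitToPolyAndBlinding(s.x[id_R], s.bp[id_Br]); err != nil {
			return err
		}
		s.proof.LRO[2], err = s.commitToPolyAndBlinding(s.x[id_O], s.bp[id_Bo])
		return err
	}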
From 71cabd57202e507998a2198327e858e9e70a79cf Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 07:48:49 -0700
Subject: [PATCH 50/58] remove old debug statements

---
 backend/plonk/bn254/icicle/icicle.go | 115 ---------------------------
 1 file changed, 115 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index aa7ce666d7..c4b2f29130 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -761,13 +761,6 @@ func (s *instance) openZ() (err error) {
 	start := time.Now()

 	s.proof.ZShiftedOpening, err = kzg.OnDeviceOpen(s.blindedZ, zetaShifted, s.pk.deviceInfo.G1Device.G1)
-	res, err := kzg.Open(s.blindedZ, zetaShifted, s.pk.Kzg)
-
-	if res != s.proof.ZShiftedOpening {
-		fmt.Println("res != s.proof.ZShiftedOpening")
-	} else {
-		fmt.Println("res == s.proof.ZShiftedOpening")
-	}

 	if err != nil {
 		return err
@@ -962,21 +955,6 @@ func (s *instance) batchOpening() error {
 		s.proof.ZShiftedOpening.ClaimedValue.Marshal(),
 	)

-	res, err := kzg.BatchOpenSinglePoint(
-		polysToOpen,
-		digestsToOpen,
-		s.zeta,
-		s.kzgFoldingHash,
-		s.pk.Kzg,
-		s.proof.ZShiftedOpening.ClaimedValue.Marshal(),
-	)
-
-	if res.H != s.proof.BatchedProof.H {
-		fmt.Println("res != s.proof.BatchedProof")
-	} else {
-		fmt.Println("res == s.proof.BatchedProof")
-	}
-
 	log.Debug().Dur("took", time.Since(start)).Msg("MSM (batchOpeningSinglePoint):")

 	return err
@@ -1122,30 +1100,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		return w
 	}
-
-	// CPU Debug
-	cosetTable := s.pk.Domain[0].CosetTable
-	twiddles := s.pk.Domain[1].Twiddles[0][:n]
-
-	// select the correct scaling vector to scale by shifter[i]
-	selectScalingVectorCpu := func(i int, l iop.Layout) []fr.Element {
-		var w []fr.Element
-		if i == 0 {
-			if l == iop.Regular {
-				w = cosetTable
-			} else {
-				w = s.cosetTableRev
-			}
-		} else {
-			if l == iop.Regular {
-				w = twiddles
-			} else {
-				w = s.twiddlesRev
-			}
-		}
-		return w
-	}
-
 	// pre-computed to compute the bit reverse index
 	// of the result polynomial
 	m := uint64(s.pk.Domain[1].Cardinality)
@@ -1202,34 +1156,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {

 		res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes)

-		// Debug CPU
-		p.ToCanonical(&s.pk.Domain[0], nbTasks)
-
-		// scale by shifter[i]
-		w := selectScalingVectorCpu(i, p.Layout)
-
-		cd := p.Coefficients()
-		utils.Parallelize(len(cd), func(start, end int) {
-			for j := start; j < end; j++ {
-				cd[j].Mul(&cd[j], &w[j])
-			}
-		}, nbTasks)
-
-		// fft in the correct coset
-		p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular()
-
-		isEqual := 0
-		notEqual := 0
-		for j := 0; j < int(n); j++ {
-			if res[j] != cd[j] {
-				notEqual++
-			} else {
-				isEqual++
-			}
-		}
-		fmt.Println("n", pn,"isEqual", isEqual, "notEqual", notEqual)
-
-
 		// Copy back to host
 		cp := p.Coefficients()
 		utils.Parallelize(len(cp), func(start, end int) {
@@ -1256,33 +1182,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes)
 		s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular})

-		// Debug CPU
-		p.ToCanonical(&s.pk.Domain[0], nbTasks)
-
-		// scale by shifter[i]
-		w := selectScalingVectorCpu(i, p.Layout)
-
-		cd := p.Coefficients()
-		utils.Parallelize(len(cd), func(start, end int) {
-			for j := start; j < end; j++ {
-				cd[j].Mul(&cd[j], &w[j])
-			}
-		}, nbTasks)
-
-		// fft in the correct coset
-		p.ToLagrange(&s.pk.Domain[0], nbTasks).ToRegular()
-
-		isEqual := 0
-		notEqual := 0
-		for j := 0; j < int(n); j++ {
-			if res[j] != cd[j] {
-				notEqual++
-			} else {
-				isEqual++
-			}
-		}
-		fmt.Println("n", pn,"isEqual", isEqual, "notEqual", notEqual)
-
 		iciclegnark.MontConvOnDevice(devicePointer, n, false)

 		computeNttDone <- nil
@@ -1375,20 +1274,6 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {

 		iciclegnark.MontConvOnDevice(a_intt_d, n, false)

-		p.ToCanonical(&s.pk.Domain[0], 8).ToRegular()
-		scalePowers(p, cs)
-		cd := p.Coefficients()
-		isEqual := 0
-		notEqual := 0
-		for j := 0; j < int(n); j++ {
-			if res[j] != cd[j] {
-				notEqual++
-			} else {
-				isEqual++
-			}
-		}
-		fmt.Println("n", pn,"isEqual", isEqual, "notEqual", notEqual)
-
 		computeInttNttDone <- nil
 		iciclegnark.FreeDevicePointer(a_intt_d)
 	}

From afc5e1efc94aeab001b9a4c78be3e90eb8935edf Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 08:06:07 -0700
Subject: [PATCH 51/58] add async back to wait group

---
 backend/plonk/bn254/icicle/icicle.go | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index c4b2f29130..7c45ce0028 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1082,7 +1082,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 	}

 	// select the correct scaling vector to scale by shifter[i]
-	selectScalingVector := func(i int, l iop.Layout, np int) unsafe.Pointer {
+	selectScalingVector := func(i int, l iop.Layout) unsafe.Pointer {
 		var w unsafe.Pointer
 		if i == 0 {
 			if l == iop.Regular {
@@ -1140,7 +1140,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		sizeBytes := p.Size() * fr.Bytes

 		// scale by shifter[i]
-		w_device := selectScalingVector(i, p.Layout, pn)
+		w_device := selectScalingVector(i, p.Layout)

 		// Initialize channels
 		computeInttNttDone := make(chan error, 1)
@@ -1154,9 +1154,10 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 			iciclegnark.NttOnDevice(devicePointer, a_intt_d, s.pk.deviceInfo.DomainDevice.Twiddles, nil, n, n, sizeBytes, false)
 			iciclegnark.MontConvOnDevice(devicePointer, n, true)

+			// Copy results back to host
 			res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes)

-			// Copy back to host
+			// Set the coefficients of the polynomial
 			cp := p.Coefficients()
 			utils.Parallelize(len(cp), func(start, end int) {
 				for j := start; j < end; j++ {
@@ -1164,6 +1165,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 				}
 			}, nbTasks)

+			// Convert back to Monticarlo form
 			iciclegnark.MontConvOnDevice(devicePointer, n, false)

 			computeNttDone <- nil
@@ -1182,6 +1184,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 			res := iciclegnark.CopyScalarsToHost(devicePointer, n, sizeBytes)
 			s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Lagrange, Layout: iop.Regular})

+			// Convert back to Monticarlo form
 			iciclegnark.MontConvOnDevice(devicePointer, n, false)

 			computeNttDone <- nil
@@ -1269,9 +1272,9 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 			iciclegnark.MontConvOnDevice(a_intt_d, n, true)

 			res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes)
-
 			s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular})

+			// Convert back to Monticarlo form
 			iciclegnark.MontConvOnDevice(a_intt_d, n, false)

 			computeInttNttDone <- nil
@@ -1310,18 +1313,18 @@ func calculateNbTasks(n int) int {

 // batchApply executes fn on all polynomials in x except x[id_ZS] in parallel.
 func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
-	// var wg sync.WaitGroup
+	var wg sync.WaitGroup
 	for i := 0; i < len(x); i++ {
 		if i == id_ZS {
 			continue
 		}
-		//wg.Add(1)
-		//go func(i int) {
+		wg.Add(1)
+		go func(i int) {
 			fn(x[i], i)
-			//wg.Done()
-			//}(i)
+			wg.Done()
+		}(i)
 	}
-	//wg.Wait()
+	wg.Wait()
 }

 // p <-

From 2828af043321fe877e75ccb6b350909764921678 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 08:40:12 -0700
Subject: [PATCH 52/58] lock cuda threads

---
 backend/plonk/bn254/icicle/icicle.go | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index 7c45ce0028..660f5c10d5 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1320,6 +1320,10 @@ func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
 		}
 		wg.Add(1)
 		go func(i int) {
+			// lock thread to prevent weird cuda errors
+			runtime.LockOSThread()
+			defer runtime.UnlockOSThread()
+
 			fn(x[i], i)
 			wg.Done()
 		}(i)
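Note on [PATCH 52/58]: runtime.LockOSThread pins the calling goroutine to its current OS thread until runtime.UnlockOSThread is called (or the goroutine exits). That matters here because CUDA driver state such as the current context and device selection is generally tied to the OS thread that issued the calls, while the Go scheduler is otherwise free to migrate a goroutine between threads mid-sequence. A minimal, self-contained sketch of the pattern — illustrative only; the helper name and fn are placeholders, not part of these patches:

	// Sketch, not part of the patch series: run fn(i) for i = 0..n-1 on
	// separate goroutines, pinning each goroutine to one OS thread for the
	// duration of its CUDA/cgo calls. Requires the runtime and sync packages.
	func applyPinned(n int, fn func(i int)) {
		var wg sync.WaitGroup
		for i := 0; i < n; i++ {
			wg.Add(1)
			go func(i int) {
				defer wg.Done()
				runtime.LockOSThread()         // keep every CUDA call on this OS thread
				defer runtime.UnlockOSThread() // release the thread when the work is done
				fn(i)
			}(i)
		}
		wg.Wait()
	}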
From 630607819d64f5b93b2b67a01f4357d8dc3ee990 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 09:39:09 -0700
Subject: [PATCH 53/58] add wait group

---
 backend/plonk/bn254/icicle/icicle.go | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index 660f5c10d5..a06ab9717b 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1231,7 +1231,8 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 	}

 	// scale everything back
-	go func() {
+	g := new(errgroup.Group)
+	g.Go(func() (err error) {
 		batchTime := time.Now()
 		for i := id_ZS; i < len(s.x); i++ {
 			s.x[i] = nil
@@ -1291,7 +1292,9 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		}
 		log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (Scale back batchApply):")
 		close(s.chRestoreLRO)
-	}()
+		return
+	})
+	g.Wait()

 	// ensure all the goroutines are done
 	wgBuf.Wait()
@@ -1327,7 +1330,7 @@ func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
 			fn(x[i], i)
 			wg.Done()
 		}(i)
-	}
+	}
 	wg.Wait()
 }

From 8e2fd5726651564adae4807e21519988baebdda8 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 09:56:06 -0700
Subject: [PATCH 54/58] move close outside of waitgroup

---
 backend/plonk/bn254/icicle/icicle.go | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index a06ab9717b..c7e4d44263 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1291,11 +1291,13 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		}
 		log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (Scale back batchApply):")
-		close(s.chRestoreLRO)
 		return
 	})
 	g.Wait()

+	// move outside of function to ensure everything is copied back
+	close(s.chRestoreLRO)
+
 	// ensure all the goroutines are done
 	wgBuf.Wait()

From 99db1d6f03ec57706f4bbbdeda3d2768b45a6998 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 10:12:37 -0700
Subject: [PATCH 55/58] run last batch apply on cpu

---
 backend/plonk/bn254/icicle/icicle.go | 25 ++++++++++++++-----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index c7e4d44263..af17e35b4e 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1231,6 +1231,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 	}

 	// scale everything back
+	// not sure if this waitgroup is necessary
 	g := new(errgroup.Group)
 	g.Go(func() (err error) {
 		batchTime := time.Now()
@@ -1261,25 +1262,27 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		batchApply(s.x[:id_ZS], func(p *iop.Polynomial, pn int) {
 			// ON Device
-			n := p.Size()
-			sizeBytes := p.Size() * fr.Bytes
+			//n := p.Size()
+			//sizeBytes := p.Size() * fr.Bytes

 			// Initialize channels
 			computeInttNttDone := make(chan error, 1)
 			computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) {
-				a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false)
+				//a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false)

-				iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n)
-				iciclegnark.MontConvOnDevice(a_intt_d, n, true)
+				//iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n)
+				//iciclegnark.MontConvOnDevice(a_intt_d, n, true)

-				res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes)
-				s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular})
+				//res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes)
+				//s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular})

-				// Convert back to Monticarlo form
-				iciclegnark.MontConvOnDevice(a_intt_d, n, false)
+				//// Convert back to Monticarlo form
+				//iciclegnark.MontConvOnDevice(a_intt_d, n, false)
+				p.ToCanonical(&s.pk.Domain[0], 8).ToRegular()
+				scalePowers(p, cs)

 				computeInttNttDone <- nil
-				iciclegnark.FreeDevicePointer(a_intt_d)
+				//iciclegnark.FreeDevicePointer(a_intt_d)
 			}
 			// Run computeInttNttOnDevice on device
 			go computeInttNttOnDevice(w_device, devicePointers[pn])
@@ -1325,7 +1328,7 @@ func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
 		}
 		wg.Add(1)
 		go func(i int) {
-			// lock thread to prevent weird cuda errors
+			// lock thread to prevent weird cuda errors, not sure if works correctly
 			runtime.LockOSThread()
 			defer runtime.UnlockOSThread()

From 9b5a90fe1b338e2860028f0d045fd0b7a053353c Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 10:42:49 -0700
Subject: [PATCH 56/58] update to correct icicle gnark

---
 backend/plonk/bn254/icicle/icicle.go | 36 +++++++++++-----------------
 go.mod                               |  2 +-
 go.sum                               |  2 ++
 3 files changed, 17 insertions(+), 23 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index af17e35b4e..b28bd7b483 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1231,9 +1231,7 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 	}

 	// scale everything back
-	// not sure if this waitgroup is necessary
-	g := new(errgroup.Group)
-	g.Go(func() (err error) {
+	go func() {
 		batchTime := time.Now()
 		for i := id_ZS; i < len(s.x); i++ {
 			s.x[i] = nil
@@ -1262,27 +1260,25 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		batchApply(s.x[:id_ZS], func(p *iop.Polynomial, pn int) {
 			// ON Device
-			//n := p.Size()
-			//sizeBytes := p.Size() * fr.Bytes
+			n := p.Size()
+			sizeBytes := p.Size() * fr.Bytes

 			// Initialize channels
 			computeInttNttDone := make(chan error, 1)
 			computeInttNttOnDevice := func(scaleVecPtr, devicePointer unsafe.Pointer) {
-				//a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false)
+				a_intt_d := iciclegnark.INttOnDevice(devicePointer, s.pk.deviceInfo.DomainDevice.TwiddlesInv, nil, n, sizeBytes, false)

-				//iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n)
-				//iciclegnark.MontConvOnDevice(a_intt_d, n, true)
+				iciclegnark.VecMulOnDevice(a_intt_d, scaleVecPtr, n)
+				iciclegnark.MontConvOnDevice(a_intt_d, n, true)

-				//res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes)
-				//s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular})
+				res := iciclegnark.CopyScalarsToHost(a_intt_d, n, sizeBytes)
+				s.x[pn] = iop.NewPolynomial(&res, iop.Form{Basis: iop.Canonical, Layout: iop.Regular})

-				//// Convert back to Monticarlo form
-				//iciclegnark.MontConvOnDevice(a_intt_d, n, false)
-				p.ToCanonical(&s.pk.Domain[0], 8).ToRegular()
-				scalePowers(p, cs)
+				// Convert back to Monticarlo form
+				iciclegnark.MontConvOnDevice(a_intt_d, n, false)

 				computeInttNttDone <- nil
-				//iciclegnark.FreeDevicePointer(a_intt_d)
+				iciclegnark.FreeDevicePointer(a_intt_d)
 			}
 			// Run computeInttNttOnDevice on device
 			go computeInttNttOnDevice(w_device, devicePointers[pn])
@@ -1294,12 +1290,8 @@ func (s *instance) computeNumerator() (*iop.Polynomial, error) {
 		}
 		log.Debug().Dur("took", time.Since(batchTime)).Msg("FFT (Scale back batchApply):")
-		return
-	})
-	g.Wait()
-
-	// move outside of function to ensure everything is copied back
-	close(s.chRestoreLRO)
+		close(s.chRestoreLRO)
+	}()

 	// ensure all the goroutines are done
 	wgBuf.Wait()
@@ -1328,7 +1320,7 @@ func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
 		}
 		wg.Add(1)
 		go func(i int) {
-			// lock thread to prevent weird cuda errors, not sure if works correctly
+			// lock thread to prevent weird cuda errors
 			runtime.LockOSThread()
 			defer runtime.UnlockOSThread()

diff --git a/go.mod b/go.mod
index 9d2b7cae87..27ed2d46eb 100644
--- a/go.mod
+++ b/go.mod
@@ -11,7 +11,7 @@ require (
 	github.com/google/go-cmp v0.5.9
 	github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b
 	//github.com/ingonyama-zk/iciclegnark v0.1.0.
-	github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240131141109-5f8923e3fbd5
+	github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240207160517-9558f9ad3baf
 	github.com/leanovate/gopter v0.2.9
 	github.com/rs/zerolog v1.30.0
 	github.com/stretchr/testify v1.8.4
diff --git a/go.sum b/go.sum
index 40c777036f..b4778a24e6 100644
--- a/go.sum
+++ b/go.sum
@@ -21,6 +21,8 @@ github.com/ingonyama-zk/icicle v0.1.0 h1:9zbHaYv8/4g3HWRabBCpeH+64U8GJ99K1qeqE2j
 github.com/ingonyama-zk/icicle v0.1.0/go.mod h1:kAK8/EoN7fUEmakzgZIYdWy1a2rBnpCaZLqSHwZWxEk=
 github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240131141109-5f8923e3fbd5 h1:LZeh9IVCrZ8RiHZy1JuC+kKlTESUENxRrdBHwVBDFZ4=
 github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240131141109-5f8923e3fbd5/go.mod h1:g17CDuMfNBiN4hhZ4aA0rGF24Abv5GBFHJqE7aLxaZQ=
+github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240207160517-9558f9ad3baf h1:Z08V0nMJMwHoa6c4ASysBEZc1UU6GzRxG7XulNUGqHw=
+github.com/ingonyama-zk/iciclegnark v0.1.2-0.20240207160517-9558f9ad3baf/go.mod h1:g17CDuMfNBiN4hhZ4aA0rGF24Abv5GBFHJqE7aLxaZQ=
 github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c=

From b0dc9bf8a622885107f4237c0e2380b9e382fbc7 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 13:30:47 -0700
Subject: [PATCH 57/58] remove wg

---
 backend/plonk/bn254/icicle/icicle.go | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index b28bd7b483..b963985c99 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1313,22 +1313,22 @@ func calculateNbTasks(n int) int {

 // batchApply executes fn on all polynomials in x except x[id_ZS] in parallel.
 func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
-	var wg sync.WaitGroup
+	//var wg sync.WaitGroup
 	for i := 0; i < len(x); i++ {
 		if i == id_ZS {
 			continue
 		}
-		wg.Add(1)
-		go func(i int) {
+		//wg.Add(1)
+		//go func(i int) {
 			// lock thread to prevent weird cuda errors
-			runtime.LockOSThread()
-			defer runtime.UnlockOSThread()
+			//runtime.LockOSThread()
+			//defer runtime.UnlockOSThread()

 			fn(x[i], i)
-			wg.Done()
-		}(i)
+			//wg.Done()
+		// }(i)
 	}
-	wg.Wait()
+	//wg.Wait()
 }

From 3ae088ef5e07ff331f2489b1f0464ed5255fefe0 Mon Sep 17 00:00:00 2001
From: bigsky77
Date: Mon, 12 Feb 2024 13:54:46 -0700
Subject: [PATCH 58/58] working NTT and batch apply

---
 backend/plonk/bn254/icicle/icicle.go | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/backend/plonk/bn254/icicle/icicle.go b/backend/plonk/bn254/icicle/icicle.go
index b963985c99..93740aec2b 100644
--- a/backend/plonk/bn254/icicle/icicle.go
+++ b/backend/plonk/bn254/icicle/icicle.go
@@ -1318,17 +1318,8 @@ func batchApply(x []*iop.Polynomial, fn func(*iop.Polynomial, int)) {
 		if i == id_ZS {
 			continue
 		}
-		//wg.Add(1)
-		//go func(i int) {
-			// lock thread to prevent weird cuda errors
-			//runtime.LockOSThread()
-			//defer runtime.UnlockOSThread()
-
 			fn(x[i], i)
-			//wg.Done()
-		// }(i)
 	}
-	//wg.Wait()
 }

 // p <-