github.com/consensys/gnark-crypto@v0.14.0/internal/generator/kzg/template/kzg.go.tmpl

import (
	"errors"
	"hash"
	"math/big"
	"sync"

	"github.com/consensys/gnark-crypto/ecc"
	"github.com/consensys/gnark-crypto/ecc/{{ .Name }}"
	"github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr"
	"github.com/consensys/gnark-crypto/fiat-shamir"

	"github.com/consensys/gnark-crypto/internal/parallel"
)

var (
	ErrInvalidNbDigests              = errors.New("number of digests is not the same as the number of polynomials")
	ErrZeroNbDigests                 = errors.New("number of digests is zero")
	ErrInvalidPolynomialSize         = errors.New("invalid polynomial size (larger than SRS or == 0)")
	ErrVerifyOpeningProof            = errors.New("can't verify opening proof")
	ErrVerifyBatchOpeningSinglePoint = errors.New("can't verify batch opening proof at single point")
	ErrMinSRSSize                    = errors.New("minimum srs size is 2")
)

// Digest commitment of a polynomial.
type Digest = {{ .CurvePackage }}.G1Affine

// ProvingKey used to create or open commitments
type ProvingKey struct {
	G1 []{{ .CurvePackage }}.G1Affine // [G₁, [α]G₁, [α²]G₁, ... ]
}

// VerifyingKey used to verify opening proofs
type VerifyingKey struct {
	G2 [2]{{ .CurvePackage }}.G2Affine // [G₂, [α]G₂ ]
	G1 {{ .CurvePackage }}.G1Affine
	{{- if (eq .Name "bn254")}}
	Lines [2][2][len({{ .CurvePackage }}.LoopCounter)]{{ .CurvePackage }}.LineEvaluationAff // precomputed pairing lines corresponding to G₂, [α]G₂
	{{- else}}
	Lines [2][2][len({{ .CurvePackage }}.LoopCounter)-1]{{ .CurvePackage }}.LineEvaluationAff // precomputed pairing lines corresponding to G₂, [α]G₂
	{{- end}}
}

// SRS must be computed through MPC and comprises the ProvingKey and the VerifyingKey
type SRS struct {
	Pk ProvingKey
	Vk VerifyingKey
}

// TODO @Tabaie get rid of this and use the polynomial package
// eval returns p(point) where p is interpreted as a polynomial
// ∑_{i<len(p)}p[i]Xⁱ
func eval(p []fr.Element, point fr.Element) fr.Element {
	var res fr.Element
	n := len(p)
	res.Set(&p[n-1])
	for i := n - 2; i >= 0; i-- {
		res.Mul(&res, &point).Add(&res, &p[i])
	}
	return res
}

// NewSRS returns a new SRS using alpha as the randomness source
//
// In production, an SRS generated through MPC should be used.
//
// Set bAlpha = -1 to quickly generate a balanced, valid SRS (useful for benchmarking).
//
// implements io.ReaderFrom and io.WriterTo
func NewSRS(size uint64, bAlpha *big.Int) (*SRS, error) {

	if size < 2 {
		return nil, ErrMinSRSSize
	}
	var srs SRS
	srs.Pk.G1 = make([]{{ .CurvePackage }}.G1Affine, size)

	var alpha fr.Element
	alpha.SetBigInt(bAlpha)

	var bMOne big.Int
	bMOne.SetInt64(-1)

	_, _, gen1Aff, gen2Aff := {{ .CurvePackage }}.Generators()

	// in this case, the SRS is <αⁱ[G₁]> where α is of order 4,
	// so there is no need to run the batch scalar multiplication. We cannot use alpha=1
	// because it distorts the benchmarks (the SRS is not balanced).
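	// Concretely, t below generates the order-4 subgroup of fr, so the SRS entries
	// simply cycle through the four points G₁, [t]G₁, [t²]G₁, [t³]G₁ (hence g[i%4]).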
	if bAlpha.Cmp(&bMOne) == 0 {

		t, err := fr.Generator(4)
		if err != nil {
			return nil, err
		}
		var bt big.Int
		t.BigInt(&bt)

		var g [4]{{ .CurvePackage }}.G1Affine
		g[0] = gen1Aff
		for i := 1; i < 4; i++ {
			g[i].ScalarMultiplication(&g[i-1], &bt)
		}
		parallel.Execute(int(size), func(start, end int) {
			for i := start; i < end; i++ {
				srs.Pk.G1[i] = g[i%4]
			}
		})
		srs.Vk.G1 = gen1Aff
		srs.Vk.G2[0] = gen2Aff
		srs.Vk.G2[1].ScalarMultiplication(&srs.Vk.G2[0], &bt)
		srs.Vk.Lines[0] = {{ .CurvePackage }}.PrecomputeLines(srs.Vk.G2[0])
		srs.Vk.Lines[1] = {{ .CurvePackage }}.PrecomputeLines(srs.Vk.G2[1])
		return &srs, nil
	}
	srs.Pk.G1[0] = gen1Aff
	srs.Vk.G1 = gen1Aff
	srs.Vk.G2[0] = gen2Aff
	srs.Vk.G2[1].ScalarMultiplication(&gen2Aff, bAlpha)
	srs.Vk.Lines[0] = {{ .CurvePackage }}.PrecomputeLines(srs.Vk.G2[0])
	srs.Vk.Lines[1] = {{ .CurvePackage }}.PrecomputeLines(srs.Vk.G2[1])

	alphas := make([]fr.Element, size-1)
	alphas[0] = alpha
	for i := 1; i < len(alphas); i++ {
		alphas[i].Mul(&alphas[i-1], &alpha)
	}
	g1s := {{ .CurvePackage }}.BatchScalarMultiplicationG1(&gen1Aff, alphas)
	copy(srs.Pk.G1[1:], g1s)

	return &srs, nil
}

// OpeningProof KZG proof for opening at a single point.
//
// implements io.ReaderFrom and io.WriterTo
type OpeningProof struct {
	// H quotient polynomial (f - f(z))/(x-z)
	H {{ .CurvePackage }}.G1Affine

	// ClaimedValue purported value
	ClaimedValue fr.Element
}

// BatchOpeningProof opening proof for many polynomials at the same point
//
// implements io.ReaderFrom and io.WriterTo
type BatchOpeningProof struct {
	// H quotient polynomial ∑ᵢ γⁱ (fᵢ - fᵢ(z))/(x-z)
	H {{ .CurvePackage }}.G1Affine

	// ClaimedValues purported values
	ClaimedValues []fr.Element
}

// Commit commits to a polynomial using a multi exponentiation with the SRS.
// It is assumed that the polynomial is in canonical form, in Montgomery form.
func Commit(p []fr.Element, pk ProvingKey, nbTasks ...int) (Digest, error) {

	if len(p) == 0 || len(p) > len(pk.G1) {
		return Digest{}, ErrInvalidPolynomialSize
	}

	var res {{ .CurvePackage }}.G1Affine

	config := ecc.MultiExpConfig{}
	if len(nbTasks) > 0 {
		config.NbTasks = nbTasks[0]
	}
	if _, err := res.MultiExp(pk.G1[:len(p)], p, config); err != nil {
		return Digest{}, err
	}

	return res, nil
}

// Open computes an opening proof of polynomial p at given point.
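// The proof contains the claimed value p(point) together with a commitment to the
// quotient polynomial (p - p(point))/(X - point).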
// fft.Domain Cardinality must be larger than p.Degree()
func Open(p []fr.Element, point fr.Element, pk ProvingKey) (OpeningProof, error) {
	if len(p) == 0 || len(p) > len(pk.G1) {
		return OpeningProof{}, ErrInvalidPolynomialSize
	}

	// build the proof
	res := OpeningProof{
		ClaimedValue: eval(p, point),
	}

	// compute H
	// h reuses the memory of _p
	_p := make([]fr.Element, len(p))
	copy(_p, p)
	h := dividePolyByXminusA(_p, res.ClaimedValue, point)

	// commit to H
	hCommit, err := Commit(h, pk)
	if err != nil {
		return OpeningProof{}, err
	}
	res.H.Set(&hCommit)

	return res, nil
}

// Verify verifies a KZG opening proof at a single point
func Verify(commitment *Digest, proof *OpeningProof, point fr.Element, vk VerifyingKey) error {

	// [f(a)]G₁ + [-a]([H(α)]G₁) = [f(a) - a*H(α)]G₁
	var totalG1 {{ .CurvePackage }}.G1Jac
	var pointNeg fr.Element
	var cmInt, pointInt big.Int
	proof.ClaimedValue.BigInt(&cmInt)
	pointNeg.Neg(&point).BigInt(&pointInt)
	totalG1.JointScalarMultiplication(&vk.G1, &proof.H, &cmInt, &pointInt)

	// [f(a) - a*H(α)]G₁ + [-f(α)]G₁ = [f(a) - f(α) - a*H(α)]G₁
	var commitmentJac {{ .CurvePackage }}.G1Jac
	commitmentJac.FromAffine(commitment)
	totalG1.SubAssign(&commitmentJac)

	// e([f(a) - f(α) - a*H(α)]G₁, G₂) * e([H(α)]G₁, [α]G₂) == 1
	var totalG1Aff {{ .CurvePackage }}.G1Affine
	totalG1Aff.FromJacobian(&totalG1)
	check, err := {{ .CurvePackage }}.PairingCheckFixedQ(
		[]{{ .CurvePackage }}.G1Affine{totalG1Aff, proof.H},
		vk.Lines[:],
	)

	if err != nil {
		return err
	}
	if !check {
		return ErrVerifyOpeningProof
	}
	return nil
}

// BatchOpenSinglePoint creates a batch opening proof at point of a list of polynomials.
// It's an interactive protocol, made non-interactive using Fiat Shamir.
//
// * point is the point at which the polynomials are opened.
// * digests is the list of committed polynomials to open, needed to derive the challenge using Fiat Shamir.
// * polynomials is the list of polynomials to open; they are expected to be of the same size.
// * dataTranscript extra data that might be needed to derive the challenge used for folding
func BatchOpenSinglePoint(polynomials [][]fr.Element, digests []Digest, point fr.Element, hf hash.Hash, pk ProvingKey, dataTranscript ...[]byte) (BatchOpeningProof, error) {

	// check for invalid sizes
	nbDigests := len(digests)
	if nbDigests != len(polynomials) {
		return BatchOpeningProof{}, ErrInvalidNbDigests
	}

	// TODO ensure the polynomials are of the same size
	largestPoly := -1
	for _, p := range polynomials {
		if len(p) == 0 || len(p) > len(pk.G1) {
			return BatchOpeningProof{}, ErrInvalidPolynomialSize
		}
		if len(p) > largestPoly {
			largestPoly = len(p)
		}
	}

	var res BatchOpeningProof

	// compute the purported values
	res.ClaimedValues = make([]fr.Element, len(polynomials))
	var wg sync.WaitGroup
	wg.Add(len(polynomials))
	for i := 0; i < len(polynomials); i++ {
		go func(_i int) {
			res.ClaimedValues[_i] = eval(polynomials[_i], point)
			wg.Done()
		}(i)
	}

	// wait for polynomial evaluations to be completed (res.ClaimedValues)
	wg.Wait()

	// derive the challenge γ, bound to the point and the commitments
	gamma, err := deriveGamma(point, digests, res.ClaimedValues, hf, dataTranscript...)
	if err != nil {
		return BatchOpeningProof{}, err
	}

	// ∑ᵢγⁱfᵢ(a)
	var foldedEvaluations fr.Element
	chSumGammai := make(chan struct{}, 1)
	go func() {
		foldedEvaluations = res.ClaimedValues[nbDigests-1]
		for i := nbDigests - 2; i >= 0; i-- {
			foldedEvaluations.Mul(&foldedEvaluations, &gamma).
				Add(&foldedEvaluations, &res.ClaimedValues[i])
		}
		close(chSumGammai)
	}()

	// compute ∑ᵢγⁱfᵢ
	// note: if we were willing to parallelize this further, we could clone the polynomials and
	// scale them by the powers of γ in parallel, before reducing into foldedPolynomials
	foldedPolynomials := make([]fr.Element, largestPoly)
	copy(foldedPolynomials, polynomials[0])
	gammas := make([]fr.Element, len(polynomials))
	gammas[0] = gamma
	for i := 1; i < len(polynomials); i++ {
		gammas[i].Mul(&gammas[i-1], &gamma)
	}

	for i := 1; i < len(polynomials); i++ {
		i := i
		parallel.Execute(len(polynomials[i]), func(start, end int) {
			var pj fr.Element
			for j := start; j < end; j++ {
				pj.Mul(&polynomials[i][j], &gammas[i-1])
				foldedPolynomials[j].Add(&foldedPolynomials[j], &pj)
			}
		})
	}

	// compute H
	<-chSumGammai
	h := dividePolyByXminusA(foldedPolynomials, foldedEvaluations, point)
	foldedPolynomials = nil // same memory as h

	res.H, err = Commit(h, pk)
	if err != nil {
		return BatchOpeningProof{}, err
	}

	return res, nil
}

// FoldProof folds the digests and the proofs in batchOpeningProof using Fiat Shamir
// to obtain an opening proof at a single point.
//
// * digests list of digests on which batchOpeningProof is based
// * batchOpeningProof opening proof of digests
// * dataTranscript extra data needed to derive the challenge used for folding.
// * returns the folded version of batchOpeningProof and the folded version of the digests
func FoldProof(digests []Digest, batchOpeningProof *BatchOpeningProof, point fr.Element, hf hash.Hash, dataTranscript ...[]byte) (OpeningProof, Digest, error) {

	nbDigests := len(digests)

	// check consistency between the number of claims and the number of digests
	if nbDigests != len(batchOpeningProof.ClaimedValues) {
		return OpeningProof{}, Digest{}, ErrInvalidNbDigests
	}

	// derive the challenge γ, bound to the point and the commitments
	gamma, err := deriveGamma(point, digests, batchOpeningProof.ClaimedValues, hf, dataTranscript...)
	if err != nil {
		return OpeningProof{}, Digest{}, err
	}

	// fold the claimed values and digests
	// gammai = [1,γ,γ²,..,γⁿ⁻¹]
	gammai := make([]fr.Element, nbDigests)
	gammai[0].SetOne()
	if nbDigests > 1 {
		gammai[1] = gamma
	}
	for i := 2; i < nbDigests; i++ {
		gammai[i].Mul(&gammai[i-1], &gamma)
	}

	foldedDigests, foldedEvaluations, err := fold(digests, batchOpeningProof.ClaimedValues, gammai)
	if err != nil {
		return OpeningProof{}, Digest{}, err
	}

	// create the folded opening proof
	var res OpeningProof
	res.ClaimedValue.Set(&foldedEvaluations)
	res.H.Set(&batchOpeningProof.H)

	return res, foldedDigests, nil
}

// BatchVerifySinglePoint verifies a batched opening proof at a single point of a list of polynomials.
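// It folds the digests and the batch proof with FoldProof, then checks the folded proof with Verify.
//
// A minimal usage sketch (illustrative only; polys, digests, point, hf and srs are
// assumed to have been set up by the caller):
//
//	batchProof, _ := BatchOpenSinglePoint(polys, digests, point, hf, srs.Pk)
//	err := BatchVerifySinglePoint(digests, &batchProof, point, hf, srs.Vk)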
//
// * digests list of digests on which the opening proof is done
// * batchOpeningProof proof of correct opening on the digests
// * dataTranscript extra data that might be needed to derive the challenge used for the folding
func BatchVerifySinglePoint(digests []Digest, batchOpeningProof *BatchOpeningProof, point fr.Element, hf hash.Hash, vk VerifyingKey, dataTranscript ...[]byte) error {

	// fold the proof
	foldedProof, foldedDigest, err := FoldProof(digests, batchOpeningProof, point, hf, dataTranscript...)
	if err != nil {
		return err
	}

	// verify the foldedProof against the foldedDigest
	err = Verify(&foldedDigest, &foldedProof, point, vk)
	return err

}

// BatchVerifyMultiPoints batch verifies a list of opening proofs at different points.
// The purpose of the batching is to have only one pairing for verifying several proofs.
//
// * digests list of committed polynomials
// * proofs list of opening proofs, one for each digest
// * points the list of points at which the openings are done
func BatchVerifyMultiPoints(digests []Digest, proofs []OpeningProof, points []fr.Element, vk VerifyingKey) error {

	// check consistency: number of proofs vs number of digests
	if len(digests) != len(proofs) || len(digests) != len(points) {
		return ErrInvalidNbDigests
	}

	// len(digests) should be nonzero because of randomNumbers
	if len(digests) == 0 {
		return ErrZeroNbDigests
	}

	// if only one digest, call Verify
	if len(digests) == 1 {
		return Verify(&digests[0], &proofs[0], points[0], vk)
	}

	// sample random scalars λᵢ for the folding
	randomNumbers := make([]fr.Element, len(digests))
	randomNumbers[0].SetOne()
	for i := 1; i < len(randomNumbers); i++ {
		_, err := randomNumbers[i].SetRandom()
		if err != nil {
			return err
		}
	}

	// fold the committed quotients: compute ∑ᵢλᵢ[Hᵢ(α)]G₁
	var foldedQuotients {{ .CurvePackage }}.G1Affine
	quotients := make([]{{ .CurvePackage }}.G1Affine, len(proofs))
	for i := 0; i < len(randomNumbers); i++ {
		quotients[i].Set(&proofs[i].H)
	}
	config := ecc.MultiExpConfig{}
	if _, err := foldedQuotients.MultiExp(quotients, randomNumbers, config); err != nil {
		return err
	}

	// fold digests and evals
	evals := make([]fr.Element, len(digests))
	for i := 0; i < len(randomNumbers); i++ {
		evals[i].Set(&proofs[i].ClaimedValue)
	}

	// fold the digests: ∑ᵢλᵢ[fᵢ(α)]G₁
	// fold the evals:   ∑ᵢλᵢfᵢ(aᵢ)
	foldedDigests, foldedEvals, err := fold(digests, evals, randomNumbers)
	if err != nil {
		return err
	}

	// compute the commitment to the folded eval: [∑ᵢλᵢfᵢ(aᵢ)]G₁
	var foldedEvalsCommit {{ .CurvePackage }}.G1Affine
	var foldedEvalsBigInt big.Int
	foldedEvals.BigInt(&foldedEvalsBigInt)
	foldedEvalsCommit.ScalarMultiplication(&vk.G1, &foldedEvalsBigInt)

	// compute foldedDigests = ∑ᵢλᵢ[fᵢ(α)]G₁ - [∑ᵢλᵢfᵢ(aᵢ)]G₁
	foldedDigests.Sub(&foldedDigests, &foldedEvalsCommit)

	// combine the points and the quotients using λᵢ:
	// ∑ᵢλᵢ[pᵢ]([Hᵢ(α)]G₁)
	var foldedPointsQuotients {{ .CurvePackage }}.G1Affine
	for i := 0; i < len(randomNumbers); i++ {
		randomNumbers[i].Mul(&randomNumbers[i], &points[i])
	}
	_, err = foldedPointsQuotients.MultiExp(quotients, randomNumbers, config)
	if err != nil {
		return err
	}

	// ∑ᵢλᵢ[fᵢ(α)]G₁ - [∑ᵢλᵢfᵢ(aᵢ)]G₁ + ∑ᵢλᵢ[pᵢ]([Hᵢ(α)]G₁)
	// = [∑ᵢλᵢfᵢ(α) - ∑ᵢλᵢfᵢ(aᵢ) + ∑ᵢλᵢpᵢHᵢ(α)]G₁
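	// this folded point is the G₁ input paired with G₂ in the final pairing check below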
	foldedDigests.Add(&foldedDigests, &foldedPointsQuotients)

	// -∑ᵢλᵢ[Hᵢ(α)]G₁
	foldedQuotients.Neg(&foldedQuotients)

	// pairing check
	// e([∑ᵢλᵢ(fᵢ(α) - fᵢ(pᵢ) + pᵢHᵢ(α))]G₁, G₂) * e(-∑ᵢλᵢ[Hᵢ(α)]G₁, [α]G₂) == 1
	check, err := {{ .CurvePackage }}.PairingCheckFixedQ(
		[]{{ .CurvePackage }}.G1Affine{foldedDigests, foldedQuotients},
		vk.Lines[:],
	)
	if err != nil {
		return err
	}
	if !check {
		return ErrVerifyOpeningProof
	}
	return nil

}

// fold folds digests and evaluations using the list of factors as random numbers.
//
// * digests list of digests to fold
// * evaluations list of evaluations to fold
// * factors list of multiplicative factors used for the folding (in Montgomery form)
//
// * Returns ∑ᵢcᵢdᵢ, ∑ᵢcᵢfᵢ(aᵢ)
func fold(di []Digest, fai []fr.Element, ci []fr.Element) (Digest, fr.Element, error) {

	// checking the length consistency between digests and evaluations should have been done before calling this function
	nbDigests := len(di)

	// fold the claimed values ∑ᵢcᵢfᵢ(aᵢ)
	var foldedEvaluations, tmp fr.Element
	for i := 0; i < nbDigests; i++ {
		tmp.Mul(&fai[i], &ci[i])
		foldedEvaluations.Add(&foldedEvaluations, &tmp)
	}

	// fold the digests ∑ᵢ[cᵢ]([fᵢ(α)]G₁)
	var foldedDigests Digest
	_, err := foldedDigests.MultiExp(di, ci, ecc.MultiExpConfig{})
	if err != nil {
		return foldedDigests, foldedEvaluations, err
	}

	// folding done
	return foldedDigests, foldedEvaluations, nil

}

// deriveGamma derives a challenge using Fiat Shamir to fold proofs.
func deriveGamma(point fr.Element, digests []Digest, claimedValues []fr.Element, hf hash.Hash, dataTranscript ...[]byte) (fr.Element, error) {

	// derive the challenge gamma, bound to the point and the commitments
	fs := fiatshamir.NewTranscript(hf, "gamma")
	if err := fs.Bind("gamma", point.Marshal()); err != nil {
		return fr.Element{}, err
	}
	for i := range digests {
		if err := fs.Bind("gamma", digests[i].Marshal()); err != nil {
			return fr.Element{}, err
		}
	}
	for i := range claimedValues {
		if err := fs.Bind("gamma", claimedValues[i].Marshal()); err != nil {
			return fr.Element{}, err
		}
	}

	for i := 0; i < len(dataTranscript); i++ {
		if err := fs.Bind("gamma", dataTranscript[i]); err != nil {
			return fr.Element{}, err
		}
	}

	gammaByte, err := fs.ComputeChallenge("gamma")
	if err != nil {
		return fr.Element{}, err
	}
	var gamma fr.Element
	gamma.SetBytes(gammaByte)

	return gamma, nil
}

// dividePolyByXminusA computes (f-f(a))/(x-a), in canonical basis, in regular form
// f's memory is reused for the result
func dividePolyByXminusA(f []fr.Element, fa, a fr.Element) []fr.Element {

	// first we compute f-f(a)
	f[0].Sub(&f[0], &fa)

	// now we use synthetic division to divide by x-a
	var t fr.Element
	for i := len(f) - 2; i >= 0; i-- {
		t.Mul(&f[i+1], &a)

		f[i].Add(&f[i], &t)
	}

	// the result is of degree deg(f)-1
	return f[1:]
}
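
// The sketch below shows how the functions above fit together for one commit/open/verify
// round. It is illustrative only: the SRS size (64), the choice bAlpha = -1 and the
// evaluation point are arbitrary, and a production SRS must come from an MPC ceremony.
//
//	srs, _ := NewSRS(64, big.NewInt(-1)) // test/benchmark SRS only
//	p := make([]fr.Element, 42)          // coefficients, constant term first
//	for i := range p {
//		p[i].SetRandom()
//	}
//	digest, _ := Commit(p, srs.Pk)
//	var point fr.Element
//	point.SetInt64(7)
//	proof, _ := Open(p, point, srs.Pk)
//	if err := Verify(&digest, &proof, point, srs.Vk); err != nil {
//		// the opening proof is invalid
//	}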