github.com/consensys/gnark-crypto@v0.14.0/ecc/bls12-377/kzg/kzg.go

// Copyright 2020 Consensys Software Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Code generated by consensys/gnark-crypto DO NOT EDIT

package kzg

import (
	"errors"
	"hash"
	"math/big"
	"sync"

	"github.com/consensys/gnark-crypto/ecc"
	"github.com/consensys/gnark-crypto/ecc/bls12-377"
	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
	"github.com/consensys/gnark-crypto/fiat-shamir"

	"github.com/consensys/gnark-crypto/internal/parallel"
)

var (
	ErrInvalidNbDigests              = errors.New("number of digests is not the same as the number of polynomials")
	ErrZeroNbDigests                 = errors.New("number of digests is zero")
	ErrInvalidPolynomialSize         = errors.New("invalid polynomial size (larger than SRS or == 0)")
	ErrVerifyOpeningProof            = errors.New("can't verify opening proof")
	ErrVerifyBatchOpeningSinglePoint = errors.New("can't verify batch opening proof at single point")
	ErrMinSRSSize                    = errors.New("minimum srs size is 2")
)

// Digest is the commitment of a polynomial.
type Digest = bls12377.G1Affine

// ProvingKey is used to create or open commitments.
type ProvingKey struct {
	G1 []bls12377.G1Affine // [G₁, [α]G₁, [α²]G₁, ... ]
}

// VerifyingKey is used to verify opening proofs.
type VerifyingKey struct {
	G2    [2]bls12377.G2Affine // [G₂, [α]G₂]
	G1    bls12377.G1Affine
	Lines [2][2][len(bls12377.LoopCounter) - 1]bls12377.LineEvaluationAff // precomputed pairing lines corresponding to G₂, [α]G₂
}

// SRS must be computed through MPC and comprises the ProvingKey and the VerifyingKey.
type SRS struct {
	Pk ProvingKey
	Vk VerifyingKey
}

// TODO @Tabaie get rid of this and use the polynomial package
// eval returns p(point) where p is interpreted as a polynomial
// ∑_{i<len(p)} p[i]·Xⁱ
func eval(p []fr.Element, point fr.Element) fr.Element {
	var res fr.Element
	n := len(p)
	res.Set(&p[n-1])
	for i := n - 2; i >= 0; i-- {
		res.Mul(&res, &point).Add(&res, &p[i])
	}
	return res
}

// NewSRS returns a new SRS using bAlpha as the randomness source.
//
// In production, an SRS generated through MPC should be used.
//
// Set bAlpha = -1 to quickly generate a balanced, valid SRS (useful for benchmarking).
//
// implements io.ReaderFrom and io.WriterTo
func NewSRS(size uint64, bAlpha *big.Int) (*SRS, error) {

	if size < 2 {
		return nil, ErrMinSRSSize
	}
	var srs SRS
	srs.Pk.G1 = make([]bls12377.G1Affine, size)

	var alpha fr.Element
	alpha.SetBigInt(bAlpha)

	var bMOne big.Int
	bMOne.SetInt64(-1)

	_, _, gen1Aff, gen2Aff := bls12377.Generators()

	// in this case, the SRS is <[αⁱ]G₁> where α is of order 4,
	// so there is no need to run the batch scalar multiplication. We cannot use α = 1
	// because it skews the benchmarks (the SRS is not balanced).
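	// Since α then satisfies α⁴ = 1, the powers [αⁱ]G₁ repeat with period 4:
	// only four distinct G₁ points are computed below and the proving key is
	// filled by cycling through them, while the verifying key stores [α]G₂ as usual.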
	if bAlpha.Cmp(&bMOne) == 0 {

		t, err := fr.Generator(4)
		if err != nil {
			return nil, err
		}
		var bt big.Int
		t.BigInt(&bt)

		var g [4]bls12377.G1Affine
		g[0] = gen1Aff
		for i := 1; i < 4; i++ {
			g[i].ScalarMultiplication(&g[i-1], &bt)
		}
		parallel.Execute(int(size), func(start, end int) {
			for i := start; i < end; i++ {
				srs.Pk.G1[i] = g[i%4]
			}
		})
		srs.Vk.G1 = gen1Aff
		srs.Vk.G2[0] = gen2Aff
		srs.Vk.G2[1].ScalarMultiplication(&srs.Vk.G2[0], &bt)
		srs.Vk.Lines[0] = bls12377.PrecomputeLines(srs.Vk.G2[0])
		srs.Vk.Lines[1] = bls12377.PrecomputeLines(srs.Vk.G2[1])
		return &srs, nil
	}
	srs.Pk.G1[0] = gen1Aff
	srs.Vk.G1 = gen1Aff
	srs.Vk.G2[0] = gen2Aff
	srs.Vk.G2[1].ScalarMultiplication(&gen2Aff, bAlpha)
	srs.Vk.Lines[0] = bls12377.PrecomputeLines(srs.Vk.G2[0])
	srs.Vk.Lines[1] = bls12377.PrecomputeLines(srs.Vk.G2[1])

	alphas := make([]fr.Element, size-1)
	alphas[0] = alpha
	for i := 1; i < len(alphas); i++ {
		alphas[i].Mul(&alphas[i-1], &alpha)
	}
	g1s := bls12377.BatchScalarMultiplicationG1(&gen1Aff, alphas)
	copy(srs.Pk.G1[1:], g1s)

	return &srs, nil
}

// OpeningProof is a KZG proof for opening at a single point.
//
// implements io.ReaderFrom and io.WriterTo
type OpeningProof struct {
	// H quotient polynomial (f - f(z))/(X - z)
	H bls12377.G1Affine

	// ClaimedValue purported value
	ClaimedValue fr.Element
}

// BatchOpeningProof is an opening proof for many polynomials at the same point.
//
// implements io.ReaderFrom and io.WriterTo
type BatchOpeningProof struct {
	// H quotient polynomial ∑ᵢ γⁱ·(fᵢ - fᵢ(z))/(X - z)
	H bls12377.G1Affine

	// ClaimedValues purported values
	ClaimedValues []fr.Element
}

// Commit commits to a polynomial using a multi exponentiation with the SRS.
// It is assumed that the polynomial is in canonical basis, with coefficients in Montgomery form.
func Commit(p []fr.Element, pk ProvingKey, nbTasks ...int) (Digest, error) {

	if len(p) == 0 || len(p) > len(pk.G1) {
		return Digest{}, ErrInvalidPolynomialSize
	}

	var res bls12377.G1Affine

	config := ecc.MultiExpConfig{}
	if len(nbTasks) > 0 {
		config.NbTasks = nbTasks[0]
	}
	if _, err := res.MultiExp(pk.G1[:len(p)], p, config); err != nil {
		return Digest{}, err
	}

	return res, nil
}
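
// commitSketch is a minimal, hypothetical usage sketch and is not part of the
// generated API: it derives a toy SRS from a hard-coded, insecure alpha and
// commits to a small polynomial. In production the SRS must come from an MPC
// ceremony, never from a known scalar.
func commitSketch() (Digest, error) {
	// toy randomness, for illustration only
	srs, err := NewSRS(8, big.NewInt(42))
	if err != nil {
		return Digest{}, err
	}

	// p(X) = 1 + 2X + 3X², coefficients set via SetUint64 (Montgomery form)
	p := make([]fr.Element, 3)
	p[0].SetUint64(1)
	p[1].SetUint64(2)
	p[2].SetUint64(3)

	return Commit(p, srs.Pk)
}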

// Open computes an opening proof of polynomial p at the given point.
// len(p) must be in [1, len(pk.G1)].
func Open(p []fr.Element, point fr.Element, pk ProvingKey) (OpeningProof, error) {
	if len(p) == 0 || len(p) > len(pk.G1) {
		return OpeningProof{}, ErrInvalidPolynomialSize
	}

	// build the proof
	res := OpeningProof{
		ClaimedValue: eval(p, point),
	}

	// compute H
	// h reuses memory from _p
	_p := make([]fr.Element, len(p))
	copy(_p, p)
	h := dividePolyByXminusA(_p, res.ClaimedValue, point)

	// commit to H
	hCommit, err := Commit(h, pk)
	if err != nil {
		return OpeningProof{}, err
	}
	res.H.Set(&hCommit)

	return res, nil
}

// Verify verifies a KZG opening proof at a single point.
func Verify(commitment *Digest, proof *OpeningProof, point fr.Element, vk VerifyingKey) error {

	// [f(a)]G₁ + [-a]([H(α)]G₁) = [f(a) - a·H(α)]G₁
	var totalG1 bls12377.G1Jac
	var pointNeg fr.Element
	var cmInt, pointInt big.Int
	proof.ClaimedValue.BigInt(&cmInt)
	pointNeg.Neg(&point).BigInt(&pointInt)
	totalG1.JointScalarMultiplication(&vk.G1, &proof.H, &cmInt, &pointInt)

	// [f(a) - a·H(α)]G₁ + [-f(α)]G₁ = [f(a) - f(α) - a·H(α)]G₁
	var commitmentJac bls12377.G1Jac
	commitmentJac.FromAffine(commitment)
	totalG1.SubAssign(&commitmentJac)

	// e([f(a) - f(α) - a·H(α)]G₁, G₂) · e([H(α)]G₁, [α]G₂) == 1
	var totalG1Aff bls12377.G1Affine
	totalG1Aff.FromJacobian(&totalG1)
	check, err := bls12377.PairingCheckFixedQ(
		[]bls12377.G1Affine{totalG1Aff, proof.H},
		vk.Lines[:],
	)

	if err != nil {
		return err
	}
	if !check {
		return ErrVerifyOpeningProof
	}
	return nil
}
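
// openAndVerifySketch is a hypothetical round-trip sketch, not part of the
// generated code: given an SRS whose proving key is large enough for p, it
// commits to p, opens the commitment at point, and checks the proof.
func openAndVerifySketch(p []fr.Element, point fr.Element, srs *SRS) error {
	digest, err := Commit(p, srs.Pk)
	if err != nil {
		return err
	}

	proof, err := Open(p, point, srs.Pk)
	if err != nil {
		return err
	}

	// proof.ClaimedValue is p(point); Verify checks it against the commitment
	return Verify(&digest, &proof, point, srs.Vk)
}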

// BatchOpenSinglePoint creates a batch opening proof at point for a list of polynomials.
// It's an interactive protocol, made non-interactive using Fiat-Shamir.
//
// * point is the point at which the polynomials are opened.
// * digests is the list of commitments to the polynomials to open; they are needed to derive the challenge using Fiat-Shamir.
// * polynomials is the list of polynomials to open; they are expected to be of the same size.
// * dataTranscript is extra data that might be needed to derive the challenge used for folding.
func BatchOpenSinglePoint(polynomials [][]fr.Element, digests []Digest, point fr.Element, hf hash.Hash, pk ProvingKey, dataTranscript ...[]byte) (BatchOpeningProof, error) {

	// check for invalid sizes
	nbDigests := len(digests)
	if nbDigests != len(polynomials) {
		return BatchOpeningProof{}, ErrInvalidNbDigests
	}

	// TODO ensure the polynomials are of the same size
	largestPoly := -1
	for _, p := range polynomials {
		if len(p) == 0 || len(p) > len(pk.G1) {
			return BatchOpeningProof{}, ErrInvalidPolynomialSize
		}
		if len(p) > largestPoly {
			largestPoly = len(p)
		}
	}

	var res BatchOpeningProof

	// compute the purported values
	res.ClaimedValues = make([]fr.Element, len(polynomials))
	var wg sync.WaitGroup
	wg.Add(len(polynomials))
	for i := 0; i < len(polynomials); i++ {
		go func(_i int) {
			res.ClaimedValues[_i] = eval(polynomials[_i], point)
			wg.Done()
		}(i)
	}

	// wait for polynomial evaluations to be completed (res.ClaimedValues)
	wg.Wait()

	// derive the challenge γ, bound to the point and the commitments
	gamma, err := deriveGamma(point, digests, res.ClaimedValues, hf, dataTranscript...)
	if err != nil {
		return BatchOpeningProof{}, err
	}

	// ∑ᵢγⁱfᵢ(a)
	var foldedEvaluations fr.Element
	chSumGammai := make(chan struct{}, 1)
	go func() {
		foldedEvaluations = res.ClaimedValues[nbDigests-1]
		for i := nbDigests - 2; i >= 0; i-- {
			foldedEvaluations.Mul(&foldedEvaluations, &gamma).
				Add(&foldedEvaluations, &res.ClaimedValues[i])
		}
		close(chSumGammai)
	}()

	// compute ∑ᵢγⁱfᵢ
	// note: if we wanted to parallelize this further, we could clone the polynomials and
	// scale them by γⁱ in parallel, before reducing into foldedPolynomials
	foldedPolynomials := make([]fr.Element, largestPoly)
	copy(foldedPolynomials, polynomials[0])
	gammas := make([]fr.Element, len(polynomials))
	gammas[0] = gamma
	for i := 1; i < len(polynomials); i++ {
		gammas[i].Mul(&gammas[i-1], &gamma)
	}

	for i := 1; i < len(polynomials); i++ {
		i := i
		parallel.Execute(len(polynomials[i]), func(start, end int) {
			var pj fr.Element
			for j := start; j < end; j++ {
				pj.Mul(&polynomials[i][j], &gammas[i-1])
				foldedPolynomials[j].Add(&foldedPolynomials[j], &pj)
			}
		})
	}

	// compute H
	<-chSumGammai
	h := dividePolyByXminusA(foldedPolynomials, foldedEvaluations, point)
	foldedPolynomials = nil // same memory as h

	res.H, err = Commit(h, pk)
	if err != nil {
		return BatchOpeningProof{}, err
	}

	return res, nil
}

// FoldProof folds the digests and the claimed values in batchOpeningProof using Fiat-Shamir
// to obtain an opening proof at a single point.
//
// * digests is the list of digests on which batchOpeningProof is based.
// * batchOpeningProof is the opening proof of digests.
// * dataTranscript is extra data needed to derive the challenge used for folding.
// * returns the folded version of batchOpeningProof and the folded version of digests.
func FoldProof(digests []Digest, batchOpeningProof *BatchOpeningProof, point fr.Element, hf hash.Hash, dataTranscript ...[]byte) (OpeningProof, Digest, error) {

	nbDigests := len(digests)

	// check consistency between the number of claimed values and the number of digests
	if nbDigests != len(batchOpeningProof.ClaimedValues) {
		return OpeningProof{}, Digest{}, ErrInvalidNbDigests
	}

	// derive the challenge γ, bound to the point and the commitments
	gamma, err := deriveGamma(point, digests, batchOpeningProof.ClaimedValues, hf, dataTranscript...)
	if err != nil {
		return OpeningProof{}, Digest{}, err
	}

	// fold the claimed values and digests
	// gammai = [1, γ, γ², .., γⁿ⁻¹]
	gammai := make([]fr.Element, nbDigests)
	gammai[0].SetOne()
	if nbDigests > 1 {
		gammai[1] = gamma
	}
	for i := 2; i < nbDigests; i++ {
		gammai[i].Mul(&gammai[i-1], &gamma)
	}

	foldedDigests, foldedEvaluations, err := fold(digests, batchOpeningProof.ClaimedValues, gammai)
	if err != nil {
		return OpeningProof{}, Digest{}, err
	}

	// create the folded opening proof
	var res OpeningProof
	res.ClaimedValue.Set(&foldedEvaluations)
	res.H.Set(&batchOpeningProof.H)

	return res, foldedDigests, nil
}
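
// batchSinglePointSketch is a hypothetical sketch, not part of the generated
// code: it commits to each polynomial, opens them all at the same point in a
// single batch proof, and verifies the batch. hf is the hash used for the
// Fiat-Shamir transcript (e.g. sha256.New()).
func batchSinglePointSketch(polys [][]fr.Element, point fr.Element, srs *SRS, hf hash.Hash) error {
	digests := make([]Digest, len(polys))
	for i := range polys {
		d, err := Commit(polys[i], srs.Pk)
		if err != nil {
			return err
		}
		digests[i] = d
	}

	proof, err := BatchOpenSinglePoint(polys, digests, point, hf, srs.Pk)
	if err != nil {
		return err
	}

	// BatchVerifySinglePoint folds the proof with Fiat-Shamir and calls Verify
	return BatchVerifySinglePoint(digests, &proof, point, hf, srs.Vk)
}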

// BatchVerifySinglePoint verifies a batched opening proof at a single point for a list of polynomials.
//
// * digests is the list of digests on which the opening proof is done.
// * batchOpeningProof is the proof of correct opening on the digests.
// * dataTranscript is extra data that might be needed to derive the challenge used for the folding.
func BatchVerifySinglePoint(digests []Digest, batchOpeningProof *BatchOpeningProof, point fr.Element, hf hash.Hash, vk VerifyingKey, dataTranscript ...[]byte) error {

	// fold the proof
	foldedProof, foldedDigest, err := FoldProof(digests, batchOpeningProof, point, hf, dataTranscript...)
	if err != nil {
		return err
	}

	// verify the foldedProof against the foldedDigest
	return Verify(&foldedDigest, &foldedProof, point, vk)
}
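
// multiPointSketch is a hypothetical sketch, not part of the generated code:
// it opens each polynomial at its own point and then verifies all the opening
// proofs with a single pairing check via BatchVerifyMultiPoints. polys and
// points are assumed to have the same length.
func multiPointSketch(polys [][]fr.Element, points []fr.Element, srs *SRS) error {
	digests := make([]Digest, len(polys))
	proofs := make([]OpeningProof, len(polys))

	for i := range polys {
		var err error
		if digests[i], err = Commit(polys[i], srs.Pk); err != nil {
			return err
		}
		if proofs[i], err = Open(polys[i], points[i], srs.Pk); err != nil {
			return err
		}
	}

	// one multi-pairing instead of one pairing check per proof
	return BatchVerifyMultiPoints(digests, proofs, points, srs.Vk)
}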

// BatchVerifyMultiPoints batch-verifies a list of opening proofs at different points.
// The purpose of the batching is to have only one pairing check for verifying several proofs.
//
// * digests is the list of committed polynomials.
// * proofs is the list of opening proofs, one for each digest.
// * points is the list of points at which the openings are done.
func BatchVerifyMultiPoints(digests []Digest, proofs []OpeningProof, points []fr.Element, vk VerifyingKey) error {

	// check consistency: number of proofs vs number of digests
	if len(digests) != len(proofs) || len(digests) != len(points) {
		return ErrInvalidNbDigests
	}

	// len(digests) should be nonzero because of randomNumbers
	if len(digests) == 0 {
		return ErrZeroNbDigests
	}

	// if only one digest, call Verify
	if len(digests) == 1 {
		return Verify(&digests[0], &proofs[0], points[0], vk)
	}

	// sample random numbers λᵢ for the random linear combination
	randomNumbers := make([]fr.Element, len(digests))
	randomNumbers[0].SetOne()
	for i := 1; i < len(randomNumbers); i++ {
		_, err := randomNumbers[i].SetRandom()
		if err != nil {
			return err
		}
	}

	// fold the committed quotients: compute ∑ᵢλᵢ[Hᵢ(α)]G₁
	var foldedQuotients bls12377.G1Affine
	quotients := make([]bls12377.G1Affine, len(proofs))
	for i := 0; i < len(randomNumbers); i++ {
		quotients[i].Set(&proofs[i].H)
	}
	config := ecc.MultiExpConfig{}
	if _, err := foldedQuotients.MultiExp(quotients, randomNumbers, config); err != nil {
		return err
	}

	// fold digests and evals
	evals := make([]fr.Element, len(digests))
	for i := 0; i < len(randomNumbers); i++ {
		evals[i].Set(&proofs[i].ClaimedValue)
	}

	// fold the digests: ∑ᵢλᵢ[fᵢ(α)]G₁
	// fold the evals:   ∑ᵢλᵢfᵢ(aᵢ)
	foldedDigests, foldedEvals, err := fold(digests, evals, randomNumbers)
	if err != nil {
		return err
	}

	// compute commitment to the folded eval [∑ᵢλᵢfᵢ(aᵢ)]G₁
	var foldedEvalsCommit bls12377.G1Affine
	var foldedEvalsBigInt big.Int
	foldedEvals.BigInt(&foldedEvalsBigInt)
	foldedEvalsCommit.ScalarMultiplication(&vk.G1, &foldedEvalsBigInt)

	// compute foldedDigests = ∑ᵢλᵢ[fᵢ(α)]G₁ - [∑ᵢλᵢfᵢ(aᵢ)]G₁
	foldedDigests.Sub(&foldedDigests, &foldedEvalsCommit)

	// combine the points and the quotients using λᵢ
	// ∑ᵢλᵢ[aᵢ]([Hᵢ(α)]G₁)
	var foldedPointsQuotients bls12377.G1Affine
	for i := 0; i < len(randomNumbers); i++ {
		randomNumbers[i].Mul(&randomNumbers[i], &points[i])
	}
	_, err = foldedPointsQuotients.MultiExp(quotients, randomNumbers, config)
	if err != nil {
		return err
	}

	// ∑ᵢλᵢ[fᵢ(α)]G₁ - [∑ᵢλᵢfᵢ(aᵢ)]G₁ + ∑ᵢλᵢ[aᵢ]([Hᵢ(α)]G₁)
	// = [∑ᵢλᵢfᵢ(α) - ∑ᵢλᵢfᵢ(aᵢ) + ∑ᵢλᵢaᵢHᵢ(α)]G₁
	foldedDigests.Add(&foldedDigests, &foldedPointsQuotients)

	// -∑ᵢλᵢ[Hᵢ(α)]G₁
	foldedQuotients.Neg(&foldedQuotients)

	// pairing check
	// e([∑ᵢλᵢ(fᵢ(α) - fᵢ(aᵢ) + aᵢHᵢ(α))]G₁, G₂) · e(-∑ᵢλᵢ[Hᵢ(α)]G₁, [α]G₂) == 1
	check, err := bls12377.PairingCheckFixedQ(
		[]bls12377.G1Affine{foldedDigests, foldedQuotients},
		vk.Lines[:],
	)
	if err != nil {
		return err
	}
	if !check {
		return ErrVerifyOpeningProof
	}
	return nil
}

// fold folds digests and evaluations using the list of factors as random numbers.
//
// * digests list of digests to fold
// * evaluations list of evaluations to fold
// * factors list of multiplicative factors used for the folding (in Montgomery form)
//
// * Returns ∑ᵢcᵢdᵢ, ∑ᵢcᵢfᵢ(aᵢ)
func fold(di []Digest, fai []fr.Element, ci []fr.Element) (Digest, fr.Element, error) {

	// checking the length consistency between digests and evaluations should have been done before calling this function
	nbDigests := len(di)

	// fold the claimed values ∑ᵢcᵢfᵢ(aᵢ)
	var foldedEvaluations, tmp fr.Element
	for i := 0; i < nbDigests; i++ {
		tmp.Mul(&fai[i], &ci[i])
		foldedEvaluations.Add(&foldedEvaluations, &tmp)
	}

	// fold the digests ∑ᵢ[cᵢ]([fᵢ(α)]G₁)
	var foldedDigests Digest
	_, err := foldedDigests.MultiExp(di, ci, ecc.MultiExpConfig{})
	if err != nil {
		return foldedDigests, foldedEvaluations, err
	}

	// folding done
	return foldedDigests, foldedEvaluations, nil
}

// deriveGamma derives a challenge using Fiat-Shamir to fold proofs.
func deriveGamma(point fr.Element, digests []Digest, claimedValues []fr.Element, hf hash.Hash, dataTranscript ...[]byte) (fr.Element, error) {

	// derive the challenge gamma, bound to the point and the commitments
	fs := fiatshamir.NewTranscript(hf, "gamma")
	if err := fs.Bind("gamma", point.Marshal()); err != nil {
		return fr.Element{}, err
	}
	for i := range digests {
		if err := fs.Bind("gamma", digests[i].Marshal()); err != nil {
			return fr.Element{}, err
		}
	}
	for i := range claimedValues {
		if err := fs.Bind("gamma", claimedValues[i].Marshal()); err != nil {
			return fr.Element{}, err
		}
	}

	for i := 0; i < len(dataTranscript); i++ {
		if err := fs.Bind("gamma", dataTranscript[i]); err != nil {
			return fr.Element{}, err
		}
	}

	gammaByte, err := fs.ComputeChallenge("gamma")
	if err != nil {
		return fr.Element{}, err
	}
	var gamma fr.Element
	gamma.SetBytes(gammaByte)

	return gamma, nil
}

// dividePolyByXminusA computes (f - f(a))/(X - a), in canonical basis, in regular form.
// f's memory is re-used for the result.
func dividePolyByXminusA(f []fr.Element, fa, a fr.Element) []fr.Element {

	// first we compute f - f(a)
	f[0].Sub(&f[0], &fa)

	// now we use synthetic division to divide by X - a
	var t fr.Element
	for i := len(f) - 2; i >= 0; i-- {
		t.Mul(&f[i+1], &a)

		f[i].Add(&f[i], &t)
	}

	// the result is of degree deg(f) - 1
	return f[1:]
}
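
// divisionSketch is a hypothetical self-check, not part of the generated code:
// it illustrates the identity behind the synthetic division above, namely
// f(X) - f(a) = (X - a)·h(X), by evaluating both sides at a random point x.
func divisionSketch(f []fr.Element, a fr.Element) bool {
	// a constant polynomial yields an empty quotient; nothing to check
	if len(f) < 2 {
		return true
	}

	fa := eval(f, a)

	// work on a copy: dividePolyByXminusA overwrites its input
	tmp := make([]fr.Element, len(f))
	copy(tmp, f)
	h := dividePolyByXminusA(tmp, fa, a)

	var x fr.Element
	if _, err := x.SetRandom(); err != nil {
		return false
	}

	// lhs = f(x) - f(a)
	fx := eval(f, x)
	var lhs fr.Element
	lhs.Sub(&fx, &fa)

	// rhs = (x - a)·h(x)
	hx := eval(h, x)
	var rhs fr.Element
	rhs.Sub(&x, &a).Mul(&rhs, &hx)

	return lhs.Equal(&rhs)
}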