github.com/consensys/gnark@v0.11.0/backend/groth16/bn254/mpcsetup/phase2.go (about)

     1  // Copyright 2020 ConsenSys Software Inc.
     2  //
     3  // Licensed under the Apache License, Version 2.0 (the "License");
     4  // you may not use this file except in compliance with the License.
     5  // You may obtain a copy of the License at
     6  //
     7  //     http://www.apache.org/licenses/LICENSE-2.0
     8  //
     9  // Unless required by applicable law or agreed to in writing, software
    10  // distributed under the License is distributed on an "AS IS" BASIS,
    11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    12  // See the License for the specific language governing permissions and
    13  // limitations under the License.
    14  
    15  // Code generated by gnark DO NOT EDIT
    16  
    17  package mpcsetup
    18  
import (
	"bytes"
	"crypto/sha256"
	"errors"
	"math/big"

	curve "github.com/consensys/gnark-crypto/ecc/bn254"
	"github.com/consensys/gnark-crypto/ecc/bn254/fr"
	"github.com/consensys/gnark/constraint"
	cs "github.com/consensys/gnark/constraint/bn254"
)
    29  
// Phase2Evaluations collects the Lagrange-basis evaluations computed by
// InitPhase2 that are later needed to assemble the Groth16 keys.
type Phase2Evaluations struct {
	G1 struct {
		// A and B hold per-wire evaluations in G1; VKK holds the
		// public-wire terms (filled for the first `public` wires in
		// InitPhase2).
		A, B, VKK []curve.G1Affine
	}
	G2 struct {
		// B holds the per-wire evaluations in G2.
		B []curve.G2Affine
	}
}
    38  
// Phase2 represents one participant's state in the circuit-specific second
// phase of the MPC setup ceremony.
type Phase2 struct {
	Parameters struct {
		G1 struct {
			Delta curve.G1Affine // [δ]₁, rescaled by each contribution's δ
			// L and Z are rescaled by δ⁻¹ on each contribution
			// (see Contribute).
			L, Z []curve.G1Affine
		}
		G2 struct {
			Delta curve.G2Affine // [δ]₂, rescaled by each contribution's δ
		}
	}
	PublicKey PublicKey // proof of knowledge of the δ used in the last update
	Hash      []byte    // transcript hash of this contribution (see hash)
}
    52  
// InitPhase2 derives the circuit-specific ("phase 2") starting parameters
// from the universal phase-1 SRS and the compiled R1CS. It returns the
// default contribution (δ = 1) together with the Lagrange-basis evaluations
// needed later for key extraction. It panics if the phase-1 SRS is too small
// for the circuit.
func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) {
	srs := srs1.Parameters
	size := len(srs.G1.AlphaTau)
	// The phase-1 SRS must cover every constraint of the circuit.
	if size < r1cs.GetNbConstraints() {
		panic("Number of constraints is larger than expected")
	}

	c2 := Phase2{}

	// accumulateG1 adds coeff(t)·value to res, special-casing the small
	// coefficient IDs (0, ±1, 2) to avoid a full scalar multiplication.
	accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) {
		cID := t.CoeffID()
		switch cID {
		case constraint.CoeffIdZero:
			return
		case constraint.CoeffIdOne:
			res.Add(res, value)
		case constraint.CoeffIdMinusOne:
			res.Sub(res, value)
		case constraint.CoeffIdTwo:
			res.Add(res, value).Add(res, value)
		default:
			var tmp curve.G1Affine
			var vBi big.Int
			r1cs.Coefficients[cID].BigInt(&vBi)
			tmp.ScalarMultiplication(value, &vBi)
			res.Add(res, &tmp)
		}
	}

	// accumulateG2 is the G2 counterpart of accumulateG1.
	accumulateG2 := func(res *curve.G2Affine, t constraint.Term, value *curve.G2Affine) {
		cID := t.CoeffID()
		switch cID {
		case constraint.CoeffIdZero:
			return
		case constraint.CoeffIdOne:
			res.Add(res, value)
		case constraint.CoeffIdMinusOne:
			res.Sub(res, value)
		case constraint.CoeffIdTwo:
			res.Add(res, value).Add(res, value)
		default:
			var tmp curve.G2Affine
			var vBi big.Int
			r1cs.Coefficients[cID].BigInt(&vBi)
			tmp.ScalarMultiplication(value, &vBi)
			res.Add(res, &tmp)
		}
	}

	// Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁
	coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size)
	coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size)
	coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size)
	coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size)

	internal, secret, public := r1cs.GetNbVariables()
	nWires := internal + secret + public
	var evals Phase2Evaluations
	evals.G1.A = make([]curve.G1Affine, nWires)
	evals.G1.B = make([]curve.G1Affine, nWires)
	evals.G2.B = make([]curve.G2Affine, nWires)
	bA := make([]curve.G1Affine, nWires)
	aB := make([]curve.G1Affine, nWires)
	C := make([]curve.G1Affine, nWires)

	// TODO @gbotrel use constraint iterator when available.

	// Accumulate, per wire, the constraint-matrix evaluations at the
	// Lagrange points (one point per constraint, indexed by i).
	i := 0
	it := r1cs.GetR1CIterator()
	for c := it.Next(); c != nil; c = it.Next() {
		// A
		for _, t := range c.L {
			accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i])
			accumulateG1(&bA[t.WireID()], t, &coeffBetaTau1[i])
		}
		// B
		for _, t := range c.R {
			accumulateG1(&evals.G1.B[t.WireID()], t, &coeffTau1[i])
			accumulateG2(&evals.G2.B[t.WireID()], t, &coeffTau2[i])
			accumulateG1(&aB[t.WireID()], t, &coeffAlphaTau1[i])
		}
		// C
		for _, t := range c.O {
			accumulateG1(&C[t.WireID()], t, &coeffTau1[i])
		}
		i++
	}

	// Prepare default contribution: δ = 1, so [δ]₁ and [δ]₂ are the generators.
	_, _, g1, g2 := curve.Generators()
	c2.Parameters.G1.Delta = g1
	c2.Parameters.G2.Delta = g2

	// Build Z in PK as τⁱ(τⁿ - 1)  = τ⁽ⁱ⁺ⁿ⁾ - τⁱ  for i ∈ [0, n-2]
	n := len(srs.G1.AlphaTau)
	c2.Parameters.G1.Z = make([]curve.G1Affine, n)
	for i := 0; i < n-1; i++ {
		c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i])
	}
	// Reorder to bit-reversed layout, then drop the last slot (Z[n-1] is
	// never written above; assuming n is a power of two, bit-reversal maps
	// index n-1 to itself, so the unset element stays last — TODO confirm).
	bitReverse(c2.Parameters.G1.Z)
	c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1]

	// Evaluate L: per-wire βA + αB + C; public wires go to the verifying
	// key terms (VKK), private wires to L.
	nPrivate := internal + secret
	c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate)
	evals.G1.VKK = make([]curve.G1Affine, public)
	offset := public
	for i := 0; i < nWires; i++ {
		var tmp curve.G1Affine
		tmp.Add(&bA[i], &aB[i])
		tmp.Add(&tmp, &C[i])
		if i < public {
			evals.G1.VKK[i].Set(&tmp)
		} else {
			c2.Parameters.G1.L[i-offset].Set(&tmp)
		}
	}
	// Set δ public key for the default (δ = 1) contribution
	var delta fr.Element
	delta.SetOne()
	c2.PublicKey = newPublicKey(delta, nil, 1)

	// Hash initial contribution
	c2.Hash = c2.hash()
	return c2, evals
}
   180  
   181  func (c *Phase2) Contribute() {
   182  	// Sample toxic δ
   183  	var delta, deltaInv fr.Element
   184  	var deltaBI, deltaInvBI big.Int
   185  	delta.SetRandom()
   186  	deltaInv.Inverse(&delta)
   187  
   188  	delta.BigInt(&deltaBI)
   189  	deltaInv.BigInt(&deltaInvBI)
   190  
   191  	// Set δ public key
   192  	c.PublicKey = newPublicKey(delta, c.Hash, 1)
   193  
   194  	// Update δ
   195  	c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI)
   196  	c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI)
   197  
   198  	// Update Z using δ⁻¹
   199  	for i := 0; i < len(c.Parameters.G1.Z); i++ {
   200  		c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI)
   201  	}
   202  
   203  	// Update L using δ⁻¹
   204  	for i := 0; i < len(c.Parameters.G1.L); i++ {
   205  		c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI)
   206  	}
   207  
   208  	// 4. Hash contribution
   209  	c.Hash = c.hash()
   210  }
   211  
   212  func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error {
   213  	contribs := append([]*Phase2{c0, c1}, c...)
   214  	for i := 0; i < len(contribs)-1; i++ {
   215  		if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil {
   216  			return err
   217  		}
   218  	}
   219  	return nil
   220  }
   221  
   222  func verifyPhase2(current, contribution *Phase2) error {
   223  	// Compute R for δ
   224  	deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1)
   225  
   226  	// Check for knowledge of δ
   227  	if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) {
   228  		return errors.New("couldn't verify knowledge of δ")
   229  	}
   230  
   231  	// Check for valid updates using previous parameters
   232  	if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) {
   233  		return errors.New("couldn't verify that [δ]₁ is based on previous contribution")
   234  	}
   235  	if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) {
   236  		return errors.New("couldn't verify that [δ]₂ is based on previous contribution")
   237  	}
   238  
   239  	// Check for valid updates of L and Z using
   240  	L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L)
   241  	if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) {
   242  		return errors.New("couldn't verify valid updates of L using δ⁻¹")
   243  	}
   244  	Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z)
   245  	if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) {
   246  		return errors.New("couldn't verify valid updates of L using δ⁻¹")
   247  	}
   248  
   249  	// Check hash of the contribution
   250  	h := contribution.hash()
   251  	for i := 0; i < len(h); i++ {
   252  		if h[i] != contribution.Hash[i] {
   253  			return errors.New("couldn't verify hash of contribution")
   254  		}
   255  	}
   256  
   257  	return nil
   258  }
   259  
   260  func (c *Phase2) hash() []byte {
   261  	sha := sha256.New()
   262  	c.writeTo(sha)
   263  	return sha.Sum(nil)
   264  }