github.com/ethersphere/bee/v2@v2.2.0/pkg/storageincentives/proof.go

// Copyright 2023 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package storageincentives

import (
	"errors"
	"fmt"
	"hash"
	"math/big"

	"github.com/ethersphere/bee/v2/pkg/bmt"
	"github.com/ethersphere/bee/v2/pkg/bmtpool"
	"github.com/ethersphere/bee/v2/pkg/cac"
	"github.com/ethersphere/bee/v2/pkg/soc"
	"github.com/ethersphere/bee/v2/pkg/storageincentives/redistribution"
	storer "github.com/ethersphere/bee/v2/pkg/storer"
	"github.com/ethersphere/bee/v2/pkg/swarm"
)

var errProofCreation = errors.New("reserve commitment hasher: failure in proof creation")

// spanOffset returns the byte offset within the chunk data at which the span starts.
// It is non-zero only for valid single-owner chunks, whose wrapped content-addressed
// data follows the SOC identifier and signature.
func spanOffset(sampleItem storer.SampleItem) uint8 {
	ch := swarm.NewChunk(sampleItem.ChunkAddress, sampleItem.ChunkData)
	if soc.Valid(ch) {
		return swarm.HashSize + swarm.SocSignatureSize
	}

	return 0
}
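
// exampleWitnessSegments is a minimal illustrative sketch (the name is not part
// of the package) of how spanOffset is applied to a sample item below: the
// 8-byte span and the BMT payload of a witness chunk are read starting at the
// returned offset.
func exampleWitnessSegments(item storer.SampleItem) (span, payload []byte) {
	off := spanOffset(item)
	span = item.ChunkData[off : off+swarm.SpanSize]
	payload = item.ChunkData[off+swarm.SpanSize:]
	return span, payload
}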

// makeInclusionProofs creates the transaction data for the claim method.
// The resulting data is also referred to in the incentives documentation as the
// proof of entitlement (POE).
func makeInclusionProofs(
	reserveSampleItems []storer.SampleItem,
	anchor1 []byte,
	anchor2 []byte,
) (redistribution.ChunkInclusionProofs, error) {
	if len(reserveSampleItems) != storer.SampleSize {
		return redistribution.ChunkInclusionProofs{}, fmt.Errorf("reserve sample items should have %d elements", storer.SampleSize)
	}
	if len(anchor1) == 0 {
		return redistribution.ChunkInclusionProofs{}, errors.New("anchor1 is not set")
	}
	if len(anchor2) == 0 {
		return redistribution.ChunkInclusionProofs{}, errors.New("anchor2 is not set")
	}

	// Select the three witness indices: the last sample item is always proven,
	// and two further, distinct indices are derived from anchor2.
	require3 := storer.SampleSize - 1
	require1 := new(big.Int).Mod(new(big.Int).SetBytes(anchor2), big.NewInt(int64(require3))).Uint64()
	require2 := new(big.Int).Mod(new(big.Int).SetBytes(anchor2), big.NewInt(int64(require3-1))).Uint64()
	if require2 >= require1 {
		require2++
	}

	// The prefix hasher is keyed with anchor1 and is used for the transformed (TR) chunk proofs.
	prefixHasherFactory := func() hash.Hash {
		return swarm.NewPrefixHasher(anchor1)
	}
	prefixHasherPool := bmt.NewPool(bmt.NewConf(prefixHasherFactory, swarm.BmtBranches, 8))

	// Sample chunk proofs
	rccontent := bmt.Prover{Hasher: bmtpool.Get()}
	rccontent.SetHeaderInt64(swarm.HashSize * storer.SampleSize * 2)
	rsc, err := sampleChunk(reserveSampleItems)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	rscData := rsc.Data()
	_, err = rccontent.Write(rscData[swarm.SpanSize:])
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = rccontent.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proof1p1 := rccontent.Proof(int(require1) * 2)
	proof2p1 := rccontent.Proof(int(require2) * 2)
	proofLastp1 := rccontent.Proof(require3 * 2)
	bmtpool.Put(rccontent.Hasher)

	// Witness1 proofs
	// segmentIndex selects which of the 128 BMT segments of each witness chunk is proven.
	segmentIndex := int(new(big.Int).Mod(new(big.Int).SetBytes(anchor2), big.NewInt(int64(128))).Uint64())
	// OG chunk proof
	chunk1Content := bmt.Prover{Hasher: bmtpool.Get()}
	chunk1Offset := spanOffset(reserveSampleItems[require1])
	chunk1Content.SetHeader(reserveSampleItems[require1].ChunkData[chunk1Offset : chunk1Offset+swarm.SpanSize])
	chunk1ContentPayload := reserveSampleItems[require1].ChunkData[chunk1Offset+swarm.SpanSize:]
	_, err = chunk1Content.Write(chunk1ContentPayload)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = chunk1Content.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proof1p2 := chunk1Content.Proof(segmentIndex)
	// TR chunk proof
	chunk1TrContent := bmt.Prover{Hasher: prefixHasherPool.Get()}
	chunk1TrContent.SetHeader(reserveSampleItems[require1].ChunkData[chunk1Offset : chunk1Offset+swarm.SpanSize])
	_, err = chunk1TrContent.Write(chunk1ContentPayload)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = chunk1TrContent.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proof1p3 := chunk1TrContent.Proof(segmentIndex)
	// cleanup
	bmtpool.Put(chunk1Content.Hasher)
	prefixHasherPool.Put(chunk1TrContent.Hasher)

	// Witness2 proofs
	// OG chunk proof
	chunk2Offset := spanOffset(reserveSampleItems[require2])
	chunk2Content := bmt.Prover{Hasher: bmtpool.Get()}
	chunk2ContentPayload := reserveSampleItems[require2].ChunkData[chunk2Offset+swarm.SpanSize:]
	chunk2Content.SetHeader(reserveSampleItems[require2].ChunkData[chunk2Offset : chunk2Offset+swarm.SpanSize])
	_, err = chunk2Content.Write(chunk2ContentPayload)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = chunk2Content.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proof2p2 := chunk2Content.Proof(segmentIndex)
	// TR chunk proof
	chunk2TrContent := bmt.Prover{Hasher: prefixHasherPool.Get()}
	chunk2TrContent.SetHeader(reserveSampleItems[require2].ChunkData[chunk2Offset : chunk2Offset+swarm.SpanSize])
	_, err = chunk2TrContent.Write(chunk2ContentPayload)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = chunk2TrContent.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proof2p3 := chunk2TrContent.Proof(segmentIndex)
	// cleanup
	bmtpool.Put(chunk2Content.Hasher)
	prefixHasherPool.Put(chunk2TrContent.Hasher)

	// Witness3 proofs
	// OG chunk proof
	chunkLastOffset := spanOffset(reserveSampleItems[require3])
	chunkLastContent := bmt.Prover{Hasher: bmtpool.Get()}
	chunkLastContent.SetHeader(reserveSampleItems[require3].ChunkData[chunkLastOffset : chunkLastOffset+swarm.SpanSize])
	chunkLastContentPayload := reserveSampleItems[require3].ChunkData[chunkLastOffset+swarm.SpanSize:]
	_, err = chunkLastContent.Write(chunkLastContentPayload)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = chunkLastContent.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proofLastp2 := chunkLastContent.Proof(segmentIndex)
	// TR chunk proof
	chunkLastTrContent := bmt.Prover{Hasher: prefixHasherPool.Get()}
	chunkLastTrContent.SetHeader(reserveSampleItems[require3].ChunkData[chunkLastOffset : chunkLastOffset+swarm.SpanSize])
	_, err = chunkLastTrContent.Write(chunkLastContentPayload)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	_, err = chunkLastTrContent.Hash(nil)
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, errProofCreation
	}
	proofLastp3 := chunkLastTrContent.Proof(segmentIndex)
	// cleanup
	bmtpool.Put(chunkLastContent.Hasher)
	prefixHasherPool.Put(chunkLastTrContent.Hasher)

	// Map to the output format and add SOC-related data where necessary.
	A, err := redistribution.NewChunkInclusionProof(proof1p1, proof1p2, proof1p3, reserveSampleItems[require1])
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, err
	}
	B, err := redistribution.NewChunkInclusionProof(proof2p1, proof2p2, proof2p3, reserveSampleItems[require2])
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, err
	}
	C, err := redistribution.NewChunkInclusionProof(proofLastp1, proofLastp2, proofLastp3, reserveSampleItems[require3])
	if err != nil {
		return redistribution.ChunkInclusionProofs{}, err
	}
	return redistribution.ChunkInclusionProofs{
		A: A,
		B: B,
		C: C,
	}, nil
}
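
// exampleWitnessIndices is a minimal illustrative sketch (the name is not part
// of the package) that isolates the witness selection used above: the last
// sample item is always proven, and the two remaining indices are derived from
// anchor2 so that they are guaranteed to be distinct from each other and to lie
// within the sample.
func exampleWitnessIndices(anchor2 []byte) (first, second uint64, last int) {
	last = storer.SampleSize - 1
	first = new(big.Int).Mod(new(big.Int).SetBytes(anchor2), big.NewInt(int64(last))).Uint64()
	second = new(big.Int).Mod(new(big.Int).SetBytes(anchor2), big.NewInt(int64(last-1))).Uint64()
	if second >= first {
		// shifting removes the collision with first while keeping second < last
		second++
	}
	return first, second, last
}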

// sampleChunk builds the reserve commitment chunk from the sample: for every
// sample item it appends the chunk address followed by the transformed address
// and wraps the resulting content in a content-addressed chunk.
func sampleChunk(items []storer.SampleItem) (swarm.Chunk, error) {
	contentSize := len(items) * 2 * swarm.HashSize

	pos := 0
	content := make([]byte, contentSize)
	for _, s := range items {
		copy(content[pos:], s.ChunkAddress.Bytes())
		pos += swarm.HashSize
		copy(content[pos:], s.TransformedAddress.Bytes())
		pos += swarm.HashSize
	}

	return cac.New(content)
}

// sampleHash returns the address of the sample chunk, i.e. the reserve
// commitment over the sample items.
func sampleHash(items []storer.SampleItem) (swarm.Address, error) {
	ch, err := sampleChunk(items)
	if err != nil {
		return swarm.ZeroAddress, err
	}
	return ch.Address(), nil
}
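
// exampleSampleLayout is a minimal illustrative sketch (the name is not part of
// the package) of the byte layout produced by sampleChunk above: sample item i
// occupies two consecutive 32-byte segments, its chunk address followed by its
// transformed address, which is why makeInclusionProofs requests Proof(index*2)
// on the sample chunk for a witness at index `index`.
func exampleSampleLayout(i int) (addrOffset, trOffset int) {
	addrOffset = 2 * i * swarm.HashSize
	trOffset = addrOffset + swarm.HashSize
	return addrOffset, trOffset
}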