github.com/onflow/flow-go@v0.33.17/ledger/complete/mtrie/flattener/encoding_test.go

package flattener_test

import (
    "bytes"
    crand "crypto/rand"
    "errors"
    "fmt"
    "math/rand"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

    "github.com/onflow/flow-go/ledger"
    "github.com/onflow/flow-go/ledger/common/hash"
    "github.com/onflow/flow-go/ledger/common/testutils"
    "github.com/onflow/flow-go/ledger/complete/mtrie/flattener"
    "github.com/onflow/flow-go/ledger/complete/mtrie/node"
    "github.com/onflow/flow-go/ledger/complete/mtrie/trie"
)

func TestLeafNodeEncodingDecoding(t *testing.T) {

    // Leaf node with nil payload
    path1 := testutils.PathByUint8(0)
    payload1 := (*ledger.Payload)(nil)
    hashValue1 := hash.Hash([32]byte{1, 1, 1})
    leafNodeNilPayload := node.NewNode(255, nil, nil, ledger.Path(path1), payload1, hashValue1)

    // Leaf node with empty payload (not nil)
    // EmptyPayload() is not used because the decoded payload's value is an empty slice (not nil)
    path2 := testutils.PathByUint8(1)
    payload2 := ledger.NewPayload(ledger.Key{}, []byte{})
    hashValue2 := hash.Hash([32]byte{2, 2, 2})
    leafNodeEmptyPayload := node.NewNode(255, nil, nil, ledger.Path(path2), payload2, hashValue2)

    // Leaf node with payload
    path3 := testutils.PathByUint8(2)
    payload3 := testutils.LightPayload8('A', 'a')
    hashValue3 := hash.Hash([32]byte{3, 3, 3})
    leafNodePayload := node.NewNode(255, nil, nil, ledger.Path(path3), payload3, hashValue3)
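
    // Expected leaf-node encoding, as exercised by the vectors below. The layout is
    // inferred from these vectors and their annotations (flattener.EncodeNode is the
    // authoritative definition):
    //   1 byte   node type (0 = leaf)
    //   2 bytes  height (big endian)
    //   32 bytes hash
    //   32 bytes path
    //   4 bytes  payload data length (big endian), followed by the encoded payload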
    encodedLeafNodeNilPayload := []byte{
        0x00,       // node type
        0x00, 0xff, // height
        0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // path data
        0x00, 0x00, 0x00, 0x00, // payload data len
    }

    encodedLeafNodeEmptyPayload := []byte{
        0x00,       // node type
        0x00, 0xff, // height
        0x02, 0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
        0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // path data
        0x00, 0x00, 0x00, 0x0a, // payload data len
        0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, // payload data
    }

    encodedLeafNodePayload := []byte{
        0x00,       // node type
        0x00, 0xff, // height
        0x03, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
        0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // path data
        0x00, 0x00, 0x00, 0x12, // payload data len
        0x00, 0x00, 0x00, 0x09, 0x00, 0x01, 0x00, 0x00,
        0x00, 0x03, 0x00, 0x00, 0x41, 0x00, 0x00, 0x00,
        0x01, 0x61, // payload data
    }

    testCases := []struct {
        name        string
        node        *node.Node
        encodedNode []byte
    }{
        {"nil payload", leafNodeNilPayload, encodedLeafNodeNilPayload},
        {"empty payload", leafNodeEmptyPayload, encodedLeafNodeEmptyPayload},
        {"payload", leafNodePayload, encodedLeafNodePayload},
    }

    for _, tc := range testCases {
        t.Run("encode "+tc.name, func(t *testing.T) {
            scratchBuffers := [][]byte{
                nil,
                make([]byte, 0),
                make([]byte, 16),
                make([]byte, 1024),
            }

            for _, scratch := range scratchBuffers {
                encodedNode := flattener.EncodeNode(tc.node, 0, 0, scratch)
                assert.Equal(t, tc.encodedNode, encodedNode)

                if len(scratch) > 0 {
                    if len(scratch) >= len(encodedNode) {
                        // reuse scratch buffer
                        require.True(t, &scratch[0] == &encodedNode[0])
                    } else {
                        // new alloc
                        require.True(t, &scratch[0] != &encodedNode[0])
                    }
                }
            }
        })

        t.Run("decode "+tc.name, func(t *testing.T) {
            scratchBuffers := [][]byte{
                nil,
                make([]byte, 0),
                make([]byte, 16),
                make([]byte, 1024),
            }

            for _, scratch := range scratchBuffers {
                reader := bytes.NewReader(tc.encodedNode)
                newNode, err := flattener.ReadNode(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
                    return nil, fmt.Errorf("no call expected")
                })
                require.NoError(t, err)
                assert.Equal(t, tc.node, newNode)
                assert.Equal(t, 0, reader.Len())
            }
        })
    }
}

func TestRandomLeafNodeEncodingDecoding(t *testing.T) {
    const count = 1000
    const minPayloadSize = 40
    const maxPayloadSize = 1024 * 2
    // scratchBufferSize is intentionally small here to test
    // when encoded node size is sometimes larger than scratch buffer.
    const scratchBufferSize = 512

    paths := testutils.RandomPaths(count)
    payloads := testutils.RandomPayloads(count, minPayloadSize, maxPayloadSize)

    writeScratch := make([]byte, scratchBufferSize)
    readScratch := make([]byte, scratchBufferSize)

    for i := 0; i < count; i++ {
        height := rand.Intn(257)

        var hashValue hash.Hash
        _, err := crand.Read(hashValue[:])
        require.NoError(t, err)

        n := node.NewNode(height, nil, nil, paths[i], payloads[i], hashValue)

        encodedNode := flattener.EncodeNode(n, 0, 0, writeScratch)

        if len(writeScratch) >= len(encodedNode) {
            // reuse scratch buffer
            require.True(t, &writeScratch[0] == &encodedNode[0])
        } else {
            // new alloc because scratch buffer isn't big enough
            require.True(t, &writeScratch[0] != &encodedNode[0])
        }

        reader := bytes.NewReader(encodedNode)
        newNode, err := flattener.ReadNode(reader, readScratch, func(nodeIndex uint64) (*node.Node, error) {
            return nil, fmt.Errorf("no call expected")
        })
        require.NoError(t, err)
        assert.Equal(t, n, newNode)
        assert.Equal(t, 0, reader.Len())
    }
}

func TestInterimNodeEncodingDecoding(t *testing.T) {

    const lchildIndex = 1
    const rchildIndex = 2

    // Child node
    path1 := testutils.PathByUint8(0)
    payload1 := testutils.LightPayload8('A', 'a')
    hashValue1 := hash.Hash([32]byte{1, 1, 1})
    leafNode1 := node.NewNode(255, nil, nil, ledger.Path(path1), payload1, hashValue1)

    // Child node
    path2 := testutils.PathByUint8(1)
    payload2 := testutils.LightPayload8('B', 'b')
    hashValue2 := hash.Hash([32]byte{2, 2, 2})
    leafNode2 := node.NewNode(255, nil, nil, ledger.Path(path2), payload2, hashValue2)

    // Interim node
    hashValue3 := hash.Hash([32]byte{3, 3, 3})
    interimNode := node.NewNode(256, leafNode1, leafNode2, ledger.DummyPath, nil, hashValue3)
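
    // Expected interim-node encoding, as exercised by the vector below. The layout is
    // inferred from this vector and its annotations (flattener.EncodeNode is the
    // authoritative definition):
    //   1 byte   node type (1 = interim)
    //   2 bytes  height (big endian)
    //   32 bytes hash
    //   8 bytes  left child node index (big endian)
    //   8 bytes  right child node index (big endian)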
    encodedInterimNode := []byte{
        0x01,       // node type
        0x01, 0x00, // height
        0x03, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // LIndex
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // RIndex
    }

    t.Run("encode", func(t *testing.T) {
        scratchBuffers := [][]byte{
            nil,
            make([]byte, 0),
            make([]byte, 16),
            make([]byte, 1024),
        }

        for _, scratch := range scratchBuffers {
            data := flattener.EncodeNode(interimNode, lchildIndex, rchildIndex, scratch)
            assert.Equal(t, encodedInterimNode, data)
        }
    })

    t.Run("decode", func(t *testing.T) {
        scratchBuffers := [][]byte{
            nil,
            make([]byte, 0),
            make([]byte, 16),
            make([]byte, 1024),
        }

        for _, scratch := range scratchBuffers {
            reader := bytes.NewReader(encodedInterimNode)
            newNode, err := flattener.ReadNode(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
                switch nodeIndex {
                case lchildIndex:
                    return leafNode1, nil
                case rchildIndex:
                    return leafNode2, nil
                default:
                    return nil, fmt.Errorf("unexpected child node index %d ", nodeIndex)
                }
            })
            require.NoError(t, err)
            assert.Equal(t, interimNode, newNode)
            assert.Equal(t, 0, reader.Len())
        }
    })

    t.Run("decode child node not found error", func(t *testing.T) {
        nodeNotFoundError := errors.New("failed to find node by index")
        scratch := make([]byte, 1024)

        reader := bytes.NewReader(encodedInterimNode)
        newNode, err := flattener.ReadNode(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
            return nil, nodeNotFoundError
        })
        require.Nil(t, newNode)
        require.ErrorIs(t, err, nodeNotFoundError)
    })
}

func TestTrieEncodingDecoding(t *testing.T) {
    rootNodeNilIndex := uint64(20)

    hashValue := hash.Hash([32]byte{2, 2, 2})
    rootNode := node.NewNode(256, nil, nil, ledger.DummyPath, nil, hashValue)
    rootNodeIndex := uint64(21)

    mtrie, err := trie.NewMTrie(rootNode, 7, 1234)
    require.NoError(t, err)
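
    // Expected trie encoding, as exercised by the vectors below. The layout is
    // inferred from these vectors and their annotations (flattener.EncodeTrie is the
    // authoritative definition):
    //   8 bytes  root node index (big endian)
    //   8 bytes  allocated register count (big endian)
    //   8 bytes  allocated register size (big endian)
    //   32 bytes root hash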
    encodedNilTrie := []byte{
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, // root index
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reg count
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reg size
        0x56, 0x8f, 0x4e, 0xc7, 0x40, 0xfe, 0x3b, 0x5d,
        0xe8, 0x80, 0x34, 0xcb, 0x7b, 0x1f, 0xbd, 0xdb,
        0x41, 0x54, 0x8b, 0x06, 0x8f, 0x31, 0xae, 0xbc,
        0x8a, 0xe9, 0x18, 0x9e, 0x42, 0x9c, 0x57, 0x49, // hash data
    }

    encodedTrie := []byte{
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x15, // root index
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, // reg count
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0xd2, // reg size
        0x02, 0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
    }

    testCases := []struct {
        name          string
        trie          *trie.MTrie
        rootNodeIndex uint64
        encodedTrie   []byte
    }{
        {"empty trie", trie.NewEmptyMTrie(), rootNodeNilIndex, encodedNilTrie},
        {"trie", mtrie, rootNodeIndex, encodedTrie},
    }

    for _, tc := range testCases {

        t.Run("encode "+tc.name, func(t *testing.T) {
            scratchBuffers := [][]byte{
                nil,
                make([]byte, 0),
                make([]byte, 16),
                make([]byte, 1024),
            }

            for _, scratch := range scratchBuffers {
                encodedTrie := flattener.EncodeTrie(tc.trie, tc.rootNodeIndex, scratch)
                assert.Equal(t, tc.encodedTrie, encodedTrie)

                if len(scratch) > 0 {
                    if len(scratch) >= len(encodedTrie) {
                        // reuse scratch buffer
                        require.True(t, &scratch[0] == &encodedTrie[0])
                    } else {
                        // new alloc
                        require.True(t, &scratch[0] != &encodedTrie[0])
                    }
                }
            }
        })

        t.Run("decode "+tc.name, func(t *testing.T) {
            scratchBuffers := [][]byte{
                nil,
                make([]byte, 0),
                make([]byte, 16),
                make([]byte, 1024),
            }

            for _, scratch := range scratchBuffers {
                reader := bytes.NewReader(tc.encodedTrie)
                trie, err := flattener.ReadTrie(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
                    if nodeIndex != tc.rootNodeIndex {
                        return nil, fmt.Errorf("unexpected root node index %d ", nodeIndex)
                    }
                    return tc.trie.RootNode(), nil
                })
                require.NoError(t, err)
                assert.Equal(t, tc.trie, trie)
                assert.Equal(t, tc.trie.AllocatedRegCount(), trie.AllocatedRegCount())
                assert.Equal(t, tc.trie.AllocatedRegSize(), trie.AllocatedRegSize())
                assert.Equal(t, 0, reader.Len())
            }
        })

        t.Run("decode "+tc.name+" node not found error", func(t *testing.T) {
            nodeNotFoundError := errors.New("failed to find node by index")
            scratch := make([]byte, 1024)

            reader := bytes.NewReader(tc.encodedTrie)
            newNode, err := flattener.ReadTrie(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
                return nil, nodeNotFoundError
            })
            require.Nil(t, newNode)
            require.ErrorIs(t, err, nodeNotFoundError)
        })
    }
}