github.com/koko1123/flow-go-1@v0.29.6/ledger/complete/mtrie/flattener/encoding_test.go

package flattener_test

import (
	"bytes"
	"errors"
	"fmt"
	"math/rand"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/koko1123/flow-go-1/ledger"
	"github.com/koko1123/flow-go-1/ledger/common/hash"
	"github.com/koko1123/flow-go-1/ledger/common/testutils"
	"github.com/koko1123/flow-go-1/ledger/complete/mtrie/flattener"
	"github.com/koko1123/flow-go-1/ledger/complete/mtrie/node"
	"github.com/koko1123/flow-go-1/ledger/complete/mtrie/trie"
)

// TestLeafNodeEncodingDecoding checks that leaf nodes with nil, empty, and
// non-empty payloads encode to the expected bytes, decode back to equal nodes,
// and reuse the scratch buffer only when it is large enough.
func TestLeafNodeEncodingDecoding(t *testing.T) {

	// Leaf node with nil payload
	path1 := testutils.PathByUint8(0)
	payload1 := (*ledger.Payload)(nil)
	hashValue1 := hash.Hash([32]byte{1, 1, 1})
	leafNodeNilPayload := node.NewNode(255, nil, nil, ledger.Path(path1), payload1, hashValue1)

	// Leaf node with empty payload (not nil)
	// EmptyPayload() is not used because the decoded payload's value is an empty slice (not nil)
	path2 := testutils.PathByUint8(1)
	payload2 := ledger.NewPayload(ledger.Key{}, []byte{})
	hashValue2 := hash.Hash([32]byte{2, 2, 2})
	leafNodeEmptyPayload := node.NewNode(255, nil, nil, ledger.Path(path2), payload2, hashValue2)

	// Leaf node with payload
	path3 := testutils.PathByUint8(2)
	payload3 := testutils.LightPayload8('A', 'a')
	hashValue3 := hash.Hash([32]byte{3, 3, 3})
	leafNodePayload := node.NewNode(255, nil, nil, ledger.Path(path3), payload3, hashValue3)

	encodedLeafNodeNilPayload := []byte{
		0x00,       // node type
		0x00, 0xff, // height
		0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // path data
		0x00, 0x00, 0x00, 0x00, // payload data len
	}

	encodedLeafNodeEmptyPayload := []byte{
		0x00,       // node type
		0x00, 0xff, // height
		0x02, 0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
		0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // path data
		0x00, 0x00, 0x00, 0x0a, // payload data len
		0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, // payload data
	}

	encodedLeafNodePayload := []byte{
		0x00,       // node type
		0x00, 0xff, // height
		0x03, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
		0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // path data
		0x00, 0x00, 0x00, 0x12, // payload data len
		0x00, 0x00, 0x00, 0x09, 0x00, 0x01, 0x00, 0x00,
		0x00, 0x03, 0x00, 0x00, 0x41, 0x00, 0x00, 0x00,
		0x01, 0x61, // payload data
	}

	testCases := []struct {
		name        string
		node        *node.Node
		encodedNode []byte
	}{
		{"nil payload", leafNodeNilPayload, encodedLeafNodeNilPayload},
		{"empty payload", leafNodeEmptyPayload, encodedLeafNodeEmptyPayload},
		{"payload", leafNodePayload, encodedLeafNodePayload},
	}

	for _, tc := range testCases {
		t.Run("encode "+tc.name, func(t *testing.T) {
			scratchBuffers := [][]byte{
				nil,
				make([]byte, 0),
				make([]byte, 16),
				make([]byte, 1024),
			}

			for _, scratch := range scratchBuffers {
				encodedNode := flattener.EncodeNode(tc.node, 0, 0, scratch)
				assert.Equal(t, tc.encodedNode, encodedNode)

				if len(scratch) > 0 {
					if len(scratch) >= len(encodedNode) {
						// reuse scratch buffer
						require.True(t, &scratch[0] == &encodedNode[0])
					} else {
						// new alloc
						require.True(t, &scratch[0] != &encodedNode[0])
					}
				}
			}
		})

		t.Run("decode "+tc.name, func(t *testing.T) {
			scratchBuffers := [][]byte{
				nil,
				make([]byte, 0),
				make([]byte, 16),
				make([]byte, 1024),
			}

			for _, scratch := range scratchBuffers {
				reader := bytes.NewReader(tc.encodedNode)
				newNode, err := flattener.ReadNode(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
					return nil, fmt.Errorf("no call expected")
				})
				require.NoError(t, err)
				assert.Equal(t, tc.node, newNode)
				assert.Equal(t, 0, reader.Len())
			}
		})
	}
}

// TestRandomLeafNodeEncodingDecoding checks encoding/decoding round trips for
// randomly generated leaf nodes, using a scratch buffer that is sometimes
// smaller than the encoded node.
func TestRandomLeafNodeEncodingDecoding(t *testing.T) {
	const count = 1000
	const minPayloadSize = 40
	const maxPayloadSize = 1024 * 2
	// scratchBufferSize is intentionally small here to test
	// when encoded node size is sometimes larger than scratch buffer.
	const scratchBufferSize = 512

	paths := testutils.RandomPaths(count)
	payloads := testutils.RandomPayloads(count, minPayloadSize, maxPayloadSize)

	writeScratch := make([]byte, scratchBufferSize)
	readScratch := make([]byte, scratchBufferSize)

	for i := 0; i < count; i++ {
		height := rand.Intn(257)

		var hashValue hash.Hash
		rand.Read(hashValue[:])

		n := node.NewNode(height, nil, nil, paths[i], payloads[i], hashValue)

		encodedNode := flattener.EncodeNode(n, 0, 0, writeScratch)

		if len(writeScratch) >= len(encodedNode) {
			// reuse scratch buffer
			require.True(t, &writeScratch[0] == &encodedNode[0])
		} else {
			// new alloc because scratch buffer isn't big enough
			require.True(t, &writeScratch[0] != &encodedNode[0])
		}

		reader := bytes.NewReader(encodedNode)
		newNode, err := flattener.ReadNode(reader, readScratch, func(nodeIndex uint64) (*node.Node, error) {
			return nil, fmt.Errorf("no call expected")
		})
		require.NoError(t, err)
		assert.Equal(t, n, newNode)
		assert.Equal(t, 0, reader.Len())
	}
}

// TestInterimNodeEncodingDecoding checks that an interim node encodes its
// children as node indices and decodes back by resolving those indices
// through the provided child-node callback.
func TestInterimNodeEncodingDecoding(t *testing.T) {

	const lchildIndex = 1
	const rchildIndex = 2

	// Child node
	path1 := testutils.PathByUint8(0)
	payload1 := testutils.LightPayload8('A', 'a')
	hashValue1 := hash.Hash([32]byte{1, 1, 1})
	leafNode1 := node.NewNode(255, nil, nil, ledger.Path(path1), payload1, hashValue1)

	// Child node
	path2 := testutils.PathByUint8(1)
	payload2 := testutils.LightPayload8('B', 'b')
	hashValue2 := hash.Hash([32]byte{2, 2, 2})
	leafNode2 := node.NewNode(255, nil, nil, ledger.Path(path2), payload2, hashValue2)

	// Interim node
	hashValue3 := hash.Hash([32]byte{3, 3, 3})
	interimNode := node.NewNode(256, leafNode1, leafNode2, ledger.DummyPath, nil, hashValue3)

	encodedInterimNode := []byte{
		0x01,       // node type
		0x01, 0x00, // height
		0x03, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // LIndex
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // RIndex
	}

	t.Run("encode", func(t *testing.T) {
		scratchBuffers := [][]byte{
			nil,
			make([]byte, 0),
			make([]byte, 16),
			make([]byte, 1024),
		}

		for _, scratch := range scratchBuffers {
			data := flattener.EncodeNode(interimNode, lchildIndex, rchildIndex, scratch)
			assert.Equal(t, encodedInterimNode, data)
		}
	})

	t.Run("decode", func(t *testing.T) {
		scratchBuffers := [][]byte{
			nil,
			make([]byte, 0),
			make([]byte, 16),
			make([]byte, 1024),
		}

		for _, scratch := range scratchBuffers {
			reader := bytes.NewReader(encodedInterimNode)
			newNode, err := flattener.ReadNode(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
				switch nodeIndex {
				case lchildIndex:
					return leafNode1, nil
				case rchildIndex:
					return leafNode2, nil
				default:
					return nil, fmt.Errorf("unexpected child node index %d ", nodeIndex)
				}
			})
			require.NoError(t, err)
			assert.Equal(t, interimNode, newNode)
			assert.Equal(t, 0, reader.Len())
		}
	})

	t.Run("decode child node not found error", func(t *testing.T) {
		nodeNotFoundError := errors.New("failed to find node by index")
		scratch := make([]byte, 1024)

		reader := bytes.NewReader(encodedInterimNode)
		newNode, err := flattener.ReadNode(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
			return nil, nodeNotFoundError
		})
		require.Nil(t, newNode)
		require.ErrorIs(t, err, nodeNotFoundError)
	})
}

// TestTrieEncodingDecoding checks that empty and non-empty tries encode the
// root node index, register count, register size, and root hash as expected,
// and decode back by resolving the root node through the provided callback.
func TestTrieEncodingDecoding(t *testing.T) {
	rootNodeNilIndex := uint64(20)

	hashValue := hash.Hash([32]byte{2, 2, 2})
	rootNode := node.NewNode(256, nil, nil, ledger.DummyPath, nil, hashValue)
	rootNodeIndex := uint64(21)

	mtrie, err := trie.NewMTrie(rootNode, 7, 1234)
	require.NoError(t, err)

	encodedNilTrie := []byte{
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, // root index
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reg count
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reg size
		0x56, 0x8f, 0x4e, 0xc7, 0x40, 0xfe, 0x3b, 0x5d,
		0xe8, 0x80, 0x34, 0xcb, 0x7b, 0x1f, 0xbd, 0xdb,
		0x41, 0x54, 0x8b, 0x06, 0x8f, 0x31, 0xae, 0xbc,
		0x8a, 0xe9, 0x18, 0x9e, 0x42, 0x9c, 0x57, 0x49, // hash data
	}

	encodedTrie := []byte{
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x15, // root index
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, // reg count
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0xd2, // reg size
		0x02, 0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // hash data
	}

	testCases := []struct {
		name          string
		trie          *trie.MTrie
		rootNodeIndex uint64
		encodedTrie   []byte
	}{
		{"empty trie", trie.NewEmptyMTrie(), rootNodeNilIndex, encodedNilTrie},
		{"trie", mtrie, rootNodeIndex, encodedTrie},
	}

	for _, tc := range testCases {

		t.Run("encode "+tc.name, func(t *testing.T) {
			scratchBuffers := [][]byte{
				nil,
				make([]byte, 0),
				make([]byte, 16),
				make([]byte, 1024),
			}

			for _, scratch := range scratchBuffers {
				encodedTrie := flattener.EncodeTrie(tc.trie, tc.rootNodeIndex, scratch)
				assert.Equal(t, tc.encodedTrie, encodedTrie)

				if len(scratch) > 0 {
					if len(scratch) >= len(encodedTrie) {
						// reuse scratch buffer
						require.True(t, &scratch[0] == &encodedTrie[0])
					} else {
						// new alloc
						require.True(t, &scratch[0] != &encodedTrie[0])
					}
				}
			}
		})

		t.Run("decode "+tc.name, func(t *testing.T) {
			scratchBuffers := [][]byte{
				nil,
				make([]byte, 0),
				make([]byte, 16),
				make([]byte, 1024),
			}

			for _, scratch := range scratchBuffers {
				reader := bytes.NewReader(tc.encodedTrie)
				trie, err := flattener.ReadTrie(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
					if nodeIndex != tc.rootNodeIndex {
						return nil, fmt.Errorf("unexpected root node index %d ", nodeIndex)
					}
					return tc.trie.RootNode(), nil
				})
				require.NoError(t, err)
				assert.Equal(t, tc.trie, trie)
				assert.Equal(t, tc.trie.AllocatedRegCount(), trie.AllocatedRegCount())
				assert.Equal(t, tc.trie.AllocatedRegSize(), trie.AllocatedRegSize())
				assert.Equal(t, 0, reader.Len())
			}
		})

		t.Run("decode "+tc.name+" node not found error", func(t *testing.T) {
			nodeNotFoundError := errors.New("failed to find node by index")
			scratch := make([]byte, 1024)

			reader := bytes.NewReader(tc.encodedTrie)
			newNode, err := flattener.ReadTrie(reader, scratch, func(nodeIndex uint64) (*node.Node, error) {
				return nil, nodeNotFoundError
			})
			require.Nil(t, newNode)
			require.ErrorIs(t, err, nodeNotFoundError)
		})
	}
}