github.com/MetalBlockchain/metalgo@v1.11.9/x/merkledb/codec_test.go

// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved.
// See the file LICENSE for licensing terms.

package merkledb

import (
	"encoding/binary"
	"io"
	"math"
	"math/rand"
	"strconv"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/MetalBlockchain/metalgo/ids"
	"github.com/MetalBlockchain/metalgo/utils/maybe"
)

var (
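	// The expectedBytes in each case below spell out the node encoding these
	// tests exercise: a value-presence flag, then (when a value is present)
	// the value's length and bytes, then the number of children, and for each
	// child in ascending index order its index, the compressed key's length
	// in bits followed by its bytes, the 32-byte child ID, and a hasValue
	// flag.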
	encodeDBNodeTests = []struct {
		name          string
		n             *dbNode
		expectedBytes []byte
	}{
		{
			name: "empty node",
			n: &dbNode{
				children: make(map[byte]*child),
			},
			expectedBytes: []byte{
				0x00, // value.HasValue()
				0x00, // len(children)
			},
		},
		{
			name: "has value",
			n: &dbNode{
				value:    maybe.Some([]byte("value")),
				children: make(map[byte]*child),
			},
			expectedBytes: []byte{
				0x01,                    // value.HasValue()
				0x05,                    // len(value.Value())
				'v', 'a', 'l', 'u', 'e', // value.Value()
				0x00, // len(children)
			},
		},
		{
			name: "1 child",
			n: &dbNode{
				value: maybe.Some([]byte("value")),
				children: map[byte]*child{
					0: {
						compressedKey: ToKey([]byte{0}),
						id: ids.ID{
							0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
							0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
							0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
							0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
						},
						hasValue: true,
					},
				},
			},
			expectedBytes: []byte{
				0x01,                    // value.HasValue()
				0x05,                    // len(value.Value())
				'v', 'a', 'l', 'u', 'e', // value.Value()
				0x01, // len(children)
				0x00, // children[0].index
				0x08, // len(children[0].compressedKey)
				0x00, // children[0].compressedKey
				// children[0].id
				0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
				0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
				0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
				0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
				0x01, // children[0].hasValue
			},
		},
		{
			name: "2 children",
			n: &dbNode{
				value: maybe.Some([]byte("value")),
				children: map[byte]*child{
					0: {
						compressedKey: ToKey([]byte{0}),
						id: ids.ID{
							0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
							0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
							0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
							0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
						},
						hasValue: true,
					},
					1: {
						compressedKey: ToKey([]byte{1, 2, 3}),
						id: ids.ID{
							0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
							0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
							0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
							0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
						},
						hasValue: false,
					},
				},
			},
			expectedBytes: []byte{
				0x01,                    // value.HasValue()
				0x05,                    // len(value.Value())
				'v', 'a', 'l', 'u', 'e', // value.Value()
				0x02, // len(children)
				0x00, // children[0].index
				0x08, // len(children[0].compressedKey)
				0x00, // children[0].compressedKey
				// children[0].id
				0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
				0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
				0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
				0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
				0x01,             // children[0].hasValue
				0x01,             // children[1].index
				0x18,             // len(children[1].compressedKey)
				0x01, 0x02, 0x03, // children[1].compressedKey
				// children[1].id
				0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
				0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
				0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
				0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
				0x00, // children[1].hasValue
			},
		},
		{
			name: "16 children",
			n: func() *dbNode {
				n := &dbNode{
					value:    maybe.Some([]byte("value")),
					children: make(map[byte]*child),
				}
				for i := byte(0); i < 16; i++ {
					n.children[i] = &child{
						compressedKey: ToKey([]byte{i}),
						id: ids.ID{
							0x00 + i, 0x01 + i, 0x02 + i, 0x03 + i,
							0x04 + i, 0x05 + i, 0x06 + i, 0x07 + i,
							0x08 + i, 0x09 + i, 0x0a + i, 0x0b + i,
							0x0c + i, 0x0d + i, 0x0e + i, 0x0f + i,
							0x10 + i, 0x11 + i, 0x12 + i, 0x13 + i,
							0x14 + i, 0x15 + i, 0x16 + i, 0x17 + i,
							0x18 + i, 0x19 + i, 0x1a + i, 0x1b + i,
							0x1c + i, 0x1d + i, 0x1e + i, 0x1f + i,
						},
						hasValue: i%2 == 0,
					}
				}
				return n
			}(),
			expectedBytes: []byte{
				0x01,                    // value.HasValue()
				0x05,                    // len(value.Value())
				'v', 'a', 'l', 'u', 'e', // value.Value()
				0x10, // len(children)
				0x00, // children[0].index
				0x08, // len(children[0].compressedKey)
				0x00, // children[0].compressedKey
				// children[0].id
				0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
				0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
				0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
				0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
				0x01, // children[0].hasValue
				0x01, // children[1].index
				0x08, // len(children[1].compressedKey)
				0x01, // children[1].compressedKey
				// children[1].id
				0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
				0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
				0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
				0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
				0x00, // children[1].hasValue
				0x02, // children[2].index
				0x08, // len(children[2].compressedKey)
				0x02, // children[2].compressedKey
				// children[2].id
				0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
				0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11,
				0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19,
				0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21,
				0x01, // children[2].hasValue
				0x03, // children[3].index
				0x08, // len(children[3].compressedKey)
				0x03, // children[3].compressedKey
				// children[3].id
				0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
				0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12,
				0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
				0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22,
				0x00, // children[3].hasValue
				0x04, // children[4].index
				0x08, // len(children[4].compressedKey)
				0x04, // children[4].compressedKey
				// children[4].id
				0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
				0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
				0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
				0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,
				0x01, // children[4].hasValue
				0x05, // children[5].index
				0x08, // len(children[5].compressedKey)
				0x05, // children[5].compressedKey
				// children[5].id
				0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c,
				0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14,
				0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c,
				0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24,
				0x00, // children[5].hasValue
				0x06, // children[6].index
				0x08, // len(children[6].compressedKey)
				0x06, // children[6].compressedKey
				// children[6].id
				0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d,
				0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15,
				0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
				0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25,
				0x01, // children[6].hasValue
				0x07, // children[7].index
				0x08, // len(children[7].compressedKey)
				0x07, // children[7].compressedKey
				// children[7].id
				0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
				0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16,
				0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e,
				0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26,
				0x00, // children[7].hasValue
				0x08, // children[8].index
				0x08, // len(children[8].compressedKey)
				0x08, // children[8].compressedKey
				// children[8].id
				0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
				0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
				0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
				0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
				0x01, // children[8].hasValue
				0x09, // children[9].index
				0x08, // len(children[9].compressedKey)
				0x09, // children[9].compressedKey
				// children[9].id
				0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
				0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
				0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
				0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
				0x00, // children[9].hasValue
				0x0a, // children[10].index
				0x08, // len(children[10].compressedKey)
				0x0a, // children[10].compressedKey
				// children[10].id
				0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11,
				0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19,
				0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21,
				0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29,
				0x01, // children[10].hasValue
				0x0b, // children[11].index
				0x08, // len(children[11].compressedKey)
				0x0b, // children[11].compressedKey
				// children[11].id
				0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12,
				0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
				0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22,
				0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a,
				0x00, // children[11].hasValue
				0x0c, // children[12].index
				0x08, // len(children[12].compressedKey)
				0x0c, // children[12].compressedKey
				// children[12].id
				0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
				0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
				0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,
				0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b,
				0x01, // children[12].hasValue
				0x0d, // children[13].index
				0x08, // len(children[13].compressedKey)
				0x0d, // children[13].compressedKey
				// children[13].id
				0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14,
				0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c,
				0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24,
				0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c,
				0x00, // children[13].hasValue
				0x0e, // children[14].index
				0x08, // len(children[14].compressedKey)
				0x0e, // children[14].compressedKey
				// children[14].id
				0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15,
				0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
				0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25,
				0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d,
				0x01, // children[14].hasValue
				0x0f, // children[15].index
				0x08, // len(children[15].compressedKey)
				0x0f, // children[15].compressedKey
				// children[15].id
				0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16,
				0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e,
				0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26,
				0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e,
				0x00, // children[15].hasValue
			},
		},
	}
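	// Keys are encoded as their length in bits, written as a uvarint,
	// followed by the key bytes; e.g. the 32-byte case below begins with
	// 0x80, 0x02, the uvarint encoding of 256 bits.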
	encodeKeyTests = []struct {
		name          string
		key           Key
		expectedBytes []byte
	}{
		{
			name: "empty",
			key:  ToKey([]byte{}),
			expectedBytes: []byte{
				0x00, // length
			},
		},
		{
			name: "1 byte",
			key:  ToKey([]byte{0}),
			expectedBytes: []byte{
				0x08, // length
				0x00, // key
			},
		},
		{
			name: "2 bytes",
			key:  ToKey([]byte{0, 1}),
			expectedBytes: []byte{
				0x10,       // length
				0x00, 0x01, // key
			},
		},
		{
			name: "4 bytes",
			key:  ToKey([]byte{0, 1, 2, 3}),
			expectedBytes: []byte{
				0x20,                   // length
				0x00, 0x01, 0x02, 0x03, // key
			},
		},
		{
			name: "8 bytes",
			key:  ToKey([]byte{0, 1, 2, 3, 4, 5, 6, 7}),
			expectedBytes: []byte{
				0x40,                                           // length
				0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, // key
			},
		},
		{
			name: "32 bytes",
			key:  ToKey(make([]byte, 32)),
			expectedBytes: append(
				[]byte{
					0x80, 0x02, // length
				},
				make([]byte, 32)..., // key
			),
		},
		{
			name: "64 bytes",
			key:  ToKey(make([]byte, 64)),
			expectedBytes: append(
				[]byte{
					0x80, 0x04, // length
				},
				make([]byte, 64)..., // key
			),
		},
		{
			name: "1024 bytes",
			key:  ToKey(make([]byte, 1024)),
			expectedBytes: append(
				[]byte{
					0x80, 0x40, // length
				},
				make([]byte, 1024)..., // key
			),
		},
	}
)

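// The FuzzCodec* fuzzers below feed arbitrary bytes to the codec readers and,
// whenever decoding succeeds, require that re-encoding the decoded value
// reproduces exactly the bytes that were consumed.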
func FuzzCodecBool(f *testing.F) {
	f.Fuzz(
		func(
			t *testing.T,
			b []byte,
		) {
			require := require.New(t)

			r := codecReader{
				b: b,
			}
			startLen := len(r.b)
			got, err := r.Bool()
			if err != nil {
				t.SkipNow()
			}
			endLen := len(r.b)
			numRead := startLen - endLen

			// Encoding [got] should be the same as [b].
			w := codecWriter{}
			w.Bool(got)
			require.Len(w.b, numRead)
			require.Equal(b[:numRead], w.b)
		},
	)
}

func FuzzCodecInt(f *testing.F) {
	f.Fuzz(
		func(
			t *testing.T,
			b []byte,
		) {
			require := require.New(t)

			c := codecReader{
				b: b,
			}
			startLen := len(c.b)
			got, err := c.Uvarint()
			if err != nil {
				t.SkipNow()
			}
			endLen := len(c.b)
			numRead := startLen - endLen

			// Encoding [got] should be the same as [b].
			w := codecWriter{}
			w.Uvarint(got)
			require.Len(w.b, numRead)
			require.Equal(b[:numRead], w.b)
		},
	)
}

func FuzzCodecKey(f *testing.F) {
	f.Fuzz(
		func(
			t *testing.T,
			b []byte,
		) {
			require := require.New(t)
			got, err := decodeKey(b)
			if err != nil {
				t.SkipNow()
			}

			// Encoding [got] should be the same as [b].
			gotBytes := encodeKey(got)
			require.Equal(b, gotBytes)
		},
	)
}

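// Any byte slice that successfully decodes into a dbNode must re-encode to the
// identical byte slice, i.e. every node has a single canonical encoding.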
func FuzzCodecDBNodeCanonical(f *testing.F) {
	f.Fuzz(
		func(
			t *testing.T,
			b []byte,
		) {
			require := require.New(t)
			node := &dbNode{}
			if err := decodeDBNode(b, node); err != nil {
				t.SkipNow()
			}

			// Encoding [node] should be the same as [b].
			buf := encodeDBNode(node)
			require.Equal(b, buf)
		},
	)
}

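// Randomly generated nodes must round-trip through encodeDBNode/decodeDBNode
// for every supported branch factor, the encoded length must match
// encodedDBNodeSize, and the decoded node must not alias the encoded buffer.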
func FuzzCodecDBNodeDeterministic(f *testing.F) {
	f.Fuzz(
		func(
			t *testing.T,
			randSeed int,
			hasValue bool,
			valueBytes []byte,
		) {
			require := require.New(t)
			for _, bf := range validBranchFactors {
				r := rand.New(rand.NewSource(int64(randSeed))) // #nosec G404

				value := maybe.Nothing[[]byte]()
				if hasValue {
					value = maybe.Some(valueBytes)
				}

				numChildren := r.Intn(int(bf)) // #nosec G404

				children := map[byte]*child{}
				for i := 0; i < numChildren; i++ {
					var childID ids.ID
					_, _ = r.Read(childID[:]) // #nosec G404

					childKeyBytes := make([]byte, r.Intn(32)) // #nosec G404
					_, _ = r.Read(childKeyBytes)              // #nosec G404

					children[byte(i)] = &child{
						compressedKey: ToKey(childKeyBytes),
						id:            childID,
					}
				}
				node := dbNode{
					value:    value,
					children: children,
				}

				nodeBytes := encodeDBNode(&node)
				require.Len(nodeBytes, encodedDBNodeSize(&node))
				var gotNode dbNode
				require.NoError(decodeDBNode(nodeBytes, &gotNode))
				require.Equal(node, gotNode)

				nodeBytes2 := encodeDBNode(&gotNode)
				require.Equal(nodeBytes, nodeBytes2)

				// Enforce that modifying bytes after decodeDBNode doesn't
				// modify the populated struct.
				clear(nodeBytes)
				require.Equal(node, gotNode)
			}
		},
	)
}

func TestCodecDecodeDBNode_TooShort(t *testing.T) {
	require := require.New(t)

	var (
		parsedDBNode  dbNode
		tooShortBytes = make([]byte, 1)
	)
	err := decodeDBNode(tooShortBytes, &parsedDBNode)
	require.ErrorIs(err, io.ErrUnexpectedEOF)
}

func TestEncodeDBNode(t *testing.T) {
	for _, test := range encodeDBNodeTests {
		t.Run(test.name, func(t *testing.T) {
			bytes := encodeDBNode(test.n)
			require.Equal(t, test.expectedBytes, bytes)
		})
	}
}

func TestDecodeDBNode(t *testing.T) {
	for _, test := range encodeDBNodeTests {
		t.Run(test.name, func(t *testing.T) {
			require := require.New(t)

			var n dbNode
			require.NoError(decodeDBNode(test.expectedBytes, &n))
			require.Equal(test.n, &n)
		})
	}
}

func TestEncodeKey(t *testing.T) {
	for _, test := range encodeKeyTests {
		t.Run(test.name, func(t *testing.T) {
			bytes := encodeKey(test.key)
			require.Equal(t, test.expectedBytes, bytes)
		})
	}
}

func TestDecodeKey(t *testing.T) {
	for _, test := range encodeKeyTests {
		t.Run(test.name, func(t *testing.T) {
			require := require.New(t)

			key, err := decodeKey(test.expectedBytes)
			require.NoError(err)
			require.Equal(test.key, key)
		})
	}
}

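// Regression test: a key whose declared bit length is math.MaxInt must be
// rejected with io.ErrUnexpectedEOF rather than tripping a length overflow.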
func TestCodecDecodeKeyLengthOverflowRegression(t *testing.T) {
	_, err := decodeKey(binary.AppendUvarint(nil, math.MaxInt))
	require.ErrorIs(t, err, io.ErrUnexpectedEOF)
}

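// uintSize must report exactly the number of bytes binary.PutUvarint writes,
// at both bounds and at every power of two.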
func TestUintSize(t *testing.T) {
	// Test lower bound
	expectedSize := uintSize(0)
	actualSize := binary.PutUvarint(make([]byte, binary.MaxVarintLen64), 0)
	require.Equal(t, expectedSize, actualSize)

	// Test upper bound
	expectedSize = uintSize(math.MaxUint64)
	actualSize = binary.PutUvarint(make([]byte, binary.MaxVarintLen64), math.MaxUint64)
	require.Equal(t, expectedSize, actualSize)

	// Test powers of 2
	for power := 0; power < 64; power++ {
		n := uint64(1) << uint(power)
		expectedSize := uintSize(n)
		actualSize := binary.PutUvarint(make([]byte, binary.MaxVarintLen64), n)
		require.Equal(t, expectedSize, actualSize, power)
	}
}

func Benchmark_EncodeDBNode(b *testing.B) {
	for _, benchmark := range encodeDBNodeTests {
		b.Run(benchmark.name, func(b *testing.B) {
			for i := 0; i < b.N; i++ {
				encodeDBNode(benchmark.n)
			}
		})
	}
}

func Benchmark_DecodeDBNode(b *testing.B) {
	for _, benchmark := range encodeDBNodeTests {
		b.Run(benchmark.name, func(b *testing.B) {
			for i := 0; i < b.N; i++ {
				var n dbNode
				err := decodeDBNode(benchmark.expectedBytes, &n)
				require.NoError(b, err)
			}
		})
	}
}

func Benchmark_EncodeKey(b *testing.B) {
	for _, benchmark := range encodeKeyTests {
		b.Run(benchmark.name, func(b *testing.B) {
			for i := 0; i < b.N; i++ {
				encodeKey(benchmark.key)
			}
		})
	}
}

func Benchmark_DecodeKey(b *testing.B) {
	for _, benchmark := range encodeKeyTests {
		b.Run(benchmark.name, func(b *testing.B) {
			for i := 0; i < b.N; i++ {
				_, err := decodeKey(benchmark.expectedBytes)
				require.NoError(b, err)
			}
		})
	}
}

func Benchmark_EncodeUint(b *testing.B) {
	w := codecWriter{
		b: make([]byte, 0, binary.MaxVarintLen64),
	}

	for _, v := range []uint64{0, 1, 2, 32, 1024, 32768} {
		b.Run(strconv.FormatUint(v, 10), func(b *testing.B) {
			for i := 0; i < b.N; i++ {
				w.Uvarint(v)
				w.b = w.b[:0]
			}
		})
	}
}