github.com/onflow/atree@v0.6.0/map_test.go

     1  /*
     2   * Atree - Scalable Arrays and Ordered Maps
     3   *
     4   * Copyright 2021 Dapper Labs, Inc.
     5   *
     6   * Licensed under the Apache License, Version 2.0 (the "License");
     7   * you may not use this file except in compliance with the License.
     8   * You may obtain a copy of the License at
     9   *
    10   *   http://www.apache.org/licenses/LICENSE-2.0
    11   *
    12   * Unless required by applicable law or agreed to in writing, software
    13   * distributed under the License is distributed on an "AS IS" BASIS,
    14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    15   * See the License for the specific language governing permissions and
    16   * limitations under the License.
    17   */
    18  
    19  package atree
    20  
    21  import (
    22  	"errors"
    23  	"fmt"
    24  	"math"
    25  	"math/rand"
    26  	"reflect"
    27  	"sort"
    28  	"strings"
    29  	"testing"
    30  
    31  	"github.com/stretchr/testify/mock"
    32  	"github.com/stretchr/testify/require"
    33  )
    34  
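        // mockDigesterBuilder is a testify mock: tests register a mockDigester for
        // each key with On("Digest", key), so digests are fully controlled and hash
        // collisions can be forced deterministically.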
    35  type mockDigesterBuilder struct {
    36  	mock.Mock
    37  }
    38  
    39  var _ DigesterBuilder = &mockDigesterBuilder{}
    40  
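        // mockDigester serves canned digests from d: Digest(level) returns the
        // digest at that level, DigestPrefix(level) returns all digests below the
        // given level, and Levels() reports how many digest levels are available.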
    41  type mockDigester struct {
    42  	d []Digest
    43  }
    44  
    45  var _ Digester = &mockDigester{}
    46  
    47  func (h *mockDigesterBuilder) SetSeed(_ uint64, _ uint64) {
    48  }
    49  
    50  func (h *mockDigesterBuilder) Digest(hip HashInputProvider, value Value) (Digester, error) {
    51  	args := h.Called(value)
    52  	return args.Get(0).(mockDigester), nil
    53  }
    54  
    55  func (d mockDigester) DigestPrefix(level uint) ([]Digest, error) {
    56  	if level > uint(len(d.d)) {
    57  		return nil, fmt.Errorf("digest level %d out of bounds", level)
    58  	}
    59  	return d.d[:level], nil
    60  }
    61  
    62  func (d mockDigester) Digest(level uint) (Digest, error) {
    63  	if level >= uint(len(d.d)) {
    64  		return 0, fmt.Errorf("digest level %d out of bounds", level)
    65  	}
    66  	return d.d[level], nil
    67  }
    68  
    69  func (d mockDigester) Levels() uint {
    70  	return uint(len(d.d))
    71  }
    72  
    73  func (d mockDigester) Reset() {}
    74  
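        // errorDigesterBuilder fails every Digest call with the supplied error; it is
        // used to check that digester errors surface as wrapped ExternalErrors.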
    75  type errorDigesterBuilder struct {
    76  	err error
    77  }
    78  
    79  var _ DigesterBuilder = &errorDigesterBuilder{}
    80  
    81  func newErrorDigesterBuilder(err error) *errorDigesterBuilder {
    82  	return &errorDigesterBuilder{err: err}
    83  }
    84  
    85  func (h *errorDigesterBuilder) SetSeed(_ uint64, _ uint64) {
    86  }
    87  
    88  func (h *errorDigesterBuilder) Digest(hip HashInputProvider, value Value) (Digester, error) {
    89  	return nil, h.err
    90  }
    91  
    92  func verifyEmptyMap(
    93  	t *testing.T,
    94  	storage *PersistentSlabStorage,
    95  	typeInfo TypeInfo,
    96  	address Address,
    97  	m *OrderedMap,
    98  ) {
    99  	verifyMap(t, storage, typeInfo, address, m, nil, nil, false)
   100  }
   101  
   102  // verifyMap verifies the map's elements and validates slab serialization and the in-memory slab tree.
   103  // It also verifies element ordering when sortedKeys is not empty.
   104  func verifyMap(
   105  	t *testing.T,
   106  	storage *PersistentSlabStorage,
   107  	typeInfo TypeInfo,
   108  	address Address,
   109  	m *OrderedMap,
   110  	keyValues map[Value]Value,
   111  	sortedKeys []Value,
   112  	hasNestedArrayMapElement bool,
   113  ) {
   114  	require.True(t, typeInfoComparator(typeInfo, m.Type()))
   115  	require.Equal(t, address, m.Address())
   116  	require.Equal(t, uint64(len(keyValues)), m.Count())
   117  
   118  	var err error
   119  
   120  	// Verify map elements
   121  	for k, v := range keyValues {
   122  		s, err := m.Get(compare, hashInputProvider, k)
   123  		require.NoError(t, err)
   124  
   125  		e, err := s.StoredValue(m.Storage)
   126  		require.NoError(t, err)
   127  
   128  		valueEqual(t, typeInfoComparator, v, e)
   129  	}
   130  
   131  	// Verify map element ordering
   132  	if len(sortedKeys) > 0 {
   133  		require.Equal(t, len(keyValues), len(sortedKeys))
   134  
   135  		i := 0
   136  		err = m.Iterate(func(k, v Value) (bool, error) {
   137  			expectedKey := sortedKeys[i]
   138  			expectedValue := keyValues[expectedKey]
   139  
   140  			valueEqual(t, typeInfoComparator, expectedKey, k)
   141  			valueEqual(t, typeInfoComparator, expectedValue, v)
   142  
   143  			i++
   144  			return true, nil
   145  		})
   146  		require.NoError(t, err)
   147  		require.Equal(t, len(keyValues), i)
   148  	}
   149  
   150  	// Verify in-memory slabs
   151  	err = ValidMap(m, typeInfo, typeInfoComparator, hashInputProvider)
   152  	if err != nil {
   153  		PrintMap(m)
   154  	}
   155  	require.NoError(t, err)
   156  
   157  	// Verify slab serializations
   158  	err = ValidMapSerialization(
   159  		m,
   160  		storage.cborDecMode,
   161  		storage.cborEncMode,
   162  		storage.DecodeStorable,
   163  		storage.DecodeTypeInfo,
   164  		func(a, b Storable) bool {
   165  			return reflect.DeepEqual(a, b)
   166  		},
   167  	)
   168  	if err != nil {
   169  		PrintMap(m)
   170  	}
   171  	require.NoError(t, err)
   172  
   173  	// Check storage slab tree
   174  	rootIDSet, err := CheckStorageHealth(storage, 1)
   175  	require.NoError(t, err)
   176  
   177  	rootIDs := make([]StorageID, 0, len(rootIDSet))
   178  	for id := range rootIDSet {
   179  		rootIDs = append(rootIDs, id)
   180  	}
   181  	require.Equal(t, 1, len(rootIDs))
   182  	require.Equal(t, m.StorageID(), rootIDs[0])
   183  
   184  	if !hasNestedArrayMapElement {
   185  		// Need to call Commit before calling storage.Count() for PersistentSlabStorage.
   186  		err = storage.Commit()
   187  		require.NoError(t, err)
   188  
   189  		stats, err := GetMapStats(m)
   190  		require.NoError(t, err)
   191  		require.Equal(t, stats.SlabCount(), uint64(storage.Count()))
   192  
   193  		if len(keyValues) == 0 {
   194  			// Verify slab count for empty map
   195  			require.Equal(t, uint64(1), stats.DataSlabCount)
   196  			require.Equal(t, uint64(0), stats.MetaDataSlabCount)
   197  			require.Equal(t, uint64(0), stats.StorableSlabCount)
   198  			require.Equal(t, uint64(0), stats.CollisionDataSlabCount)
   199  		}
   200  	}
   201  }
   202  
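        // keysByDigest implements sort.Interface, ordering keys by their full digest
        // sequence (and keeping existing order on full collisions). Tests sort their
        // expected keys with it before comparing against the map's iteration order.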
   203  type keysByDigest struct {
   204  	keys            []Value
   205  	digesterBuilder DigesterBuilder
   206  }
   207  
   208  func (d keysByDigest) Len() int { return len(d.keys) }
   209  
   210  func (d keysByDigest) Swap(i, j int) { d.keys[i], d.keys[j] = d.keys[j], d.keys[i] }
   211  
   212  func (d keysByDigest) Less(i, j int) bool {
   213  	d1, err := d.digesterBuilder.Digest(hashInputProvider, d.keys[i])
   214  	if err != nil {
   215  		panic(err)
   216  	}
   217  
   218  	digest1, err := d1.DigestPrefix(d1.Levels())
   219  	if err != nil {
   220  		panic(err)
   221  	}
   222  
   223  	d2, err := d.digesterBuilder.Digest(hashInputProvider, d.keys[j])
   224  	if err != nil {
   225  		panic(err)
   226  	}
   227  
   228  	digest2, err := d2.DigestPrefix(d2.Levels())
   229  	if err != nil {
   230  		panic(err)
   231  	}
   232  
   233  	for z := 0; z < len(digest1); z++ {
   234  		if digest1[z] != digest2[z] {
   235  			return digest1[z] < digest2[z] // sort by hkey
   236  		}
   237  	}
   238  	return i < j // on full digest collision, keep keys in their current order
   239  }
   240  
   241  func TestMapSetAndGet(t *testing.T) {
   242  
   243  	t.Run("unique keys", func(t *testing.T) {
   244  		// With a 256-byte slab size, 2048 elements, 16-byte random string keys,
   245  		// and small uint64 values, the map tree is expected to be 4 levels deep,
   246  		// with ~35 metadata slabs and ~270 data slabs.
   247  
   248  		SetThreshold(256)
   249  		defer SetThreshold(1024)
   250  
   251  		const (
   252  			mapSize       = 2048
   253  			keyStringSize = 16
   254  		)
   255  
   256  		r := newRand(t)
   257  
   258  		keyValues := make(map[Value]Value, mapSize)
   259  		i := uint64(0)
   260  		for len(keyValues) < mapSize {
   261  			k := NewStringValue(randStr(r, keyStringSize))
   262  			v := Uint64Value(i)
   263  			keyValues[k] = v
   264  			i++
   265  		}
   266  
   267  		typeInfo := testTypeInfo{42}
   268  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   269  		storage := newTestPersistentStorage(t)
   270  
   271  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   272  		require.NoError(t, err)
   273  
   274  		for k, v := range keyValues {
   275  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   276  			require.NoError(t, err)
   277  			require.Nil(t, existingStorable)
   278  		}
   279  
   280  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   281  	})
   282  
   283  	t.Run("replicate keys", func(t *testing.T) {
   284  		SetThreshold(256)
   285  		defer SetThreshold(1024)
   286  
   287  		const (
   288  			mapSize       = 2048
   289  			keyStringSize = 16
   290  		)
   291  
   292  		r := newRand(t)
   293  
   294  		keyValues := make(map[Value]Value, mapSize)
   295  		i := uint64(0)
   296  		for len(keyValues) < mapSize {
   297  			k := NewStringValue(randStr(r, keyStringSize))
   298  			v := Uint64Value(i)
   299  			keyValues[k] = v
   300  			i++
   301  		}
   302  
   303  		typeInfo := testTypeInfo{42}
   304  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   305  		storage := newTestPersistentStorage(t)
   306  
   307  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   308  		require.NoError(t, err)
   309  
   310  		for k, v := range keyValues {
   311  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   312  			require.NoError(t, err)
   313  			require.Nil(t, existingStorable)
   314  		}
   315  
   316  		// Overwrite values
   317  		for k, v := range keyValues {
   318  			oldValue := v.(Uint64Value)
   319  			newValue := Uint64Value(uint64(oldValue) + mapSize)
   320  
   321  			existingStorable, err := m.Set(compare, hashInputProvider, k, newValue)
   322  			require.NoError(t, err)
   323  			require.NotNil(t, existingStorable)
   324  
   325  			existingValue, err := existingStorable.StoredValue(storage)
   326  			require.NoError(t, err)
   327  			valueEqual(t, typeInfoComparator, oldValue, existingValue)
   328  
   329  			keyValues[k] = newValue
   330  		}
   331  
   332  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   333  	})
   334  
   335  	t.Run("random key and value", func(t *testing.T) {
   336  		SetThreshold(256)
   337  		defer SetThreshold(1024)
   338  
   339  		const (
   340  			mapSize          = 2048
   341  			keyStringMaxSize = 1024
   342  		)
   343  
   344  		r := newRand(t)
   345  
   346  		keyValues := make(map[Value]Value, mapSize)
   347  		for len(keyValues) < mapSize {
   348  			slen := r.Intn(keyStringMaxSize)
   349  			k := NewStringValue(randStr(r, slen))
   350  			v := randomValue(r, int(maxInlineMapElementSize))
   351  			keyValues[k] = v
   352  		}
   353  
   354  		typeInfo := testTypeInfo{42}
   355  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   356  		storage := newTestPersistentStorage(t)
   357  
   358  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   359  		require.NoError(t, err)
   360  
   361  		for k, v := range keyValues {
   362  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   363  			require.NoError(t, err)
   364  			require.Nil(t, existingStorable)
   365  		}
   366  
   367  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   368  	})
   369  
   370  	t.Run("unique keys with hash collision", func(t *testing.T) {
   371  
   372  		const (
   373  			mapSize       = 1024
   374  			keyStringSize = 16
   375  		)
   376  
   377  		SetThreshold(256)
   378  		defer SetThreshold(1024)
   379  
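        		// Every key below is assigned one of only 10 digests, so raise
        		// MaxCollisionLimitPerDigest to allow ~mapSize/10 collisions per digest.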
   380  		savedMaxCollisionLimitPerDigest := MaxCollisionLimitPerDigest
   381  		MaxCollisionLimitPerDigest = uint32(math.Ceil(float64(mapSize) / 10))
   382  		defer func() {
   383  			MaxCollisionLimitPerDigest = savedMaxCollisionLimitPerDigest
   384  		}()
   385  
   386  		r := newRand(t)
   387  
   388  		digesterBuilder := &mockDigesterBuilder{}
   389  		keyValues := make(map[Value]Value, mapSize)
   390  		i := uint64(0)
   391  		for len(keyValues) < mapSize {
   392  			k := NewStringValue(randStr(r, keyStringSize))
   393  			v := Uint64Value(i)
   394  			keyValues[k] = v
   395  			i++
   396  
   397  			digests := []Digest{
   398  				Digest(i % 10),
   399  			}
   400  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
   401  		}
   402  
   403  		typeInfo := testTypeInfo{42}
   404  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   405  		storage := newTestPersistentStorage(t)
   406  
   407  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   408  		require.NoError(t, err)
   409  
   410  		for k, v := range keyValues {
   411  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   412  			require.NoError(t, err)
   413  			require.Nil(t, existingStorable)
   414  		}
   415  
   416  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   417  	})
   418  
   419  	t.Run("replicate keys with hash collision", func(t *testing.T) {
   420  		const (
   421  			mapSize       = 1024
   422  			keyStringSize = 16
   423  		)
   424  
   425  		SetThreshold(256)
   426  		defer SetThreshold(1024)
   427  
   428  		savedMaxCollisionLimitPerDigest := MaxCollisionLimitPerDigest
   429  		MaxCollisionLimitPerDigest = uint32(math.Ceil(float64(mapSize) / 10))
   430  		defer func() {
   431  			MaxCollisionLimitPerDigest = savedMaxCollisionLimitPerDigest
   432  		}()
   433  
   434  		r := newRand(t)
   435  
   436  		digesterBuilder := &mockDigesterBuilder{}
   437  		keyValues := make(map[Value]Value, mapSize)
   438  		i := uint64(0)
   439  		for len(keyValues) < mapSize {
   440  			k := NewStringValue(randStr(r, keyStringSize))
   441  			v := Uint64Value(i)
   442  			keyValues[k] = v
   443  			i++
   444  
   445  			digests := []Digest{
   446  				Digest(i % 10),
   447  			}
   448  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
   449  		}
   450  
   451  		typeInfo := testTypeInfo{42}
   452  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   453  		storage := newTestPersistentStorage(t)
   454  
   455  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   456  		require.NoError(t, err)
   457  
   458  		for k, v := range keyValues {
   459  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   460  			require.NoError(t, err)
   461  			require.Nil(t, existingStorable)
   462  		}
   463  
   464  		// Overwrite values
   465  		for k, v := range keyValues {
   466  			oldValue := v.(Uint64Value)
   467  			newValue := Uint64Value(uint64(oldValue) + mapSize)
   468  
   469  			existingStorable, err := m.Set(compare, hashInputProvider, k, newValue)
   470  			require.NoError(t, err)
   471  			require.NotNil(t, existingStorable)
   472  
   473  			existingValue, err := existingStorable.StoredValue(storage)
   474  			require.NoError(t, err)
   475  			valueEqual(t, typeInfoComparator, oldValue, existingValue)
   476  
   477  			keyValues[k] = newValue
   478  		}
   479  
   480  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   481  	})
   482  }
   483  
   484  func TestMapGetKeyNotFound(t *testing.T) {
   485  	t.Run("no collision", func(t *testing.T) {
   486  		const mapSize = 1024
   487  
   488  		typeInfo := testTypeInfo{42}
   489  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   490  		storage := newTestPersistentStorage(t)
   491  
   492  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   493  		require.NoError(t, err)
   494  
   495  		keyValues := make(map[Value]Value, mapSize)
   496  		for i := 0; i < mapSize; i++ {
   497  			k := Uint64Value(i)
   498  			v := Uint64Value(i)
   499  			keyValues[k] = v
   500  			storable, err := m.Set(compare, hashInputProvider, k, v)
   501  			require.NoError(t, err)
   502  			require.Nil(t, storable)
   503  		}
   504  
   505  		r := newRand(t)
   506  
   507  		k := NewStringValue(randStr(r, 1024))
   508  		storable, err := m.Get(compare, hashInputProvider, k)
   509  		require.Nil(t, storable)
   510  		require.Equal(t, 1, errorCategorizationCount(err))
   511  		var userError *UserError
   512  		var keyNotFoundError *KeyNotFoundError
   513  		require.ErrorAs(t, err, &userError)
   514  		require.ErrorAs(t, err, &keyNotFoundError)
   515  		require.ErrorAs(t, userError, &keyNotFoundError)
   516  
   517  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   518  	})
   519  
   520  	t.Run("collision", func(t *testing.T) {
   521  		const mapSize = 256
   522  
   523  		typeInfo := testTypeInfo{42}
   524  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   525  		storage := newTestPersistentStorage(t)
   526  		digesterBuilder := &mockDigesterBuilder{}
   527  
   528  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   529  		require.NoError(t, err)
   530  
   531  		keyValues := make(map[Value]Value, mapSize)
   532  		for i := 0; i < mapSize; i++ {
   533  			k := Uint64Value(i)
   534  			v := Uint64Value(i)
   535  			keyValues[k] = v
   536  
   537  			digests := []Digest{Digest(i)}
   538  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
   539  
   540  			storable, err := m.Set(compare, hashInputProvider, k, v)
   541  			require.NoError(t, err)
   542  			require.Nil(t, storable)
   543  		}
   544  
   545  		r := newRand(t)
   546  		k := NewStringValue(randStr(r, 1024))
   547  
   548  		digests := []Digest{Digest(0)}
   549  		digesterBuilder.On("Digest", k).Return(mockDigester{digests})
   550  
   551  		storable, err := m.Get(compare, hashInputProvider, k)
   552  		require.Nil(t, storable)
   553  		require.Equal(t, 1, errorCategorizationCount(err))
   554  		var userError *UserError
   555  		var keyNotFoundError *KeyNotFoundError
   556  		require.ErrorAs(t, err, &userError)
   557  		require.ErrorAs(t, err, &keyNotFoundError)
   558  		require.ErrorAs(t, userError, &keyNotFoundError)
   559  
   560  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   561  	})
   562  
   563  	t.Run("collision group", func(t *testing.T) {
   564  		const mapSize = 256
   565  
   566  		typeInfo := testTypeInfo{42}
   567  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   568  		storage := newTestPersistentStorage(t)
   569  		digesterBuilder := &mockDigesterBuilder{}
   570  
   571  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   572  		require.NoError(t, err)
   573  
   574  		keyValues := make(map[Value]Value, mapSize)
   575  		for i := 0; i < mapSize; i++ {
   576  			k := Uint64Value(i)
   577  			v := Uint64Value(i)
   578  			keyValues[k] = v
   579  
   580  			digests := []Digest{Digest(i % 10)}
   581  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
   582  
   583  			storable, err := m.Set(compare, hashInputProvider, k, v)
   584  			require.NoError(t, err)
   585  			require.Nil(t, storable)
   586  		}
   587  
   588  		r := newRand(t)
   589  		k := NewStringValue(randStr(r, 1024))
   590  
   591  		digests := []Digest{Digest(0)}
   592  		digesterBuilder.On("Digest", k).Return(mockDigester{digests})
   593  
   594  		storable, err := m.Get(compare, hashInputProvider, k)
   595  		require.Nil(t, storable)
   596  		require.Equal(t, 1, errorCategorizationCount(err))
   597  		var userError *UserError
   598  		var keyNotFoundError *KeyNotFoundError
   599  		require.ErrorAs(t, err, &userError)
   600  		require.ErrorAs(t, err, &keyNotFoundError)
   601  		require.ErrorAs(t, userError, &keyNotFoundError)
   602  
   603  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
   604  	})
   605  }
   606  
   607  func TestMapHas(t *testing.T) {
   608  
   609  	t.Run("no error", func(t *testing.T) {
   610  		const (
   611  			mapSize       = 2048
   612  			keyStringSize = 16
   613  		)
   614  
   615  		r := newRand(t)
   616  
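        		// Generate 2*mapSize unique keys: the first mapSize are inserted into the
        		// map, the rest are used to check Has for keys that were never inserted.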
   617  		keys := make(map[Value]bool, mapSize*2)
   618  		keysToInsert := make([]Value, 0, mapSize)
   619  		keysToNotInsert := make([]Value, 0, mapSize)
   620  		for len(keysToInsert) < mapSize || len(keysToNotInsert) < mapSize {
   621  			k := NewStringValue(randStr(r, keyStringSize))
   622  			if !keys[k] {
   623  				keys[k] = true
   624  
   625  				if len(keysToInsert) < mapSize {
   626  					keysToInsert = append(keysToInsert, k)
   627  				} else {
   628  					keysToNotInsert = append(keysToNotInsert, k)
   629  				}
   630  			}
   631  		}
   632  
   633  		typeInfo := testTypeInfo{42}
   634  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   635  		storage := newTestPersistentStorage(t)
   636  
   637  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   638  		require.NoError(t, err)
   639  
   640  		for i, k := range keysToInsert {
   641  			existingStorable, err := m.Set(compare, hashInputProvider, k, Uint64Value(i))
   642  			require.NoError(t, err)
   643  			require.Nil(t, existingStorable)
   644  		}
   645  
   646  		for _, k := range keysToInsert {
   647  			exist, err := m.Has(compare, hashInputProvider, k)
   648  			require.NoError(t, err)
   649  			require.True(t, exist)
   650  		}
   651  
   652  		for _, k := range keysToNotInsert {
   653  			exist, err := m.Has(compare, hashInputProvider, k)
   654  			require.NoError(t, err)
   655  			require.False(t, exist)
   656  		}
   657  	})
   658  
   659  	t.Run("error", func(t *testing.T) {
   660  
   661  		typeInfo := testTypeInfo{42}
   662  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   663  		storage := newTestPersistentStorage(t)
   664  
   665  		testErr := errors.New("test")
   666  		digesterBuilder := newErrorDigesterBuilder(testErr)
   667  
   668  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   669  		require.NoError(t, err)
   670  
   671  		exist, err := m.Has(compare, hashInputProvider, Uint64Value(0))
   672  		// err is testErr wrapped in ExternalError.
   673  		require.Equal(t, 1, errorCategorizationCount(err))
   674  		var externalError *ExternalError
   675  		require.ErrorAs(t, err, &externalError)
   676  		require.Equal(t, testErr, externalError.Unwrap())
   677  		require.False(t, exist)
   678  	})
   679  }
   680  
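        // testMapRemoveElement removes k from m, checks that the removed key and value
        // match expectations, frees any external slabs referenced by the removed
        // storables, and verifies that a second removal of k fails with
        // KeyNotFoundError.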
   681  func testMapRemoveElement(t *testing.T, m *OrderedMap, k Value, expectedV Value) {
   682  
   683  	removedKeyStorable, removedValueStorable, err := m.Remove(compare, hashInputProvider, k)
   684  	require.NoError(t, err)
   685  
   686  	removedKey, err := removedKeyStorable.StoredValue(m.Storage)
   687  	require.NoError(t, err)
   688  	valueEqual(t, typeInfoComparator, k, removedKey)
   689  
   690  	removedValue, err := removedValueStorable.StoredValue(m.Storage)
   691  	require.NoError(t, err)
   692  	valueEqual(t, typeInfoComparator, expectedV, removedValue)
   693  
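        	// Large removed keys/values live in their own slabs and are returned as
        	// StorageIDStorable references; free the referenced slabs explicitly.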
   694  	if id, ok := removedKeyStorable.(StorageIDStorable); ok {
   695  		err = m.Storage.Remove(StorageID(id))
   696  		require.NoError(t, err)
   697  	}
   698  
   699  	if id, ok := removedValueStorable.(StorageIDStorable); ok {
   700  		err = m.Storage.Remove(StorageID(id))
   701  		require.NoError(t, err)
   702  	}
   703  
   704  	// Removing the same key a second time must fail with KeyNotFoundError.
   705  	removedKeyStorable, removedValueStorable, err = m.Remove(compare, hashInputProvider, k)
   706  	require.Equal(t, 1, errorCategorizationCount(err))
   707  	var userError *UserError
   708  	var keyNotFoundError *KeyNotFoundError
   709  	require.ErrorAs(t, err, &userError)
   710  	require.ErrorAs(t, err, &keyNotFoundError)
   711  	require.ErrorAs(t, userError, &keyNotFoundError)
   712  	require.Nil(t, removedKeyStorable)
   713  	require.Nil(t, removedValueStorable)
   714  }
   715  
   716  func TestMapRemove(t *testing.T) {
   717  
   718  	SetThreshold(512)
   719  	defer SetThreshold(1024)
   720  
   721  	const (
   722  		mapSize              = 2048
   723  		smallKeyStringSize   = 16
   724  		smallValueStringSize = 16
   725  		largeKeyStringSize   = 512
   726  		largeValueStringSize = 512
   727  	)
   728  
   729  	r := newRand(t)
   730  
   731  	smallKeyValues := make(map[Value]Value, mapSize)
   732  	for len(smallKeyValues) < mapSize {
   733  		k := NewStringValue(randStr(r, smallKeyStringSize))
   734  		v := NewStringValue(randStr(r, smallValueStringSize))
   735  		smallKeyValues[k] = v
   736  	}
   737  
   738  	largeKeyValues := make(map[Value]Value, mapSize)
   739  	for len(largeKeyValues) < mapSize {
   740  		k := NewStringValue(randStr(r, largeKeyStringSize))
   741  		v := NewStringValue(randStr(r, largeValueStringSize))
   742  		largeKeyValues[k] = v
   743  	}
   744  
   745  	testCases := []struct {
   746  		name      string
   747  		keyValues map[Value]Value
   748  	}{
   749  		{name: "small key and value", keyValues: smallKeyValues},
   750  		{name: "large key and value", keyValues: largeKeyValues},
   751  	}
   752  
   753  	for _, tc := range testCases {
   754  		t.Run(tc.name, func(t *testing.T) {
   755  			typeInfo := testTypeInfo{42}
   756  			address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   757  			storage := newTestPersistentStorage(t)
   758  
   759  			m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   760  			require.NoError(t, err)
   761  
   762  			// Insert elements
   763  			for k, v := range tc.keyValues {
   764  				existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   765  				require.NoError(t, err)
   766  				require.Nil(t, existingStorable)
   767  			}
   768  
   769  			verifyMap(t, storage, typeInfo, address, m, tc.keyValues, nil, false)
   770  
   771  			count := len(tc.keyValues)
   772  
   773  			// Remove all elements
   774  			for k, v := range tc.keyValues {
   775  
   776  				testMapRemoveElement(t, m, k, v)
   777  
   778  				count--
   779  
   780  				require.True(t, typeInfoComparator(typeInfo, m.Type()))
   781  				require.Equal(t, address, m.Address())
   782  				require.Equal(t, uint64(count), m.Count())
   783  			}
   784  
   785  			verifyEmptyMap(t, storage, typeInfo, address, m)
   786  		})
   787  	}
   788  
   789  	t.Run("collision", func(t *testing.T) {
   790  		// Test:
   791  		// - data slab refers to an external slab containing elements with hash collision
   792  		// - last collision element is inlined after all other collision elements are removed
   793  		// - data slab overflows with the inlined collision element
   794  		// - data slab splits
   795  
   796  		SetThreshold(512)
   797  		defer SetThreshold(1024)
   798  
   799  		const (
   800  			numOfElementsBeforeCollision = 54
   801  			numOfElementsWithCollision   = 10
   802  			numOfElementsAfterCollision  = 1
   803  		)
   804  
   805  		digesterBuilder := &mockDigesterBuilder{}
   806  		typeInfo := testTypeInfo{42}
   807  		storage := newTestPersistentStorage(t)
   808  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   809  		r := newRand(t)
   810  
   811  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   812  		require.NoError(t, err)
   813  
   814  		nextDigest := Digest(0)
   815  
   816  		nonCollisionKeyValues := make(map[Value]Value)
   817  		for i := 0; i < numOfElementsBeforeCollision; i++ {
   818  			k := Uint64Value(i)
   819  			v := Uint64Value(i)
   820  			nonCollisionKeyValues[k] = v
   821  
   822  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{nextDigest}})
   823  			nextDigest++
   824  
   825  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   826  			require.NoError(t, err)
   827  			require.Nil(t, existingStorable)
   828  		}
   829  
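        		// All collision elements share the same digest (nextDigest) and use keys
        		// and values near the max inline size, so they are stored together in an
        		// external collision slab.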
   830  		collisionKeyValues := make(map[Value]Value)
   831  		for len(collisionKeyValues) < numOfElementsWithCollision {
   832  			k := NewStringValue(randStr(r, int(MaxInlineMapKeyOrValueSize)-2))
   833  			v := NewStringValue(randStr(r, int(MaxInlineMapKeyOrValueSize)-2))
   834  			collisionKeyValues[k] = v
   835  
   836  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{nextDigest}})
   837  		}
   838  
   839  		for k, v := range collisionKeyValues {
   840  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   841  			require.NoError(t, err)
   842  			require.Nil(t, existingStorable)
   843  		}
   844  
   845  		nextDigest++
   846  		k := Uint64Value(nextDigest)
   847  		v := Uint64Value(nextDigest)
   848  		nonCollisionKeyValues[k] = v
   849  
   850  		digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{nextDigest}})
   851  
   852  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   853  		require.NoError(t, err)
   854  		require.Nil(t, existingStorable)
   855  
   856  		count := len(nonCollisionKeyValues) + len(collisionKeyValues)
   857  
   858  		// Remove all collision elements
   859  		for k, v := range collisionKeyValues {
   860  
   861  			testMapRemoveElement(t, m, k, v)
   862  
   863  			count--
   864  
   865  			require.True(t, typeInfoComparator(typeInfo, m.Type()))
   866  			require.Equal(t, address, m.Address())
   867  			require.Equal(t, uint64(count), m.Count())
   868  		}
   869  
   870  		verifyMap(t, storage, typeInfo, address, m, nonCollisionKeyValues, nil, false)
   871  
   872  		// Remove remaining elements
   873  		for k, v := range nonCollisionKeyValues {
   874  
   875  			testMapRemoveElement(t, m, k, v)
   876  
   877  			count--
   878  
   879  			require.True(t, typeInfoComparator(typeInfo, m.Type()))
   880  			require.Equal(t, address, m.Address())
   881  			require.Equal(t, uint64(count), m.Count())
   882  		}
   883  
   884  		verifyEmptyMap(t, storage, typeInfo, address, m)
   885  	})
   886  
   887  	t.Run("collision with data root", func(t *testing.T) {
   888  		// Test:
   889  		// - data slab refers to an external slab containing elements with hash collision
   890  		// - last collision element is inlined after all other collision elements are removed
   891  		// - data slab overflows with the inlined collision element
   892  		// - data slab splits
   893  
   894  		SetThreshold(512)
   895  		defer SetThreshold(1024)
   896  
   897  		const (
   898  			numOfElementsWithCollision    = 10
   899  			numOfElementsWithoutCollision = 35
   900  		)
   901  
   902  		digesterBuilder := &mockDigesterBuilder{}
   903  		typeInfo := testTypeInfo{42}
   904  		storage := newTestPersistentStorage(t)
   905  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   906  		r := newRand(t)
   907  
   908  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
   909  		require.NoError(t, err)
   910  
   911  		collisionKeyValues := make(map[Value]Value)
   912  		for len(collisionKeyValues) < numOfElementsWithCollision {
   913  			k := NewStringValue(randStr(r, int(MaxInlineMapKeyOrValueSize)-2))
   914  			v := NewStringValue(randStr(r, int(MaxInlineMapKeyOrValueSize)-2))
   915  			collisionKeyValues[k] = v
   916  
   917  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{0}})
   918  		}
   919  
   920  		for k, v := range collisionKeyValues {
   921  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   922  			require.NoError(t, err)
   923  			require.Nil(t, existingStorable)
   924  		}
   925  
   926  		nonCollisionKeyValues := make(map[Value]Value)
   927  		for i := 0; i < numOfElementsWithoutCollision; i++ {
   928  			k := Uint64Value(i)
   929  			v := Uint64Value(i)
   930  			nonCollisionKeyValues[k] = v
   931  
   932  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i) + 1}})
   933  
   934  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
   935  			require.NoError(t, err)
   936  			require.Nil(t, existingStorable)
   937  		}
   938  
   939  		count := len(nonCollisionKeyValues) + len(collisionKeyValues)
   940  
   941  		// Remove all collision elements
   942  		for k, v := range collisionKeyValues {
   943  
   944  			testMapRemoveElement(t, m, k, v)
   945  
   946  			count--
   947  
   948  			require.True(t, typeInfoComparator(typeInfo, m.Type()))
   949  			require.Equal(t, address, m.Address())
   950  			require.Equal(t, uint64(count), m.Count())
   951  		}
   952  
   953  		verifyMap(t, storage, typeInfo, address, m, nonCollisionKeyValues, nil, false)
   954  
   955  		// Remove remaining elements
   956  		for k, v := range nonCollisionKeyValues {
   957  
   958  			testMapRemoveElement(t, m, k, v)
   959  
   960  			count--
   961  
   962  			require.True(t, typeInfoComparator(typeInfo, m.Type()))
   963  			require.Equal(t, address, m.Address())
   964  			require.Equal(t, uint64(count), m.Count())
   965  		}
   966  
   967  		verifyEmptyMap(t, storage, typeInfo, address, m)
   968  	})
   969  
   970  	t.Run("no collision key not found", func(t *testing.T) {
   971  		const mapSize = 1024
   972  
   973  		typeInfo := testTypeInfo{42}
   974  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
   975  		storage := newTestPersistentStorage(t)
   976  
   977  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
   978  		require.NoError(t, err)
   979  
   980  		keyValues := make(map[Value]Value, mapSize)
   981  		for i := 0; i < mapSize; i++ {
   982  			k := Uint64Value(i)
   983  			v := Uint64Value(i)
   984  			keyValues[k] = v
   985  			storable, err := m.Set(compare, hashInputProvider, k, v)
   986  			require.NoError(t, err)
   987  			require.Nil(t, storable)
   988  		}
   989  
   990  		r := newRand(t)
   991  
   992  		k := NewStringValue(randStr(r, 1024))
   993  		existingKeyStorable, existingValueStorable, err := m.Remove(compare, hashInputProvider, k)
   994  		require.Nil(t, existingKeyStorable)
   995  		require.Nil(t, existingValueStorable)
   996  		require.Equal(t, 1, errorCategorizationCount(err))
   997  		var userError *UserError
   998  		var keyNotFoundError *KeyNotFoundError
   999  		require.ErrorAs(t, err, &userError)
  1000  		require.ErrorAs(t, err, &keyNotFoundError)
  1001  		require.ErrorAs(t, userError, &keyNotFoundError)
  1002  
  1003  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  1004  	})
  1005  
  1006  	t.Run("collision key not found", func(t *testing.T) {
  1007  		const mapSize = 256
  1008  
  1009  		typeInfo := testTypeInfo{42}
  1010  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1011  		storage := newTestPersistentStorage(t)
  1012  		digesterBuilder := &mockDigesterBuilder{}
  1013  
  1014  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1015  		require.NoError(t, err)
  1016  
  1017  		keyValues := make(map[Value]Value, mapSize)
  1018  		for i := 0; i < mapSize; i++ {
  1019  			k := Uint64Value(i)
  1020  			v := Uint64Value(i)
  1021  			keyValues[k] = v
  1022  
  1023  			digests := []Digest{Digest(i % 10)}
  1024  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  1025  
  1026  			storable, err := m.Set(compare, hashInputProvider, k, v)
  1027  			require.NoError(t, err)
  1028  			require.Nil(t, storable)
  1029  		}
  1030  
  1031  		r := newRand(t)
  1032  		k := NewStringValue(randStr(r, 1024))
  1033  
  1034  		digests := []Digest{Digest(0)}
  1035  		digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  1036  
  1037  		existingKeyStorable, existingValueStorable, err := m.Remove(compare, hashInputProvider, k)
  1038  		require.Nil(t, existingKeyStorable)
  1039  		require.Nil(t, existingValueStorable)
  1040  		require.Equal(t, 1, errorCategorizationCount(err))
  1041  		var userError *UserError
  1042  		var keyNotFoundError *KeyNotFoundError
  1043  		require.ErrorAs(t, err, &userError)
  1044  		require.ErrorAs(t, err, &keyNotFoundError)
  1045  		require.ErrorAs(t, userError, &keyNotFoundError)
  1046  
  1047  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  1048  	})
  1049  }
  1050  
  1051  func TestMapIterate(t *testing.T) {
  1052  
  1053  	t.Run("no collision", func(t *testing.T) {
  1054  		const (
  1055  			mapSize       = 2048
  1056  			keyStringSize = 16
  1057  		)
  1058  
  1059  		r := newRand(t)
  1060  
  1061  		keyValues := make(map[Value]Value, mapSize)
  1062  		sortedKeys := make([]Value, mapSize)
  1063  		i := uint64(0)
  1064  		for len(keyValues) < mapSize {
  1065  			k := NewStringValue(randStr(r, keyStringSize))
  1066  			if _, found := keyValues[k]; !found {
  1067  				keyValues[k] = Uint64Value(i)
  1068  				sortedKeys[i] = k
  1069  				i++
  1070  			}
  1071  		}
  1072  
  1073  		typeInfo := testTypeInfo{42}
  1074  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1075  		storage := newTestPersistentStorage(t)
  1076  		digesterBuilder := newBasicDigesterBuilder()
  1077  
  1078  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1079  		require.NoError(t, err)
  1080  
  1081  		for k, v := range keyValues {
  1082  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1083  			require.NoError(t, err)
  1084  			require.Nil(t, existingStorable)
  1085  		}
  1086  
  1087  		// Sort keys by digest
  1088  		sort.Stable(keysByDigest{sortedKeys, digesterBuilder})
  1089  
  1090  		// Iterate key value pairs
  1091  		i = uint64(0)
  1092  		err = m.Iterate(func(k Value, v Value) (resume bool, err error) {
  1093  			valueEqual(t, typeInfoComparator, sortedKeys[i], k)
  1094  			valueEqual(t, typeInfoComparator, keyValues[k], v)
  1095  			i++
  1096  			return true, nil
  1097  		})
  1098  
  1099  		require.NoError(t, err)
  1100  		require.Equal(t, uint64(mapSize), i)
  1101  
  1102  		// Iterate keys
  1103  		i = uint64(0)
  1104  		err = m.IterateKeys(func(k Value) (resume bool, err error) {
  1105  			valueEqual(t, typeInfoComparator, sortedKeys[i], k)
  1106  			i++
  1107  			return true, nil
  1108  		})
  1109  
  1110  		require.NoError(t, err)
  1111  		require.Equal(t, uint64(mapSize), i)
  1112  
  1113  		// Iterate values
  1114  		i = uint64(0)
  1115  		err = m.IterateValues(func(v Value) (resume bool, err error) {
  1116  			k := sortedKeys[i]
  1117  			valueEqual(t, typeInfoComparator, keyValues[k], v)
  1118  			i++
  1119  			return true, nil
  1120  		})
  1121  
  1122  		require.NoError(t, err)
  1123  		require.Equal(t, uint64(mapSize), i)
  1124  
  1125  		verifyMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false)
  1126  	})
  1127  
  1128  	t.Run("collision", func(t *testing.T) {
  1129  		const (
  1130  			mapSize         = 1024
  1131  			keyStringSize   = 16
  1132  			valueStringSize = 16
  1133  		)
  1134  
  1135  		r := newRand(t)
  1136  
  1137  		digesterBuilder := &mockDigesterBuilder{}
  1138  		typeInfo := testTypeInfo{42}
  1139  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1140  		storage := newTestPersistentStorage(t)
  1141  
  1142  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1143  		require.NoError(t, err)
  1144  
  1145  		keyValues := make(map[Value]Value, mapSize)
  1146  		sortedKeys := make([]Value, 0, mapSize)
  1147  		for len(keyValues) < mapSize {
  1148  			k := NewStringValue(randStr(r, keyStringSize))
  1149  
  1150  			if _, found := keyValues[k]; !found {
  1151  				v := NewStringValue(randStr(r, valueStringSize))
  1152  				sortedKeys = append(sortedKeys, k)
  1153  				keyValues[k] = v
  1154  
  1155  				digests := []Digest{
  1156  					Digest(r.Intn(256)),
  1157  					Digest(r.Intn(256)),
  1158  					Digest(r.Intn(256)),
  1159  					Digest(r.Intn(256)),
  1160  				}
  1161  				digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  1162  
  1163  				existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1164  				require.NoError(t, err)
  1165  				require.Nil(t, existingStorable)
  1166  			}
  1167  		}
  1168  
  1169  		t.Log("created map of unique key value pairs")
  1170  
  1171  		// Sort keys by digest
  1172  		sort.Stable(keysByDigest{sortedKeys, digesterBuilder})
  1173  
  1174  		t.Log("sorted keys by digests")
  1175  
  1176  		// Iterate key value pairs
  1177  		i := uint64(0)
  1178  		err = m.Iterate(func(k Value, v Value) (resume bool, err error) {
  1179  			valueEqual(t, typeInfoComparator, sortedKeys[i], k)
  1180  			valueEqual(t, typeInfoComparator, keyValues[k], v)
  1181  			i++
  1182  			return true, nil
  1183  		})
  1184  		require.NoError(t, err)
  1185  		require.Equal(t, uint64(mapSize), i)
  1186  
  1187  		t.Log("iterated key value pairs")
  1188  
  1189  		// Iterate keys
  1190  		i = uint64(0)
  1191  		err = m.IterateKeys(func(k Value) (resume bool, err error) {
  1192  			valueEqual(t, typeInfoComparator, sortedKeys[i], k)
  1193  			i++
  1194  			return true, nil
  1195  		})
  1196  		require.NoError(t, err)
  1197  		require.Equal(t, uint64(mapSize), i)
  1198  
  1199  		t.Log("iterated keys")
  1200  
  1201  		// Iterate values
  1202  		i = uint64(0)
  1203  		err = m.IterateValues(func(v Value) (resume bool, err error) {
  1204  			valueEqual(t, typeInfoComparator, keyValues[sortedKeys[i]], v)
  1205  			i++
  1206  			return true, nil
  1207  		})
  1208  		require.NoError(t, err)
  1209  		require.Equal(t, uint64(mapSize), i)
  1210  
  1211  		t.Log("iterated values")
  1212  
  1213  		verifyMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false)
  1214  	})
  1215  }
  1216  
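        // testMapDeterministicHashCollision builds a map whose keys share only
        // mockDigestCount distinct digest sequences, so collisions are deterministic
        // and exactly mockDigestCount collision data slabs are expected; it then
        // removes all elements and verifies the map is empty.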
  1217  func testMapDeterministicHashCollision(t *testing.T, r *rand.Rand, maxDigestLevel int) {
  1218  
  1219  	const (
  1220  		mapSize            = 1024
  1221  		keyStringMaxSize   = 1024
  1222  		valueStringMaxSize = 1024
  1223  
  1224  		// mockDigestCount is the number of unique digest sequences.
  1225  		// Each sequence has maxDigestLevel digests.
  1226  		mockDigestCount = 8
  1227  	)
  1228  
  1229  	uniqueFirstLevelDigests := make(map[Digest]bool, mockDigestCount)
  1230  	firstLevelDigests := make([]Digest, 0, mockDigestCount)
  1231  	for len(firstLevelDigests) < mockDigestCount {
  1232  		d := Digest(uint64(r.Intn(256)))
  1233  		if !uniqueFirstLevelDigests[d] {
  1234  			uniqueFirstLevelDigests[d] = true
  1235  			firstLevelDigests = append(firstLevelDigests, d)
  1236  		}
  1237  	}
  1238  
  1239  	digestsGroup := make([][]Digest, mockDigestCount)
  1240  	for i := 0; i < mockDigestCount; i++ {
  1241  		digests := make([]Digest, maxDigestLevel)
  1242  		digests[0] = firstLevelDigests[i]
  1243  		for j := 1; j < maxDigestLevel; j++ {
  1244  			digests[j] = Digest(uint64(r.Intn(256)))
  1245  		}
  1246  		digestsGroup[i] = digests
  1247  	}
  1248  
  1249  	digesterBuilder := &mockDigesterBuilder{}
  1250  
  1251  	keyValues := make(map[Value]Value, mapSize)
  1252  	i := 0
  1253  	for len(keyValues) < mapSize {
  1254  		k := NewStringValue(randStr(r, r.Intn(keyStringMaxSize)))
  1255  		if _, found := keyValues[k]; !found {
  1256  			keyValues[k] = NewStringValue(randStr(r, r.Intn(valueStringMaxSize)))
  1257  
  1258  			index := i % len(digestsGroup)
  1259  			digests := digestsGroup[index]
  1260  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  1261  
  1262  			i++
  1263  		}
  1264  	}
  1265  
  1266  	typeInfo := testTypeInfo{42}
  1267  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1268  	storage := newTestPersistentStorage(t)
  1269  
  1270  	m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1271  	require.NoError(t, err)
  1272  
  1273  	for k, v := range keyValues {
  1274  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1275  		require.NoError(t, err)
  1276  		require.Nil(t, existingStorable)
  1277  	}
  1278  
  1279  	verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  1280  
  1281  	stats, err := GetMapStats(m)
  1282  	require.NoError(t, err)
  1283  	require.Equal(t, uint64(mockDigestCount), stats.CollisionDataSlabCount)
  1284  
  1285  	// Remove all elements
  1286  	for k, v := range keyValues {
  1287  		removedKeyStorable, removedValueStorable, err := m.Remove(compare, hashInputProvider, k)
  1288  		require.NoError(t, err)
  1289  
  1290  		removedKey, err := removedKeyStorable.StoredValue(storage)
  1291  		require.NoError(t, err)
  1292  		valueEqual(t, typeInfoComparator, k, removedKey)
  1293  
  1294  		removedValue, err := removedValueStorable.StoredValue(storage)
  1295  		require.NoError(t, err)
  1296  		valueEqual(t, typeInfoComparator, v, removedValue)
  1297  
  1298  		if id, ok := removedKeyStorable.(StorageIDStorable); ok {
  1299  			err = storage.Remove(StorageID(id))
  1300  			require.NoError(t, err)
  1301  		}
  1302  
  1303  		if id, ok := removedValueStorable.(StorageIDStorable); ok {
  1304  			err = storage.Remove(StorageID(id))
  1305  			require.NoError(t, err)
  1306  		}
  1307  	}
  1308  
  1309  	verifyEmptyMap(t, storage, typeInfo, address, m)
  1310  }
  1311  
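        // testMapRandomHashCollision builds a map whose keys get random digest
        // sequences of maxDigestLevel levels (values 0-255), so collisions happen by
        // chance; it then removes all elements and verifies the map is empty.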
  1312  func testMapRandomHashCollision(t *testing.T, r *rand.Rand, maxDigestLevel int) {
  1313  
  1314  	const (
  1315  		mapSize            = 1024
  1316  		keyStringMaxSize   = 1024
  1317  		valueStringMaxSize = 1024
  1318  	)
  1319  
  1320  	digesterBuilder := &mockDigesterBuilder{}
  1321  
  1322  	keyValues := make(map[Value]Value, mapSize)
  1323  	for len(keyValues) < mapSize {
  1324  		k := NewStringValue(randStr(r, r.Intn(keyStringMaxSize)))
  1325  
  1326  		if _, found := keyValues[k]; !found {
  1327  			keyValues[k] = NewStringValue(randStr(r, valueStringMaxSize))
  1328  
  1329  			var digests []Digest
  1330  			for i := 0; i < maxDigestLevel; i++ {
  1331  				digests = append(digests, Digest(r.Intn(256)))
  1332  			}
  1333  
  1334  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  1335  		}
  1336  	}
  1337  
  1338  	typeInfo := testTypeInfo{42}
  1339  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1340  	storage := newTestPersistentStorage(t)
  1341  
  1342  	m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1343  	require.NoError(t, err)
  1344  
  1345  	for k, v := range keyValues {
  1346  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1347  		require.NoError(t, err)
  1348  		require.Nil(t, existingStorable)
  1349  	}
  1350  
  1351  	verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  1352  
  1353  	// Remove all elements
  1354  	for k, v := range keyValues {
  1355  		removedKeyStorable, removedValueStorable, err := m.Remove(compare, hashInputProvider, k)
  1356  		require.NoError(t, err)
  1357  
  1358  		removedKey, err := removedKeyStorable.StoredValue(storage)
  1359  		require.NoError(t, err)
  1360  		valueEqual(t, typeInfoComparator, k, removedKey)
  1361  
  1362  		removedValue, err := removedValueStorable.StoredValue(storage)
  1363  		require.NoError(t, err)
  1364  		valueEqual(t, typeInfoComparator, v, removedValue)
  1365  
  1366  		if id, ok := removedKeyStorable.(StorageIDStorable); ok {
  1367  			err = storage.Remove(StorageID(id))
  1368  			require.NoError(t, err)
  1369  		}
  1370  
  1371  		if id, ok := removedValueStorable.(StorageIDStorable); ok {
  1372  			err = storage.Remove(StorageID(id))
  1373  			require.NoError(t, err)
  1374  		}
  1375  	}
  1376  
  1377  	verifyEmptyMap(t, storage, typeInfo, address, m)
  1378  }
  1379  
  1380  func TestMapHashCollision(t *testing.T) {
  1381  
  1382  	SetThreshold(512)
  1383  	defer SetThreshold(1024)
  1384  
  1385  	const maxDigestLevel = 4
  1386  
  1387  	r := newRand(t)
  1388  
  1389  	for hashLevel := 1; hashLevel <= maxDigestLevel; hashLevel++ {
  1390  		name := fmt.Sprintf("deterministic max hash level %d", hashLevel)
  1391  		t.Run(name, func(t *testing.T) {
  1392  			testMapDeterministicHashCollision(t, r, hashLevel)
  1393  		})
  1394  	}
  1395  
  1396  	for hashLevel := 1; hashLevel <= maxDigestLevel; hashLevel++ {
  1397  		name := fmt.Sprintf("random max hash level %d", hashLevel)
  1398  		t.Run(name, func(t *testing.T) {
  1399  			testMapRandomHashCollision(t, r, hashLevel)
  1400  		})
  1401  	}
  1402  }
  1403  
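        // testMapSetRemoveRandomValues performs opCount random Set and Remove
        // operations with random keys, values, and digests, verifying each result,
        // and returns the map together with its expected contents.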
  1404  func testMapSetRemoveRandomValues(
  1405  	t *testing.T,
  1406  	r *rand.Rand,
  1407  	storage *PersistentSlabStorage,
  1408  	typeInfo TypeInfo,
  1409  	address Address,
  1410  ) (*OrderedMap, map[Value]Value) {
  1411  
  1412  	const (
  1413  		MapSetOp = iota
  1414  		MapRemoveOp
  1415  		MapMaxOp
  1416  	)
  1417  
  1418  	const (
  1419  		opCount         = 4096
  1420  		digestMaxValue  = 256
  1421  		digestMaxLevels = 4
  1422  	)
  1423  
  1424  	digesterBuilder := &mockDigesterBuilder{}
  1425  
  1426  	m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1427  	require.NoError(t, err)
  1428  
  1429  	keyValues := make(map[Value]Value)
  1430  	var keys []Value
  1431  	for i := uint64(0); i < opCount; i++ {
  1432  
  1433  		nextOp := r.Intn(MapMaxOp)
  1434  
  1435  		if m.Count() == 0 {
  1436  			nextOp = MapSetOp
  1437  		}
  1438  
  1439  		switch nextOp {
  1440  
  1441  		case MapSetOp:
  1442  
  1443  			k := randomValue(r, int(maxInlineMapElementSize))
  1444  			v := randomValue(r, int(maxInlineMapElementSize))
  1445  
  1446  			var digests []Digest
  1447  			for i := 0; i < digestMaxLevels; i++ {
  1448  				digests = append(digests, Digest(r.Intn(digestMaxValue)))
  1449  			}
  1450  
  1451  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  1452  
  1453  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1454  			require.NoError(t, err)
  1455  
  1456  			if oldv, ok := keyValues[k]; ok {
  1457  				require.NotNil(t, existingStorable)
  1458  
  1459  				existingValue, err := existingStorable.StoredValue(storage)
  1460  				require.NoError(t, err)
  1461  				valueEqual(t, typeInfoComparator, oldv, existingValue)
  1462  
  1463  				if id, ok := existingStorable.(StorageIDStorable); ok {
  1464  					err = storage.Remove(StorageID(id))
  1465  					require.NoError(t, err)
  1466  				}
  1467  			} else {
  1468  				require.Nil(t, existingStorable)
  1469  
  1470  				keys = append(keys, k)
  1471  			}
  1472  
  1473  			keyValues[k] = v
  1474  
  1475  		case MapRemoveOp:
  1476  			index := r.Intn(len(keys))
  1477  			k := keys[index]
  1478  
  1479  			removedKeyStorable, removedValueStorable, err := m.Remove(compare, hashInputProvider, k)
  1480  			require.NoError(t, err)
  1481  
  1482  			removedKey, err := removedKeyStorable.StoredValue(storage)
  1483  			require.NoError(t, err)
  1484  			valueEqual(t, typeInfoComparator, k, removedKey)
  1485  
  1486  			removedValue, err := removedValueStorable.StoredValue(storage)
  1487  			require.NoError(t, err)
  1488  			valueEqual(t, typeInfoComparator, keyValues[k], removedValue)
  1489  
  1490  			if id, ok := removedKeyStorable.(StorageIDStorable); ok {
  1491  				err := storage.Remove(StorageID(id))
  1492  				require.NoError(t, err)
  1493  			}
  1494  
  1495  			if id, ok := removedValueStorable.(StorageIDStorable); ok {
  1496  				err := storage.Remove(StorageID(id))
  1497  				require.NoError(t, err)
  1498  			}
  1499  
  1500  			delete(keyValues, k)
  1501  			copy(keys[index:], keys[index+1:])
  1502  			keys = keys[:len(keys)-1]
  1503  		}
  1504  
  1505  		require.True(t, typeInfoComparator(typeInfo, m.Type()))
  1506  		require.Equal(t, address, m.Address())
  1507  		require.Equal(t, uint64(len(keys)), m.Count())
  1508  	}
  1509  
  1510  	return m, keyValues
  1511  }
  1512  
  1513  func TestMapSetRemoveRandomValues(t *testing.T) {
  1514  
  1515  	SetThreshold(256)
  1516  	defer SetThreshold(1024)
  1517  
  1518  	r := newRand(t)
  1519  
  1520  	storage := newTestPersistentStorage(t)
  1521  	typeInfo := testTypeInfo{42}
  1522  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1523  
  1524  	m, keyValues := testMapSetRemoveRandomValues(t, r, storage, typeInfo, address)
  1525  
  1526  	verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  1527  }
  1528  
  1529  func TestMapEncodeDecode(t *testing.T) {
  1530  
  1531  	typeInfo := testTypeInfo{42}
  1532  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  1533  
  1534  	t.Run("empty", func(t *testing.T) {
  1535  
  1536  		storage := newTestBasicStorage(t)
  1537  
  1538  		// Create map
  1539  		m, err := NewMap(storage, address, NewDefaultDigesterBuilder(), typeInfo)
  1540  		require.NoError(t, err)
  1541  		require.Equal(t, uint64(0), m.Count())
  1542  
  1543  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  1544  
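        		// Expected encoding: slab extra data (version, flag, CBOR array of
        		// [type info, count, seed]) followed by the root map data slab
        		// (version, flag, CBOR array of [level, hkeys, elements]).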
  1545  		expected := map[StorageID][]byte{
  1546  			id1: {
  1547  				// extra data
  1548  				// version
  1549  				0x00,
  1550  				// flag: root + map data
  1551  				0x88,
  1552  				// extra data (CBOR encoded array of 3 elements)
  1553  				0x83,
  1554  				// type info
  1555  				0x18, 0x2a,
  1556  				// count: 0
  1557  				0x00,
  1558  				// seed
  1559  				0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  1560  
  1561  				// version
  1562  				0x00,
  1563  				// flag: root + map data
  1564  				0x88,
  1565  
  1566  				// the following encoded data is valid CBOR
  1567  
  1568  				// elements (array of 3 elements)
  1569  				0x83,
  1570  
  1571  				// level: 0
  1572  				0x00,
  1573  
  1574  				// hkeys (empty byte string, length 8 * 0)
  1575  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1576  
  1577  				// elements (array of 0 elements)
  1578  				// each element is encoded as CBOR array of 2 elements (key, value)
  1579  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1580  			},
  1581  		}
  1582  
  1583  		// Verify encoded data
  1584  		stored, err := storage.Encode()
  1585  		require.NoError(t, err)
  1586  		require.Equal(t, 1, len(stored))
  1587  		require.Equal(t, expected[id1], stored[id1])
  1588  
  1589  		// Decode data to new storage
  1590  		storage2 := newTestPersistentStorageWithData(t, stored)
  1591  
  1592  		// Test new map from storage2
  1593  		decodedMap, err := NewMapWithRootID(storage2, id1, NewDefaultDigesterBuilder())
  1594  		require.NoError(t, err)
  1595  
  1596  		verifyEmptyMap(t, storage2, typeInfo, address, decodedMap)
  1597  	})
  1598  
  1599  	t.Run("dataslab as root", func(t *testing.T) {
  1600  		SetThreshold(256)
  1601  		defer SetThreshold(1024)
  1602  
  1603  		// Create and populate map in memory
  1604  		storage := newTestBasicStorage(t)
  1605  
  1606  		digesterBuilder := &mockDigesterBuilder{}
  1607  
  1608  		// Create map
  1609  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1610  		require.NoError(t, err)
  1611  
  1612  		const mapSize = 1
  1613  		keyValues := make(map[Value]Value, mapSize)
  1614  		for i := uint64(0); i < mapSize; i++ {
  1615  			k := Uint64Value(i)
  1616  			v := Uint64Value(i * 2)
  1617  			keyValues[k] = v
  1618  
  1619  			digests := []Digest{Digest(i), Digest(i * 2)}
  1620  			digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  1621  
  1622  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1623  			require.NoError(t, err)
  1624  			require.Nil(t, existingStorable)
  1625  		}
  1626  
  1627  		require.Equal(t, uint64(mapSize), m.Count())
  1628  
  1629  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  1630  
  1631  		// Expected serialized slab data with storage id
  1632  		expected := map[StorageID][]byte{
  1633  
  1634  			id1: {
  1635  				// extra data
  1636  				// version
  1637  				0x00,
  1638  				// flag: root + map data
  1639  				0x88,
  1640  				// extra data (CBOR encoded array of 3 elements)
  1641  				0x83,
  1642  				// type info
  1643  				0x18, 0x2a,
  1644  				// count: 1
  1645  				0x01,
  1646  				// seed
  1647  				0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  1648  
  1649  				// version
  1650  				0x00,
  1651  				// flag: root + map data
  1652  				0x88,
  1653  
  1654  				// the following encoded data is valid CBOR
  1655  
  1656  				// elements (array of 3 elements)
  1657  				0x83,
  1658  
  1659  				// level: 0
  1660  				0x00,
  1661  
  1662  				// hkeys (byte string of length 8 * 1)
  1663  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  1664  				// hkey: 0
  1665  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1666  
  1667  				// elements (array of 1 element)
  1668  				// each element is encoded as CBOR array of 2 elements (key, value)
  1669  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  1670  				// element: [uint64(0):uint64(0)]
  1671  				0x82, 0xd8, 0xa4, 0x00, 0xd8, 0xa4, 0x00,
  1672  			},
  1673  		}
  1674  
  1675  		// Verify encoded data
  1676  		stored, err := storage.Encode()
  1677  		require.NoError(t, err)
  1678  
  1679  		require.Equal(t, len(expected), len(stored))
  1680  		require.Equal(t, expected[id1], stored[id1])
  1681  
  1682  		// Decode data to new storage
  1683  		storage2 := newTestPersistentStorageWithData(t, stored)
  1684  
  1685  		// Test new map from storage2
  1686  		decodedMap, err := NewMapWithRootID(storage2, id1, digesterBuilder)
  1687  		require.NoError(t, err)
  1688  
  1689  		verifyMap(t, storage2, typeInfo, address, decodedMap, keyValues, nil, false)
  1690  	})
  1691  
  1692  	t.Run("has pointer no collision", func(t *testing.T) {
  1693  
  1694  		SetThreshold(256)
  1695  		defer SetThreshold(1024)
  1696  
  1697  		// Create and populate map in memory
  1698  		storage := newTestBasicStorage(t)
  1699  
  1700  		digesterBuilder := &mockDigesterBuilder{}
  1701  
  1702  		// Create map
  1703  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1704  		require.NoError(t, err)
  1705  
  1706  		const mapSize = 8
  1707  		keyValues := make(map[Value]Value, mapSize)
  1708  		r := 'a'
  1709  		for i := uint64(0); i < mapSize-1; i++ {
  1710  			k := NewStringValue(strings.Repeat(string(r), 22))
  1711  			v := NewStringValue(strings.Repeat(string(r), 22))
  1712  			keyValues[k] = v
  1713  
  1714  			digests := []Digest{Digest(i), Digest(i * 2)}
  1715  			digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  1716  
  1717  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1718  			require.NoError(t, err)
  1719  			require.Nil(t, existingStorable)
  1720  
  1721  			ri := int(r)
  1722  			r = rune(ri + 1)
  1723  		}
  1724  
  1725  		// Create nested array
  1726  		typeInfo2 := testTypeInfo{43}
  1727  
  1728  		nested, err := NewArray(storage, address, typeInfo2)
  1729  		require.NoError(t, err)
  1730  
  1731  		err = nested.Append(Uint64Value(0))
  1732  		require.NoError(t, err)
  1733  
  1734  		k := NewStringValue(strings.Repeat(string(r), 22))
  1735  		v := nested
  1736  		keyValues[k] = v
  1737  
  1738  		digests := []Digest{Digest(mapSize - 1), Digest((mapSize - 1) * 2)}
  1739  		digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  1740  
  1741  		// Insert nested array
  1742  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1743  		require.NoError(t, err)
  1744  		require.Nil(t, existingStorable)
  1745  
  1746  		require.Equal(t, uint64(mapSize), m.Count())
  1747  
  1748  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  1749  		id2 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 2}}
  1750  		id3 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 3}}
  1751  		id4 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 4}}
  1752  
  1753  		// Expected serialized slab data with storage id
  1754  		expected := map[StorageID][]byte{
  1755  
  1756  			// metadata slab
  1757  			id1: {
  1758  				// extra data
  1759  				// version
  1760  				0x00,
  1761  				// flag: root + map meta
  1762  				0x89,
  1763  				// extra data (CBOR encoded array of 3 elements)
  1764  				0x83,
  1765  				// type info: "map"
  1766  				0x18, 0x2A,
  1767  				// count: 8
  1768  				0x08,
  1769  				// seed
  1770  				0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  1771  
  1772  				// version
  1773  				0x00,
  1774  				// flag: root + meta
  1775  				0x89,
  1776  				// child header count
  1777  				0x00, 0x02,
  1778  				// child header 1 (storage id, first key, size)
  1779  				0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  1780  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1781  				0x00, 0x00, 0x01, 0x02,
  1782  				// child header 2
  1783  				0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  1784  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  1785  				0x00, 0x00, 0x00, 0xfe,
  1786  			},
  1787  
  1788  			// data slab
  1789  			id2: {
  1790  				// version
  1791  				0x00,
  1792  				// flag: map data
  1793  				0x08,
  1794  				// next storage id
  1795  				0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  1796  
  1797  				// the following encoded data is valid CBOR
  1798  
  1799  				// elements (array of 3 elements)
  1800  				0x83,
  1801  
  1802  				// level: 0
  1803  				0x00,
  1804  
  1805  				// hkeys (byte string of length 8 * 4)
  1806  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20,
  1807  				// hkey: 0
  1808  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1809  				// hkey: 1
  1810  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  1811  				// hkey: 2
  1812  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  1813  				// hkey: 3
  1814  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  1815  
  1816  				// elements (array of 4 elements)
  1817  				// each element is encoded as CBOR array of 2 elements (key, value)
  1818  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  1819  				// element: [aaaaaaaaaaaaaaaaaaaaaa:aaaaaaaaaaaaaaaaaaaaaa]
  1820  				0x82,
  1821  				0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61,
  1822  				0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61,
  1823  				// element: [bbbbbbbbbbbbbbbbbbbbbb:bbbbbbbbbbbbbbbbbbbbbb]
  1824  				0x82,
  1825  				0x76, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62,
  1826  				0x76, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62,
  1827  				// element: [cccccccccccccccccccccc:cccccccccccccccccccccc]
  1828  				0x82,
  1829  				0x76, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1830  				0x76, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1831  				// element: [dddddddddddddddddddddd:dddddddddddddddddddddd]
  1832  				0x82,
  1833  				0x76, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64,
  1834  				0x76, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64,
  1835  			},
  1836  
  1837  			// data slab
  1838  			id3: {
  1839  				// version
  1840  				0x00,
  1841  				// flag: has pointer + map data
  1842  				0x48,
  1843  				// next storage id
  1844  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1845  
  1846  				// the following encoded data is valid CBOR
  1847  
  1848  				// elements (array of 3 elements)
  1849  				0x83,
  1850  
  1851  				// level: 0
  1852  				0x00,
  1853  
  1854  				// hkeys (byte string of length 8 * 4)
  1855  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20,
  1856  				// hkey: 4
  1857  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  1858  				// hkey: 5
  1859  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05,
  1860  				// hkey: 6
  1861  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06,
  1862  				// hkey: 7
  1863  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07,
  1864  
  1865  				// elements (array of 4 elements)
  1866  				// each element is encoded as CBOR array of 2 elements (key, value)
  1867  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  1868  				// element: [eeeeeeeeeeeeeeeeeeeeee:eeeeeeeeeeeeeeeeeeeeee]
  1869  				0x82,
  1870  				0x76, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
  1871  				0x76, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
  1872  				// element: [ffffffffffffffffffffff:ffffffffffffffffffffff]
  1873  				0x82,
  1874  				0x76, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66,
  1875  				0x76, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66,
  1876  				// element: [gggggggggggggggggggggg:gggggggggggggggggggggg]
  1877  				0x82,
  1878  				0x76, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67,
  1879  				0x76, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67, 0x67,
  1880  				// element: [hhhhhhhhhhhhhhhhhhhhhh:StorageID(1,2,3,4,5,6,7,8,0,0,0,0,0,0,0,4)]
  1881  				0x82,
  1882  				0x76, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68, 0x68,
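        				// value: storage id of the nested array's root slab (id4)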
  1883  				0xd8, 0xff, 0x50, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  1884  			},
  1885  			// array data slab
  1886  			id4: {
  1887  				// extra data
  1888  				// version
  1889  				0x00,
  1890  				// flag: root + array data
  1891  				0x80,
  1892  				// extra data (CBOR encoded array of 1 element)
  1893  				0x81,
  1894  				// type info
  1895  				0x18, 0x2b,
  1896  
  1897  				// version
  1898  				0x00,
  1899  				// flag: root + array data
  1900  				0x80,
  1901  				// CBOR encoded array head (fixed size, 3 bytes)
  1902  				0x99, 0x00, 0x01,
  1903  				// CBOR encoded array elements
  1904  				0xd8, 0xa4, 0x00,
  1905  			},
  1906  		}
  1907  
  1908  		// Verify encoded data
  1909  		stored, err := storage.Encode()
  1910  		require.NoError(t, err)
  1911  
  1912  		require.Equal(t, len(expected), len(stored))
  1913  		require.Equal(t, expected[id1], stored[id1])
  1914  		require.Equal(t, expected[id2], stored[id2])
  1915  		require.Equal(t, expected[id3], stored[id3])
  1916  		require.Equal(t, expected[id4], stored[id4])
  1917  
  1918  		// Verify slab size in header is correct.
  1919  		meta, ok := m.root.(*MapMetaDataSlab)
  1920  		require.True(t, ok)
  1921  		require.Equal(t, 2, len(meta.childrenHeaders))
  1922  		require.Equal(t, uint32(len(stored[id2])), meta.childrenHeaders[0].size)
  1923  		require.Equal(t, uint32(len(stored[id3])), meta.childrenHeaders[1].size)
  1924  
  1925  		// Decode data to new storage
  1926  		storage2 := newTestPersistentStorageWithData(t, stored)
  1927  
  1928  		// Test new map from storage2
  1929  		decodedMap, err := NewMapWithRootID(storage2, id1, digesterBuilder)
  1930  		require.NoError(t, err)
  1931  
  1932  		verifyMap(t, storage2, typeInfo, address, decodedMap, keyValues, nil, false)
  1933  	})
  1934  
  1935  	t.Run("inline collision 1 level", func(t *testing.T) {
  1936  
  1937  		SetThreshold(256)
  1938  		defer SetThreshold(1024)
  1939  
  1940  		// Create and populate map in memory
  1941  		storage := newTestBasicStorage(t)
  1942  
  1943  		digesterBuilder := &mockDigesterBuilder{}
  1944  
  1945  		// Create map
  1946  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  1947  		require.NoError(t, err)
  1948  
  1949  		const mapSize = 8
  1950  		keyValues := make(map[Value]Value, mapSize)
  1951  		for i := uint64(0); i < mapSize; i++ {
  1952  			k := Uint64Value(i)
  1953  			v := Uint64Value(i * 2)
  1954  
  1955  			digests := []Digest{Digest(i % 4), Digest(i)}
  1956  			digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  1957  
  1958  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  1959  			require.NoError(t, err)
  1960  			require.Nil(t, existingStorable)
  1961  
  1962  			keyValues[k] = v
  1963  		}
  1964  
  1965  		require.Equal(t, uint64(mapSize), m.Count())
  1966  
  1967  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  1968  
  1969  		// Expected serialized slab data with storage id
  1970  		expected := map[StorageID][]byte{
  1971  
  1972  			// map metadata slab
  1973  			id1: {
  1974  				// extra data
  1975  				// version
  1976  				0x00,
  1977  				// flag: root + map data
  1978  				0x88,
  1979  				// extra data (CBOR encoded array of 3 elements)
  1980  				0x83,
  1981  				// type info: "map"
  1982  				0x18, 0x2A,
  1983  				// count: 8
  1984  				0x08,
  1985  				// seed
  1986  				0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  1987  
  1988  				// version
  1989  				0x00,
  1990  				// flag: root + map data
  1991  				0x88,
  1992  
  1993  				// the following encoded data is valid CBOR
  1994  
  1995  				// elements (array of 3 elements)
  1996  				0x83,
  1997  
  1998  				// level: 0
  1999  				0x00,
  2000  
  2001  				// hkeys (byte string of length 8 * 4)
  2002  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20,
  2003  				// hkey: 0
  2004  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2005  				// hkey: 1
  2006  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2007  				// hkey: 2
  2008  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2009  				// hkey: 3
  2010  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  2011  
  2012  				// elements (array of 4 elements)
  2013  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  2014  
  2015  				// inline collision group corresponding to hkey 0
  2016  				// (tag number CBORTagInlineCollisionGroup)
  2017  				0xd8, 0xfd,
  2018  				// (tag content: array of 3 elements)
  2019  				0x83,
  2020  
  2021  				// level: 1
  2022  				0x01,
  2023  
  2024  				// hkeys (byte string of length 8 * 2)
  2025  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
  2026  				// hkey: 0
  2027  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2028  				// hkey: 4
  2029  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  2030  
  2031  				// elements (array of 2 elements)
  2032  				// each element is encoded as CBOR array of 2 elements (key, value)
  2033  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2034  				// element: [uint64(0), uint64(0)]
  2035  				0x82, 0xd8, 0xa4, 0x00, 0xd8, 0xa4, 0x00,
  2036  				// element: [uint64(4), uint64(8)]
  2037  				0x82, 0xd8, 0xa4, 0x04, 0xd8, 0xa4, 0x08,
  2038  
  2039  				// inline collision group corresponding to hkey 1
  2040  				// (tag number CBORTagInlineCollisionGroup)
  2041  				0xd8, 0xfd,
  2042  				// (tag content: array of 3 elements)
  2043  				0x83,
  2044  
  2045  				// level: 1
  2046  				0x01,
  2047  
  2048  				// hkeys (byte string of length 8 * 2)
  2049  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
  2050  				// hkey: 1
  2051  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2052  				// hkey: 5
  2053  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05,
  2054  
  2055  				// elements (array of 2 elements)
  2056  				// each element is encoded as CBOR array of 2 elements (key, value)
  2057  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2058  				// element: [uint64(1), uint64(2)]
  2059  				0x82, 0xd8, 0xa4, 0x01, 0xd8, 0xa4, 0x02,
  2060  				// element: [uint64(5), uint64(10)]
  2061  				0x82, 0xd8, 0xa4, 0x05, 0xd8, 0xa4, 0x0a,
  2062  
  2063  				// inline collision group corresponding to hkey 2
  2064  				// (tag number CBORTagInlineCollisionGroup)
  2065  				0xd8, 0xfd,
  2066  				// (tag content: array of 3 elements)
  2067  				0x83,
  2068  
  2069  				// level: 1
  2070  				0x01,
  2071  
  2072  				// hkeys (byte string of length 8 * 2)
  2073  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
  2074  				// hkey: 2
  2075  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2076  				// hkey: 6
  2077  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06,
  2078  
  2079  				// elements (array of 2 elements)
  2080  				// each element is encoded as CBOR array of 2 elements (key, value)
  2081  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2082  				// element: [uint64(2), uint64(4)]
  2083  				0x82, 0xd8, 0xa4, 0x02, 0xd8, 0xa4, 0x04,
  2084  				// element: [uint64(6), uint64(12)]
  2085  				0x82, 0xd8, 0xa4, 0x06, 0xd8, 0xa4, 0x0c,
  2086  
  2087  				// inline collision group corresponding to hkey 3
  2088  				// (tag number CBORTagInlineCollisionGroup)
  2089  				0xd8, 0xfd,
  2090  				// (tag content: array of 3 elements)
  2091  				0x83,
  2092  
  2093  				// level: 1
  2094  				0x01,
  2095  
  2096  				// hkeys (byte string of length 8 * 2)
  2097  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
  2098  				// hkey: 3
  2099  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  2100  				// hkey: 7
  2101  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07,
  2102  
  2103  				// elements (array of 2 elements)
  2104  				// each element is encoded as CBOR array of 2 elements (key, value)
  2105  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2106  				// element: [uint64(3), uint64(6)]
  2107  				0x82, 0xd8, 0xa4, 0x03, 0xd8, 0xa4, 0x06,
  2108  				// element: [uint64(7), uint64(14)]
  2109  				0x82, 0xd8, 0xa4, 0x07, 0xd8, 0xa4, 0x0e,
  2110  			},
  2111  		}
  2112  
  2113  		stored, err := storage.Encode()
  2114  		require.NoError(t, err)
  2115  		require.Equal(t, len(expected), len(stored))
  2116  		require.Equal(t, expected[id1], stored[id1])
  2117  
  2118  		// Decode data to new storage
  2119  		storage2 := newTestPersistentStorageWithData(t, stored)
  2120  
  2121  		// Test new map from storage2
  2122  		decodedMap, err := NewMapWithRootID(storage2, id1, digesterBuilder)
  2123  		require.NoError(t, err)
  2124  
  2125  		verifyMap(t, storage2, typeInfo, address, decodedMap, keyValues, nil, false)
  2126  	})
  2127  
  2128  	t.Run("inline collision 2 levels", func(t *testing.T) {
  2129  
  2130  		SetThreshold(256)
  2131  		defer SetThreshold(1024)
  2132  
  2133  		// Create and populate map in memory
  2134  		storage := newTestBasicStorage(t)
  2135  
  2136  		digesterBuilder := &mockDigesterBuilder{}
  2137  
  2138  		// Create map
  2139  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2140  		require.NoError(t, err)
  2141  
  2142  		const mapSize = 8
  2143  		keyValues := make(map[Value]Value)
  2144  		for i := uint64(0); i < mapSize; i++ {
  2145  			k := Uint64Value(i)
  2146  			v := Uint64Value(i * 2)
  2147  
  2148  			digests := []Digest{Digest(i % 4), Digest(i % 2)}
  2149  			digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  2150  
  2151  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  2152  			require.NoError(t, err)
  2153  			require.Nil(t, existingStorable)
  2154  
  2155  			keyValues[k] = v
  2156  		}
  2157  
  2158  		require.Equal(t, uint64(mapSize), m.Count())
  2159  
  2160  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  2161  
  2162  		// Expected serialized slab data with storage id
  2163  		expected := map[StorageID][]byte{
  2164  
  2165  			// map metadata slab
  2166  			id1: {
  2167  				// extra data
  2168  				// version
  2169  				0x00,
  2170  				// flag: root + map data
  2171  				0x88,
  2172  				// extra data (CBOR encoded array of 3 elements)
  2173  				0x83,
  2174  				// type info: "map"
  2175  				0x18, 0x2A,
  2176  				// count: 8
  2177  				0x08,
  2178  				// seed
  2179  				0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  2180  
  2181  				// version
  2182  				0x00,
  2183  				// flag: root + map data
  2184  				0x88,
  2185  
  2186  				// the following encoded data is valid CBOR
  2187  
  2188  				// elements (array of 3 elements)
  2189  				0x83,
  2190  
  2191  				// level: 0
  2192  				0x00,
  2193  
  2194  				// hkeys (byte string of length 8 * 4)
  2195  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20,
  2196  				// hkey: 0
  2197  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2198  				// hkey: 1
  2199  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2200  				// hkey: 2
  2201  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2202  				// hkey: 3
  2203  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  2204  
  2205  				// elements (array of 4 elements)
  2206  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  2207  
  2208  				// inline collision group corresponding to hkey 0
  2209  				// (tag number CBORTagInlineCollisionGroup)
  2210  				0xd8, 0xfd,
  2211  				// (tag content: array of 3 elements)
  2212  				0x83,
  2213  
  2214  				// level 1
  2215  				0x01,
  2216  
  2217  				// hkeys (byte string of length 8 * 1)
  2218  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2219  				// hkey: 0
  2220  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2221  
  2222  				// elements (array of 1 element)
  2223  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2224  
  2225  				// inline collision group corresponding to hkey [0, 0]
  2226  				// (tag number CBORTagInlineCollisionGroup)
  2227  				0xd8, 0xfd,
  2228  				// (tag content: array of 3 elements)
  2229  				0x83,
  2230  
  2231  				// level: 2
  2232  				0x02,
  2233  
  2234  				// hkeys (empty byte string)
  2235  				0x40,
  2236  
  2237  				// elements (array of 2 elements)
  2238  				// each element is encoded as CBOR array of 2 elements (key, value)
  2239  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2240  				// element: [uint64(0), uint64(0)]
  2241  				0x82, 0xd8, 0xa4, 0x00, 0xd8, 0xa4, 0x00,
  2242  				// element: [uint64(4), uint64(8)]
  2243  				0x82, 0xd8, 0xa4, 0x04, 0xd8, 0xa4, 0x08,
  2244  
  2245  				// inline collision group corresponding to hkey 1
  2246  				// (tag number CBORTagInlineCollisionGroup)
  2247  				0xd8, 0xfd,
  2248  				// (tag content: array of 3 elements)
  2249  				0x83,
  2250  
  2251  				// level: 1
  2252  				0x01,
  2253  
  2254  				// hkeys (byte string of length 8 * 1)
  2255  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2256  				// hkey: 1
  2257  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2258  
  2259  				// elements (array of 1 element)
  2260  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2261  
  2262  				// inline collision group corresponding to hkey [1, 1]
  2263  				// (tag number CBORTagInlineCollisionGroup)
  2264  				0xd8, 0xfd,
  2265  				// (tag content: array of 3 elements)
  2266  				0x83,
  2267  
  2268  				// level: 2
  2269  				0x02,
  2270  
  2271  				// hkeys (empty byte string)
  2272  				0x40,
  2273  
  2274  				// elements (array of 2 elements)
  2275  				// each element is encoded as CBOR array of 2 elements (key, value)
  2276  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2277  				// element: [uint64(1), uint64(2)]
  2278  				0x82, 0xd8, 0xa4, 0x01, 0xd8, 0xa4, 0x02,
  2279  				// element: [uint64(5), uint64(10)]
  2280  				0x82, 0xd8, 0xa4, 0x05, 0xd8, 0xa4, 0x0a,
  2281  
  2282  				// inline collision group corresponding to hkey 2
  2283  				// (tag number CBORTagInlineCollisionGroup)
  2284  				0xd8, 0xfd,
  2285  				// (tag content: array of 3 elements)
  2286  				0x83,
  2287  
  2288  				// level: 1
  2289  				0x01,
  2290  
  2291  				// hkeys (byte string of length 8 * 1)
  2292  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2293  				// hkey: 0
  2294  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2295  
  2296  				// elements (array of 1 element)
  2297  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2298  
  2299  				// inline collision group corresponding to hkey [2, 0]
  2300  				// (tag number CBORTagInlineCollisionGroup)
  2301  				0xd8, 0xfd,
  2302  				// (tag content: array of 3 elements)
  2303  				0x83,
  2304  
  2305  				// level: 2
  2306  				0x02,
  2307  
  2308  				// hkeys (empty byte string)
  2309  				0x40,
  2310  
  2311  				// elements (array of 2 elements)
  2312  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2313  				// element: [uint64(2), uint64(4)]
  2314  				0x82, 0xd8, 0xa4, 0x02, 0xd8, 0xa4, 0x04,
  2315  				// element: [uint64(6), uint64(12)]
  2316  				0x82, 0xd8, 0xa4, 0x06, 0xd8, 0xa4, 0x0c,
  2317  
  2318  				// inline collision group corresponding to hkey 3
  2319  				// (tag number CBORTagInlineCollisionGroup)
  2320  				0xd8, 0xfd,
  2321  				// (tag content: array of 3 elements)
  2322  				0x83,
  2323  
  2324  				// level: 1
  2325  				0x01,
  2326  
  2327  				// hkeys (byte string of length 8 * 1)
  2328  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2329  				// hkey: 1
  2330  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2331  
  2332  				// elements (array of 1 element)
  2333  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2334  
  2335  				// inline collision group corresponding to hkey [3, 1]
  2336  				// (tag number CBORTagInlineCollisionGroup)
  2337  				0xd8, 0xfd,
  2338  				// (tag content: array of 3 elements)
  2339  				0x83,
  2340  
  2341  				// level: 2
  2342  				0x02,
  2343  
  2344  				// hkeys (empty byte string)
  2345  				0x40,
  2346  
  2347  				// elements (array of 2 elements)
  2348  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2349  				// element: [uint64(3), uint64(6)]
  2350  				0x82, 0xd8, 0xa4, 0x03, 0xd8, 0xa4, 0x06,
  2351  				// element: [uint64(7), uint64(14)]
  2352  				0x82, 0xd8, 0xa4, 0x07, 0xd8, 0xa4, 0x0e,
  2353  			},
  2354  		}
  2355  
  2356  		stored, err := storage.Encode()
  2357  		require.NoError(t, err)
  2358  		require.Equal(t, len(expected), len(stored))
  2359  		require.Equal(t, expected[id1], stored[id1])
  2360  
  2361  		// Decode data to new storage
  2362  		storage2 := newTestPersistentStorageWithData(t, stored)
  2363  
  2364  		// Test new map from storage2
  2365  		decodedMap, err := NewMapWithRootID(storage2, id1, digesterBuilder)
  2366  		require.NoError(t, err)
  2367  
  2368  		verifyMap(t, storage2, typeInfo, address, decodedMap, keyValues, nil, false)
  2369  	})
  2370  
  2371  	t.Run("external collision", func(t *testing.T) {
  2372  
  2373  		SetThreshold(256)
  2374  		defer SetThreshold(1024)
  2375  
  2376  		// Create and populate map in memory
  2377  		storage := newTestBasicStorage(t)
  2378  
  2379  		digesterBuilder := &mockDigesterBuilder{}
  2380  
  2381  		// Create map
  2382  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2383  		require.NoError(t, err)
  2384  
  2385  		const mapSize = 20
  2386  		keyValues := make(map[Value]Value)
  2387  		for i := uint64(0); i < mapSize; i++ {
  2388  			k := Uint64Value(i)
  2389  			v := Uint64Value(i * 2)
  2390  
  2391  			digests := []Digest{Digest(i % 2), Digest(i)}
  2392  			digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  2393  
  2394  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  2395  			require.NoError(t, err)
  2396  			require.Nil(t, existingStorable)
  2397  
  2398  			keyValues[k] = v
  2399  		}
  2400  
  2401  		require.Equal(t, uint64(mapSize), m.Count())
  2402  
  2403  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  2404  		id2 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 2}}
  2405  		id3 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 3}}
  2406  
  2407  		// Expected serialized slab data with storage id
  2408  		expected := map[StorageID][]byte{
  2409  
  2410  			// map data slab
  2411  			id1: {
  2412  				// extra data
  2413  				// version
  2414  				0x00,
  2415  				// flag: root + has pointer + map data
  2416  				0xc8,
  2417  				// extra data (CBOR encoded array of 3 elements)
  2418  				0x83,
  2419  				// type info: "map"
  2420  				0x18, 0x2A,
  2421  				// count: 20
  2422  				0x14,
  2423  				// seed
  2424  				0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  2425  
  2426  				// version
  2427  				0x00,
  2428  				// flag: root + has pointer + map data
  2429  				0xc8,
  2430  
  2431  				// the following encoded data is valid CBOR
  2432  
  2433  				// elements (array of 3 elements)
  2434  				0x83,
  2435  
  2436  				// level: 0
  2437  				0x00,
  2438  
  2439  				// hkeys (byte string of length 8 * 2)
  2440  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
  2441  				// hkey: 0
  2442  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
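        				// hkey: 1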
  2443  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2444  
  2445  				// elements (array of 2 elements)
  2446  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2447  
  2448  				// external collision group corresponding to hkey 0
  2449  				// (tag number CBORTagExternalCollisionGroup)
  2450  				0xd8, 0xfe,
  2451  				// (tag content: storage id)
  2452  				0xd8, 0xff, 0x50,
  2453  				0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2454  
  2455  				// external collision group corresponding to hkey 1
  2456  				// (tag number CBORTagExternalCollisionGroup)
  2457  				0xd8, 0xfe,
  2458  				// (tag content: storage id)
  2459  				0xd8, 0xff, 0x50,
  2460  				0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  2461  			},
  2462  
  2463  			// external collision group
  2464  			id2: {
  2465  				// version
  2466  				0x00,
  2467  				// flag: any size + collision group
  2468  				0x2b,
  2469  				// next storage id
  2470  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2471  
  2472  				// the following encoded data is valid CBOR
  2473  
  2474  				// elements (array of 3 elements)
  2475  				0x83,
  2476  
  2477  				// level: 1
  2478  				0x01,
  2479  
  2480  				// hkeys (byte string of length 8 * 10)
  2481  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50,
  2482  				// hkey: 0
  2483  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2484  				// hkey: 2
  2485  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2486  				// hkey: 4
  2487  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
  2488  				// hkey: 6
  2489  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06,
  2490  				// hkey: 8
  2491  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2492  				// hkey: 10
  2493  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a,
  2494  				// hkey: 12
  2495  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c,
  2496  				// hkey: 14
  2497  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e,
  2498  				// hkey: 16
  2499  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
  2500  				// hkey: 18
  2501  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12,
  2502  
  2503  				// elements (array of 10 elements)
  2504  				// each element is encoded as CBOR array of 2 elements (key, value)
  2505  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a,
  2506  				// element: [uint64(0), uint64(0)]
  2507  				0x82, 0xd8, 0xa4, 0x00, 0xd8, 0xa4, 0x00,
  2508  				// element: [uint64(2), uint64(4)]
  2509  				0x82, 0xd8, 0xa4, 0x02, 0xd8, 0xa4, 0x04,
  2510  				// element: [uint64(4), uint64(8)]
  2511  				0x82, 0xd8, 0xa4, 0x04, 0xd8, 0xa4, 0x08,
  2512  				// element: [uint64(6), uint64(12)]
  2513  				0x82, 0xd8, 0xa4, 0x06, 0xd8, 0xa4, 0x0c,
  2514  				// element: [uint64(8), uint64(16)]
  2515  				0x82, 0xd8, 0xa4, 0x08, 0xd8, 0xa4, 0x10,
  2516  				// element: [uint64(10), uint64(20)]
  2517  				0x82, 0xd8, 0xa4, 0x0a, 0xd8, 0xa4, 0x14,
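        				// note: uint values >= 24 below need an extra CBOR head byte (0x18), so these elements are one byte longer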
  2518  				// element: [uint64(12), uint64(24)]
  2519  				0x82, 0xd8, 0xa4, 0x0c, 0xd8, 0xa4, 0x18, 0x18,
  2520  				// element: [uint64(14), uint64(28)]
  2521  				0x82, 0xd8, 0xa4, 0x0e, 0xd8, 0xa4, 0x18, 0x1c,
  2522  				// element: [uint64(16), uint64(32)]
  2523  				0x82, 0xd8, 0xa4, 0x10, 0xd8, 0xa4, 0x18, 0x20,
  2524  				// element: [uint64(18), uint64(36)]
  2525  				0x82, 0xd8, 0xa4, 0x12, 0xd8, 0xa4, 0x18, 0x24,
  2526  			},
  2527  
  2528  			// external collision group
  2529  			id3: {
  2530  				// version
  2531  				0x00,
  2532  				// flag: any size + collision group
  2533  				0x2b,
  2534  				// next storage id
  2535  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2536  
  2537  				// the following encoded data is valid CBOR
  2538  
  2539  				// elements (array of 3 elements)
  2540  				0x83,
  2541  
  2542  				// level: 1
  2543  				0x01,
  2544  
  2545  				// hkeys (byte string of length 8 * 10)
  2546  				0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50,
  2547  				// hkey: 1
  2548  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2549  				// hkey: 3
  2550  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  2551  				// hkey: 5
  2552  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05,
  2553  				// hkey: 7
  2554  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07,
  2555  				// hkey: 9
  2556  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
  2557  				// hkey: 11
  2558  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b,
  2559  				// hkey: 13
  2560  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d,
  2561  				// hkey: 15
  2562  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f,
  2563  				// hkey: 17
  2564  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11,
  2565  				// hkey: 19
  2566  				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13,
  2567  
  2568  				// elements (array of 10 elements)
  2569  				// each element is encoded as CBOR array of 2 elements (key, value)
  2570  				0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a,
  2571  				// element: [uint64(1), uint64(2)]
  2572  				0x82, 0xd8, 0xa4, 0x01, 0xd8, 0xa4, 0x02,
  2573  				// element: [uint64(3), uint64(6)]
  2574  				0x82, 0xd8, 0xa4, 0x03, 0xd8, 0xa4, 0x06,
  2575  				// element: [uint64(5), uint64(10)]
  2576  				0x82, 0xd8, 0xa4, 0x05, 0xd8, 0xa4, 0x0a,
  2577  				// element: [uint64(7), uint64(14)]
  2578  				0x82, 0xd8, 0xa4, 0x07, 0xd8, 0xa4, 0x0e,
  2579  				// element: [uint64(9), uint64(18)]
  2580  				0x82, 0xd8, 0xa4, 0x09, 0xd8, 0xa4, 0x12,
  2581  				// element: [uint64(11), uint64(22)]
  2582  				0x82, 0xd8, 0xa4, 0x0b, 0xd8, 0xa4, 0x16,
  2583  				// element: [uint64(13), uint64(26)]
  2584  				0x82, 0xd8, 0xa4, 0x0d, 0xd8, 0xa4, 0x18, 0x1a,
  2585  				// element: [uint64(15), uint64(30)]
  2586  				0x82, 0xd8, 0xa4, 0x0f, 0xd8, 0xa4, 0x18, 0x1e,
  2587  				// element: [uint64(17), uint64(34)]
  2588  				0x82, 0xd8, 0xa4, 0x11, 0xd8, 0xa4, 0x18, 0x22,
  2589  				// element: [uint64(19), uint64(38)]
  2590  				0x82, 0xd8, 0xa4, 0x13, 0xd8, 0xa4, 0x18, 0x26,
  2591  			},
  2592  		}
  2593  
  2594  		stored, err := storage.Encode()
  2595  		require.NoError(t, err)
  2596  		require.Equal(t, len(expected), len(stored))
  2597  		require.Equal(t, expected[id1], stored[id1])
  2598  		require.Equal(t, expected[id2], stored[id2])
  2599  		require.Equal(t, expected[id3], stored[id3])
  2600  
  2601  		// Decode data to new storage
  2602  		storage2 := newTestPersistentStorageWithData(t, stored)
  2603  
  2604  		// Test new map from storage2
  2605  		decodedMap, err := NewMapWithRootID(storage2, id1, digesterBuilder)
  2606  		require.NoError(t, err)
  2607  
  2608  		verifyMap(t, storage2, typeInfo, address, decodedMap, keyValues, nil, false)
  2609  	})
  2610  
  2611  	t.Run("pointer", func(t *testing.T) {
  2612  		// Create and populate map in memory
  2613  		storage := newTestBasicStorage(t)
  2614  
  2615  		digesterBuilder := &mockDigesterBuilder{}
  2616  
  2617  		// Create map
  2618  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2619  		require.NoError(t, err)
  2620  
  2621  		k := Uint64Value(0)
  2622  		v := Uint64Value(0)
  2623  
  2624  		digests := []Digest{Digest(0), Digest(1)}
  2625  		digesterBuilder.On("Digest", k).Return(mockDigester{d: digests})
  2626  
  2627  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  2628  		require.NoError(t, err)
  2629  		require.Nil(t, existingStorable)
  2630  
  2631  		require.Equal(t, uint64(1), m.Count())
  2632  
  2633  		id1 := StorageID{Address: address, Index: StorageIndex{0, 0, 0, 0, 0, 0, 0, 1}}
  2634  
  2635  		expectedNoPointer := []byte{
  2636  
  2637  			// version
  2638  			0x00,
  2639  			// flag: root + map data
  2640  			0x88,
  2641  			// extra data (CBOR encoded array of 3 elements)
  2642  			0x83,
  2643  			// type info: "map"
  2644  			0x18, 0x2A,
  2645  			// count: 1
  2646  			0x01,
  2647  			// seed
  2648  			0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  2649  
  2650  			// version
  2651  			0x00,
  2652  			// flag: root + map data
  2653  			0x88,
  2654  
  2655  			// the following encoded data is valid CBOR
  2656  
  2657  			// elements (array of 3 elements)
  2658  			0x83,
  2659  
  2660  			// level: 0
  2661  			0x00,
  2662  
  2663  			// hkeys (byte string of length 8 * 1)
  2664  			0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2665  			// hkey: 0
  2666  			0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2667  
  2668  			// elements (array of 1 element)
  2669  			// each element is encoded as CBOR array of 2 elements (key, value)
  2670  			0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2671  			// element: [uint64(0), uint64(0)]
  2672  			0x82, 0xd8, 0xa4, 0x00, 0xd8, 0xa4, 0x00,
  2673  		}
  2674  
  2675  		// Verify encoded data
  2676  		stored, err := storage.Encode()
  2677  		require.NoError(t, err)
  2678  		require.Equal(t, 1, len(stored))
  2679  		require.Equal(t, expectedNoPointer, stored[id1])
  2680  
  2681  		// Overwrite existing value with long string
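        		// The 512-byte string is too large to store inline, so it goes into its own slab
        		// and the element value becomes a storage id pointing to that slab.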
  2682  		vs := NewStringValue(strings.Repeat("a", 512))
  2683  		existingStorable, err = m.Set(compare, hashInputProvider, k, vs)
  2684  		require.NoError(t, err)
  2685  
  2686  		existingValue, err := existingStorable.StoredValue(storage)
  2687  		require.NoError(t, err)
  2688  		valueEqual(t, typeInfoComparator, v, existingValue)
  2689  
  2690  		expectedHasPointer := []byte{
  2691  
  2692  			// version
  2693  			0x00,
  2694  			// flag: root + pointer + map data
  2695  			0xc8,
  2696  			// extra data (CBOR encoded array of 3 elements)
  2697  			0x83,
  2698  			// type info: "map"
  2699  			0x18, 0x2A,
  2700  			// count: 1
  2701  			0x01,
  2702  			// seed
  2703  			0x1b, 0x52, 0xa8, 0x78, 0x3, 0x85, 0x2c, 0xaa, 0x49,
  2704  
  2705  			// version
  2706  			0x00,
  2707  			// flag: root + pointer + map data
  2708  			0xc8,
  2709  
  2710  			// the following encoded data is valid CBOR
  2711  
  2712  			// elements (array of 3 elements)
  2713  			0x83,
  2714  
  2715  			// level: 0
  2716  			0x00,
  2717  
  2718  			// hkeys (byte string of length 8 * 1)
  2719  			0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
  2720  			// hkey: 0
  2721  			0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  2722  
  2723  			// elements (array of 1 element)
  2724  			// each element is encoded as CBOR array of 2 elements (key, value)
  2725  			0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  2726  			// element: [uint64(0), storage id]
  2727  			0x82, 0xd8, 0xa4, 0x00,
  2728  			// (tag content: storage id)
  2729  			0xd8, 0xff, 0x50,
  2730  			0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  2731  		}
  2732  
  2733  		stored, err = storage.Encode()
  2734  		require.NoError(t, err)
  2735  		require.Equal(t, 2, len(stored))
  2736  		require.Equal(t, expectedHasPointer, stored[id1])
  2737  	})
  2738  }
  2739  
  2740  func TestMapEncodeDecodeRandomValues(t *testing.T) {
  2741  
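        	// Use a small slab size threshold so the random map is likely to span multiple slabs.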
  2742  	SetThreshold(256)
  2743  	defer SetThreshold(1024)
  2744  
  2745  	r := newRand(t)
  2746  
  2747  	typeInfo := testTypeInfo{42}
  2748  	storage := newTestPersistentStorage(t)
  2749  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  2750  
  2751  	m, keyValues := testMapSetRemoveRandomValues(t, r, storage, typeInfo, address)
  2752  
  2753  	verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  2754  
  2755  	// Create a new storage with encoded data from base storage
  2756  	storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage)
  2757  
  2758  	// Create new map from new storage
  2759  	m2, err := NewMapWithRootID(storage2, m.StorageID(), m.digesterBuilder)
  2760  	require.NoError(t, err)
  2761  
  2762  	verifyMap(t, storage2, typeInfo, address, m2, keyValues, nil, false)
  2763  }
  2764  
  2765  func TestMapStoredValue(t *testing.T) {
  2766  
  2767  	const mapSize = 4096
  2768  
  2769  	r := newRand(t)
  2770  
  2771  	typeInfo := testTypeInfo{42}
  2772  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  2773  	storage := newTestPersistentStorage(t)
  2774  
  2775  	keyValues := make(map[Value]Value, mapSize)
  2776  	i := 0
  2777  	for len(keyValues) < mapSize {
  2778  		k := NewStringValue(randStr(r, 16))
  2779  		keyValues[k] = Uint64Value(i)
  2780  		i++
  2781  	}
  2782  
  2783  	m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
  2784  	require.NoError(t, err)
  2785  
  2786  	for k, v := range keyValues {
  2787  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  2788  		require.NoError(t, err)
  2789  		require.Nil(t, existingStorable)
  2790  	}
  2791  
  2792  	rootID := m.StorageID()
  2793  
  2794  	slabIterator, err := storage.SlabIterator()
  2795  	require.NoError(t, err)
  2796  
  2797  	for {
  2798  		id, slab := slabIterator()
  2799  
  2800  		if id == StorageIDUndefined {
  2801  			break
  2802  		}
  2803  
  2804  		value, err := slab.StoredValue(storage)
  2805  
  2806  		if id == rootID {
  2807  			require.NoError(t, err)
  2808  
  2809  			m2, ok := value.(*OrderedMap)
  2810  			require.True(t, ok)
  2811  
  2812  			verifyMap(t, storage, typeInfo, address, m2, keyValues, nil, false)
  2813  		} else {
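        			// Non-root slabs do not represent a whole stored value, so StoredValue is expected to fail with NotValueError.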
  2814  			require.Equal(t, 1, errorCategorizationCount(err))
  2815  			var fatalError *FatalError
  2816  			var notValueError *NotValueError
  2817  			require.ErrorAs(t, err, &fatalError)
  2818  			require.ErrorAs(t, err, &notValueError)
  2819  			require.ErrorAs(t, fatalError, &notValueError)
  2820  			require.Nil(t, value)
  2821  		}
  2822  	}
  2823  }
  2824  
  2825  func TestMapPopIterate(t *testing.T) {
  2826  
  2827  	t.Run("empty", func(t *testing.T) {
  2828  		typeInfo := testTypeInfo{42}
  2829  		storage := newTestPersistentStorage(t)
  2830  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  2831  		digesterBuilder := newBasicDigesterBuilder()
  2832  
  2833  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2834  		require.NoError(t, err)
  2835  
  2836  		err = storage.Commit()
  2837  		require.NoError(t, err)
  2838  
  2839  		require.Equal(t, 1, storage.Count())
  2840  
  2841  		i := uint64(0)
  2842  		err = m.PopIterate(func(k Storable, v Storable) {
  2843  			i++
  2844  		})
  2845  		require.NoError(t, err)
  2846  		require.Equal(t, uint64(0), i)
  2847  
  2848  		verifyEmptyMap(t, storage, typeInfo, address, m)
  2849  	})
  2850  
  2851  	t.Run("root-dataslab", func(t *testing.T) {
  2852  		const mapSize = 10
  2853  
  2854  		typeInfo := testTypeInfo{42}
  2855  		storage := newTestPersistentStorage(t)
  2856  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  2857  		digesterBuilder := newBasicDigesterBuilder()
  2858  
  2859  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2860  		require.NoError(t, err)
  2861  
  2862  		keyValues := make(map[Value]Value, mapSize)
  2863  		sortedKeys := make([]Value, mapSize)
  2864  		for i := uint64(0); i < mapSize; i++ {
  2865  			key, value := Uint64Value(i), Uint64Value(i*10)
  2866  			sortedKeys[i] = key
  2867  			keyValues[key] = value
  2868  
  2869  			existingStorable, err := m.Set(compare, hashInputProvider, key, value)
  2870  			require.NoError(t, err)
  2871  			require.Nil(t, existingStorable)
  2872  		}
  2873  
  2874  		require.Equal(t, uint64(mapSize), m.Count())
  2875  
  2876  		err = storage.Commit()
  2877  		require.NoError(t, err)
  2878  
  2879  		require.Equal(t, 1, storage.Count())
  2880  
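        		// Map elements are ordered by key digest; PopIterate visits them from last to first,
        		// so sort keys by digest before comparing against pop order.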
  2881  		sort.Stable(keysByDigest{sortedKeys, digesterBuilder})
  2882  
  2883  		i := mapSize
  2884  		err = m.PopIterate(func(k, v Storable) {
  2885  			i--
  2886  
  2887  			kv, err := k.StoredValue(storage)
  2888  			require.NoError(t, err)
  2889  			valueEqual(t, typeInfoComparator, sortedKeys[i], kv)
  2890  
  2891  			vv, err := v.StoredValue(storage)
  2892  			require.NoError(t, err)
  2893  			valueEqual(t, typeInfoComparator, keyValues[sortedKeys[i]], vv)
  2894  		})
  2895  
  2896  		require.NoError(t, err)
  2897  		require.Equal(t, 0, i)
  2898  
  2899  		verifyEmptyMap(t, storage, typeInfo, address, m)
  2900  	})
  2901  
  2902  	t.Run("root-metaslab", func(t *testing.T) {
  2903  		const mapSize = 4096
  2904  
  2905  		r := newRand(t)
  2906  
  2907  		keyValues := make(map[Value]Value, mapSize)
  2908  		sortedKeys := make([]Value, mapSize)
  2909  		i := 0
  2910  		for len(keyValues) < mapSize {
  2911  			k := NewStringValue(randStr(r, 16))
  2912  			if _, found := keyValues[k]; !found {
  2913  				sortedKeys[i] = k
  2914  				keyValues[k] = NewStringValue(randStr(r, 16))
  2915  				i++
  2916  			}
  2917  		}
  2918  
  2919  		typeInfo := testTypeInfo{42}
  2920  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  2921  		storage := newTestPersistentStorage(t)
  2922  		digesterBuilder := newBasicDigesterBuilder()
  2923  
  2924  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2925  		require.NoError(t, err)
  2926  
  2927  		for k, v := range keyValues {
  2928  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  2929  			require.NoError(t, err)
  2930  			require.Nil(t, existingStorable)
  2931  		}
  2932  
  2933  		err = storage.Commit()
  2934  		require.NoError(t, err)
  2935  
  2936  		sort.Stable(keysByDigest{sortedKeys, digesterBuilder})
  2937  
  2938  		// Iterate key value pairs
  2939  		i = len(keyValues)
  2940  		err = m.PopIterate(func(k Storable, v Storable) {
  2941  			i--
  2942  
  2943  			kv, err := k.StoredValue(storage)
  2944  			require.NoError(t, err)
  2945  			valueEqual(t, typeInfoComparator, sortedKeys[i], kv)
  2946  
  2947  			vv, err := v.StoredValue(storage)
  2948  			require.NoError(t, err)
  2949  			valueEqual(t, typeInfoComparator, keyValues[sortedKeys[i]], vv)
  2950  		})
  2951  
  2952  		require.NoError(t, err)
  2953  		require.Equal(t, 0, i)
  2954  
  2955  		verifyEmptyMap(t, storage, typeInfo, address, m)
  2956  	})
  2957  
  2958  	t.Run("collision", func(t *testing.T) {
  2959  		// MetaDataSlabCount:1 DataSlabCount:13 CollisionDataSlabCount:100
  2960  
  2961  		const mapSize = 1024
  2962  
  2963  		SetThreshold(512)
  2964  		defer SetThreshold(1024)
  2965  
  2966  		r := newRand(t)
  2967  
  2968  		typeInfo := testTypeInfo{42}
  2969  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  2970  		digesterBuilder := &mockDigesterBuilder{}
  2971  		storage := newTestPersistentStorage(t)
  2972  
  2973  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  2974  		require.NoError(t, err)
  2975  
  2976  		keyValues := make(map[Value]Value, mapSize)
  2977  		sortedKeys := make([]Value, mapSize)
  2978  		i := 0
  2979  		for len(keyValues) < mapSize {
  2980  			k := NewStringValue(randStr(r, 16))
  2981  
  2982  			if _, found := keyValues[k]; !found {
  2983  
  2984  				sortedKeys[i] = k
  2985  				keyValues[k] = NewStringValue(randStr(r, 16))
  2986  
  2987  				digests := []Digest{
  2988  					Digest(i % 100),
  2989  					Digest(i % 5),
  2990  				}
  2991  
  2992  				digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  2993  
  2994  				existingStorable, err := m.Set(compare, hashInputProvider, k, keyValues[k])
  2995  				require.NoError(t, err)
  2996  				require.Nil(t, existingStorable)
  2997  
  2998  				i++
  2999  			}
  3000  		}
  3001  
  3002  		sort.Stable(keysByDigest{sortedKeys, digesterBuilder})
  3003  
  3004  		err = storage.Commit()
  3005  		require.NoError(t, err)
  3006  
  3007  		// Iterate key value pairs
  3008  		i = mapSize
  3009  		err = m.PopIterate(func(k Storable, v Storable) {
  3010  			i--
  3011  
  3012  			kv, err := k.StoredValue(storage)
  3013  			require.NoError(t, err)
  3014  			valueEqual(t, typeInfoComparator, sortedKeys[i], kv)
  3015  
  3016  			vv, err := v.StoredValue(storage)
  3017  			require.NoError(t, err)
  3018  			valueEqual(t, typeInfoComparator, keyValues[sortedKeys[i]], vv)
  3019  		})
  3020  
  3021  		require.NoError(t, err)
  3022  		require.Equal(t, 0, i)
  3023  
  3024  		verifyEmptyMap(t, storage, typeInfo, address, m)
  3025  	})
  3026  }
  3027  
  3028  func TestEmptyMap(t *testing.T) {
  3029  
  3030  	t.Parallel()
  3031  
  3032  	typeInfo := testTypeInfo{42}
  3033  	storage := newTestPersistentStorage(t)
  3034  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3035  
  3036  	m, err := NewMap(storage, address, NewDefaultDigesterBuilder(), typeInfo)
  3037  	require.NoError(t, err)
  3038  
  3039  	t.Run("get", func(t *testing.T) {
  3040  		s, err := m.Get(compare, hashInputProvider, Uint64Value(0))
  3041  		require.Equal(t, 1, errorCategorizationCount(err))
  3042  		var userError *UserError
  3043  		var keyNotFoundError *KeyNotFoundError
  3044  		require.ErrorAs(t, err, &userError)
  3045  		require.ErrorAs(t, err, &keyNotFoundError)
  3046  		require.ErrorAs(t, userError, &keyNotFoundError)
  3047  		require.Nil(t, s)
  3048  	})
  3049  
  3050  	t.Run("remove", func(t *testing.T) {
  3051  		existingKey, existingValue, err := m.Remove(compare, hashInputProvider, Uint64Value(0))
  3052  		require.Equal(t, 1, errorCategorizationCount(err))
  3053  		var userError *UserError
  3054  		var keyNotFoundError *KeyNotFoundError
  3055  		require.ErrorAs(t, err, &userError)
  3056  		require.ErrorAs(t, err, &keyNotFoundError)
  3057  		require.ErrorAs(t, userError, &keyNotFoundError)
  3058  		require.Nil(t, existingKey)
  3059  		require.Nil(t, existingValue)
  3060  	})
  3061  
  3062  	t.Run("iterate", func(t *testing.T) {
  3063  		i := 0
  3064  		err := m.Iterate(func(k Value, v Value) (bool, error) {
  3065  			i++
  3066  			return true, nil
  3067  		})
  3068  		require.NoError(t, err)
  3069  		require.Equal(t, 0, i)
  3070  	})
  3071  
  3072  	t.Run("count", func(t *testing.T) {
  3073  		count := m.Count()
  3074  		require.Equal(t, uint64(0), count)
  3075  	})
  3076  
  3077  	t.Run("type", func(t *testing.T) {
  3078  		require.True(t, typeInfoComparator(typeInfo, m.Type()))
  3079  	})
  3080  
  3081  	t.Run("address", func(t *testing.T) {
  3082  		require.Equal(t, address, m.Address())
  3083  	})
  3084  
  3085  	// TestMapEncodeDecode/empty tests empty map encoding and decoding
  3086  }
  3087  
  3088  func TestMapFromBatchData(t *testing.T) {
  3089  
  3090  	t.Run("empty", func(t *testing.T) {
  3091  		typeInfo := testTypeInfo{42}
  3092  
  3093  		m, err := NewMap(
  3094  			newTestPersistentStorage(t),
  3095  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3096  			NewDefaultDigesterBuilder(),
  3097  			typeInfo,
  3098  		)
  3099  		require.NoError(t, err)
  3100  		require.Equal(t, uint64(0), m.Count())
  3101  
  3102  		iter, err := m.Iterator()
  3103  		require.NoError(t, err)
  3104  
  3105  		storage := newTestPersistentStorage(t)
  3106  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3107  
  3108  		// Create a map with new storage, new address, and original map's elements.
  3109  		copied, err := NewMapFromBatchData(
  3110  			storage,
  3111  			address,
  3112  			NewDefaultDigesterBuilder(),
  3113  			m.Type(),
  3114  			compare,
  3115  			hashInputProvider,
  3116  			m.Seed(),
  3117  			func() (Value, Value, error) {
  3118  				return iter.Next()
  3119  			})
  3120  		require.NoError(t, err)
  3121  		require.NotEqual(t, copied.StorageID(), m.StorageID())
  3122  
  3123  		verifyEmptyMap(t, storage, typeInfo, address, copied)
  3124  	})
  3125  
  3126  	t.Run("root-dataslab", func(t *testing.T) {
  3127  		SetThreshold(1024)
  3128  
  3129  		const mapSize = 10
  3130  
  3131  		typeInfo := testTypeInfo{42}
  3132  
  3133  		m, err := NewMap(
  3134  			newTestPersistentStorage(t),
  3135  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3136  			NewDefaultDigesterBuilder(),
  3137  			typeInfo,
  3138  		)
  3139  		require.NoError(t, err)
  3140  
  3141  		for i := uint64(0); i < mapSize; i++ {
  3142  			storable, err := m.Set(compare, hashInputProvider, Uint64Value(i), Uint64Value(i*10))
  3143  			require.NoError(t, err)
  3144  			require.Nil(t, storable)
  3145  		}
  3146  
  3147  		require.Equal(t, uint64(mapSize), m.Count())
  3148  
  3149  		iter, err := m.Iterator()
  3150  		require.NoError(t, err)
  3151  
  3152  		var sortedKeys []Value
  3153  		keyValues := make(map[Value]Value)
  3154  
  3155  		storage := newTestPersistentStorage(t)
  3156  		digesterBuilder := NewDefaultDigesterBuilder()
  3157  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3158  
  3159  		// Create a map with new storage, new address, and original map's elements.
  3160  		copied, err := NewMapFromBatchData(
  3161  			storage,
  3162  			address,
  3163  			digesterBuilder,
  3164  			m.Type(),
  3165  			compare,
  3166  			hashInputProvider,
  3167  			m.Seed(),
  3168  			func() (Value, Value, error) {
  3169  
  3170  				k, v, err := iter.Next()
  3171  
  3172  				// Save key value pair
  3173  				if k != nil {
  3174  					sortedKeys = append(sortedKeys, k)
  3175  					keyValues[k] = v
  3176  				}
  3177  
  3178  				return k, v, err
  3179  			})
  3180  
  3181  		require.NoError(t, err)
  3182  		require.NotEqual(t, copied.StorageID(), m.StorageID())
  3183  
  3184  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3185  	})
  3186  
  3187  	t.Run("root-metaslab", func(t *testing.T) {
  3188  		SetThreshold(256)
  3189  		defer SetThreshold(1024)
  3190  
  3191  		const mapSize = 4096
  3192  
  3193  		typeInfo := testTypeInfo{42}
  3194  
  3195  		m, err := NewMap(
  3196  			newTestPersistentStorage(t),
  3197  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3198  			NewDefaultDigesterBuilder(),
  3199  			typeInfo,
  3200  		)
  3201  		require.NoError(t, err)
  3202  
  3203  		for i := uint64(0); i < mapSize; i++ {
  3204  			storable, err := m.Set(compare, hashInputProvider, Uint64Value(i), Uint64Value(i*10))
  3205  			require.NoError(t, err)
  3206  			require.Nil(t, storable)
  3207  		}
  3208  
  3209  		require.Equal(t, uint64(mapSize), m.Count())
  3210  
  3211  		iter, err := m.Iterator()
  3212  		require.NoError(t, err)
  3213  
  3214  		var sortedKeys []Value
  3215  		keyValues := make(map[Value]Value)
  3216  
  3217  		storage := newTestPersistentStorage(t)
  3218  		digesterBuilder := NewDefaultDigesterBuilder()
  3219  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3220  
  3221  		copied, err := NewMapFromBatchData(
  3222  			storage,
  3223  			address,
  3224  			digesterBuilder,
  3225  			m.Type(),
  3226  			compare,
  3227  			hashInputProvider,
  3228  			m.Seed(),
  3229  			func() (Value, Value, error) {
  3230  				k, v, err := iter.Next()
  3231  
  3232  				if k != nil {
  3233  					sortedKeys = append(sortedKeys, k)
  3234  					keyValues[k] = v
  3235  				}
  3236  
  3237  				return k, v, err
  3238  			})
  3239  
  3240  		require.NoError(t, err)
  3241  		require.NotEqual(t, m.StorageID(), copied.StorageID())
  3242  
  3243  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3244  	})
  3245  
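        	// Batch-copy a map holding 10 small elements plus one string pair near the
        	// max inline element size, exercising rebalancing between two data slabs.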
  3246  	t.Run("rebalance two data slabs", func(t *testing.T) {
  3247  		SetThreshold(256)
  3248  		defer SetThreshold(1024)
  3249  
  3250  		const mapSize = 10
  3251  
  3252  		typeInfo := testTypeInfo{42}
  3253  
  3254  		m, err := NewMap(
  3255  			newTestPersistentStorage(t),
  3256  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3257  			NewDefaultDigesterBuilder(),
  3258  			typeInfo,
  3259  		)
  3260  		require.NoError(t, err)
  3261  
  3262  		for i := uint64(0); i < mapSize; i++ {
  3263  			storable, err := m.Set(compare, hashInputProvider, Uint64Value(i), Uint64Value(i*10))
  3264  			require.NoError(t, err)
  3265  			require.Nil(t, storable)
  3266  		}
  3267  
  3268  		k := NewStringValue(strings.Repeat("a", int(maxInlineMapElementSize-2)))
  3269  		v := NewStringValue(strings.Repeat("b", int(maxInlineMapElementSize-2)))
  3270  		storable, err := m.Set(compare, hashInputProvider, k, v)
  3271  		require.NoError(t, err)
  3272  		require.Nil(t, storable)
  3273  
  3274  		require.Equal(t, uint64(mapSize+1), m.Count())
  3275  
  3276  		iter, err := m.Iterator()
  3277  		require.NoError(t, err)
  3278  
  3279  		var sortedKeys []Value
  3280  		keyValues := make(map[Value]Value)
  3281  
  3282  		storage := newTestPersistentStorage(t)
  3283  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3284  		digesterBuilder := NewDefaultDigesterBuilder()
  3285  
  3286  		copied, err := NewMapFromBatchData(
  3287  			storage,
  3288  			address,
  3289  			digesterBuilder,
  3290  			m.Type(),
  3291  			compare,
  3292  			hashInputProvider,
  3293  			m.Seed(),
  3294  			func() (Value, Value, error) {
  3295  				k, v, err := iter.Next()
  3296  
  3297  				if k != nil {
  3298  					sortedKeys = append(sortedKeys, k)
  3299  					keyValues[k] = v
  3300  				}
  3301  
  3302  				return k, v, err
  3303  			})
  3304  
  3305  		require.NoError(t, err)
  3306  		require.NotEqual(t, m.StorageID(), copied.StorageID())
  3307  
  3308  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3309  	})
  3310  
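        	// Batch-copy a map holding 8 small elements plus one string pair near the
        	// max inline element size, exercising merging of two data slabs.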
  3311  	t.Run("merge two data slabs", func(t *testing.T) {
  3312  		SetThreshold(256)
  3313  		defer SetThreshold(1024)
  3314  
  3315  		const mapSize = 8
  3316  
  3317  		typeInfo := testTypeInfo{42}
  3318  
  3319  		m, err := NewMap(
  3320  			newTestPersistentStorage(t),
  3321  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3322  			NewDefaultDigesterBuilder(),
  3323  			typeInfo,
  3324  		)
  3325  		require.NoError(t, err)
  3326  
  3327  		for i := uint64(0); i < mapSize; i++ {
  3328  			storable, err := m.Set(compare, hashInputProvider, Uint64Value(i), Uint64Value(i*10))
  3329  			require.NoError(t, err)
  3330  			require.Nil(t, storable)
  3331  		}
  3332  
  3333  		storable, err := m.Set(
  3334  			compare,
  3335  			hashInputProvider,
  3336  			NewStringValue(strings.Repeat("b", int(maxInlineMapElementSize-2))),
  3337  			NewStringValue(strings.Repeat("b", int(maxInlineMapElementSize-2))),
  3338  		)
  3339  		require.NoError(t, err)
  3340  		require.Nil(t, storable)
  3341  
  3342  		require.Equal(t, uint64(mapSize+1), m.Count())
  3343  		require.Equal(t, typeInfo, m.Type())
  3344  
  3345  		iter, err := m.Iterator()
  3346  		require.NoError(t, err)
  3347  
  3348  		var sortedKeys []Value
  3349  		keyValues := make(map[Value]Value)
  3350  
  3351  		storage := newTestPersistentStorage(t)
  3352  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3353  		digesterBuilder := NewDefaultDigesterBuilder()
  3354  
  3355  		copied, err := NewMapFromBatchData(
  3356  			storage,
  3357  			address,
  3358  			digesterBuilder,
  3359  			m.Type(),
  3360  			compare,
  3361  			hashInputProvider,
  3362  			m.Seed(),
  3363  			func() (Value, Value, error) {
  3364  				k, v, err := iter.Next()
  3365  
  3366  				if k != nil {
  3367  					sortedKeys = append(sortedKeys, k)
  3368  					keyValues[k] = v
  3369  				}
  3370  
  3371  				return k, v, err
  3372  			})
  3373  
  3374  		require.NoError(t, err)
  3375  		require.NotEqual(t, m.StorageID(), copied.StorageID())
  3376  
  3377  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3378  	})
  3379  
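        	// Batch-copy a map populated with 4096 randomly generated keys and values.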
  3380  	t.Run("random", func(t *testing.T) {
  3381  		SetThreshold(256)
  3382  		defer SetThreshold(1024)
  3383  
  3384  		const mapSize = 4096
  3385  
  3386  		r := newRand(t)
  3387  
  3388  		typeInfo := testTypeInfo{42}
  3389  
  3390  		m, err := NewMap(
  3391  			newTestPersistentStorage(t),
  3392  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3393  			NewDefaultDigesterBuilder(),
  3394  			typeInfo,
  3395  		)
  3396  		require.NoError(t, err)
  3397  
  3398  		for m.Count() < mapSize {
  3399  			k := randomValue(r, int(maxInlineMapElementSize))
  3400  			v := randomValue(r, int(maxInlineMapElementSize))
  3401  
  3402  			_, err = m.Set(compare, hashInputProvider, k, v)
  3403  			require.NoError(t, err)
  3404  		}
  3405  
  3406  		require.Equal(t, uint64(mapSize), m.Count())
  3407  
  3408  		iter, err := m.Iterator()
  3409  		require.NoError(t, err)
  3410  
  3411  		storage := newTestPersistentStorage(t)
  3412  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3413  		digesterBuilder := NewDefaultDigesterBuilder()
  3414  
  3415  		var sortedKeys []Value
  3416  		keyValues := make(map[Value]Value, mapSize)
  3417  
  3418  		copied, err := NewMapFromBatchData(
  3419  			storage,
  3420  			address,
  3421  			digesterBuilder,
  3422  			m.Type(),
  3423  			compare,
  3424  			hashInputProvider,
  3425  			m.Seed(),
  3426  			func() (Value, Value, error) {
  3427  				k, v, err := iter.Next()
  3428  
  3429  				if k != nil {
  3430  					sortedKeys = append(sortedKeys, k)
  3431  					keyValues[k] = v
  3432  				}
  3433  
  3434  				return k, v, err
  3435  			})
  3436  
  3437  		require.NoError(t, err)
  3438  		require.NotEqual(t, m.StorageID(), copied.StorageID())
  3439  
  3440  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3441  	})
  3442  
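        	// Batch-copy a map whose mock digester produces colliding first-level digests,
        	// with MaxCollisionLimitPerDigest raised to mapSize/2 for the duration of the test.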
  3443  	t.Run("collision", func(t *testing.T) {
  3444  
  3445  		const mapSize = 1024
  3446  
  3447  		SetThreshold(512)
  3448  		defer SetThreshold(1024)
  3449  
  3450  		savedMaxCollisionLimitPerDigest := MaxCollisionLimitPerDigest
  3451  		defer func() {
  3452  			MaxCollisionLimitPerDigest = savedMaxCollisionLimitPerDigest
  3453  		}()
  3454  		MaxCollisionLimitPerDigest = mapSize / 2
  3455  
  3456  		typeInfo := testTypeInfo{42}
  3457  
  3458  		digesterBuilder := &mockDigesterBuilder{}
  3459  
  3460  		m, err := NewMap(
  3461  			newTestPersistentStorage(t),
  3462  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3463  			digesterBuilder,
  3464  			typeInfo,
  3465  		)
  3466  		require.NoError(t, err)
  3467  
  3468  		for i := uint64(0); i < mapSize; i++ {
  3469  
  3470  			k, v := Uint64Value(i), Uint64Value(i*10)
  3471  
  3472  			digests := make([]Digest, 2)
  3473  			if i%2 == 0 {
  3474  				digests[0] = 0
  3475  			} else {
  3476  				digests[0] = Digest(i % (mapSize / 2))
  3477  			}
  3478  			digests[1] = Digest(i)
  3479  
  3480  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  3481  
  3482  			storable, err := m.Set(compare, hashInputProvider, k, v)
  3483  			require.NoError(t, err)
  3484  			require.Nil(t, storable)
  3485  		}
  3486  
  3487  		require.Equal(t, uint64(mapSize), m.Count())
  3488  
  3489  		iter, err := m.Iterator()
  3490  		require.NoError(t, err)
  3491  
  3492  		var sortedKeys []Value
  3493  		keyValues := make(map[Value]Value)
  3494  
  3495  		storage := newTestPersistentStorage(t)
  3496  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3497  
  3498  		i := 0
  3499  		copied, err := NewMapFromBatchData(
  3500  			storage,
  3501  			address,
  3502  			digesterBuilder,
  3503  			m.Type(),
  3504  			compare,
  3505  			hashInputProvider,
  3506  			m.Seed(),
  3507  			func() (Value, Value, error) {
  3508  				k, v, err := iter.Next()
  3509  
  3510  				if k != nil {
  3511  					sortedKeys = append(sortedKeys, k)
  3512  					keyValues[k] = v
  3513  				}
  3514  
  3515  				i++
  3516  				return k, v, err
  3517  			})
  3518  
  3519  		require.NoError(t, err)
  3520  		require.NotEqual(t, m.StorageID(), copied.StorageID())
  3521  
  3522  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3523  	})
  3524  
  3525  	t.Run("data slab too large", func(t *testing.T) {
  3526  		// Slab size must not exceed maxThreshold.
  3527  		// This problem can no longer be triggered after Atree issue #193
  3528  		// was fixed by PR #194 and PR #197, so this test exists to catch regressions.
  3529  
  3530  		SetThreshold(256)
  3531  		defer SetThreshold(1024)
  3532  
  3533  		r := newRand(t)
  3534  
  3535  		maxStringSize := int(MaxInlineMapKeyOrValueSize - 2)
  3536  
  3537  		typeInfo := testTypeInfo{42}
  3538  
  3539  		digesterBuilder := &mockDigesterBuilder{}
  3540  
  3541  		m, err := NewMap(
  3542  			newTestPersistentStorage(t),
  3543  			Address{1, 2, 3, 4, 5, 6, 7, 8},
  3544  			digesterBuilder,
  3545  			typeInfo,
  3546  		)
  3547  		require.NoError(t, err)
  3548  
  3549  		k := NewStringValue(randStr(r, maxStringSize))
  3550  		v := NewStringValue(randStr(r, maxStringSize))
  3551  		digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{3881892766069237908}})
  3552  
  3553  		storable, err := m.Set(compare, hashInputProvider, k, v)
  3554  		require.NoError(t, err)
  3555  		require.Nil(t, storable)
  3556  
  3557  		k = NewStringValue(randStr(r, maxStringSize))
  3558  		v = NewStringValue(randStr(r, maxStringSize))
  3559  		digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{3882976639190041664}})
  3560  
  3561  		storable, err = m.Set(compare, hashInputProvider, k, v)
  3562  		require.NoError(t, err)
  3563  		require.Nil(t, storable)
  3564  
  3565  		k = NewStringValue("zFKUYYNfIfJCCakcDuIEHj")
  3566  		v = NewStringValue("EZbaCxxjDtMnbRlXJMgfHnZ")
  3567  		digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{3883321011075439822}})
  3568  
  3569  		storable, err = m.Set(compare, hashInputProvider, k, v)
  3570  		require.NoError(t, err)
  3571  		require.Nil(t, storable)
  3572  
  3573  		iter, err := m.Iterator()
  3574  		require.NoError(t, err)
  3575  
  3576  		var sortedKeys []Value
  3577  		keyValues := make(map[Value]Value)
  3578  
  3579  		storage := newTestPersistentStorage(t)
  3580  		address := Address{2, 3, 4, 5, 6, 7, 8, 9}
  3581  
  3582  		copied, err := NewMapFromBatchData(
  3583  			storage,
  3584  			address,
  3585  			digesterBuilder,
  3586  			m.Type(),
  3587  			compare,
  3588  			hashInputProvider,
  3589  			m.Seed(),
  3590  			func() (Value, Value, error) {
  3591  				k, v, err := iter.Next()
  3592  
  3593  				if k != nil {
  3594  					sortedKeys = append(sortedKeys, k)
  3595  					keyValues[k] = v
  3596  				}
  3597  
  3598  				return k, v, err
  3599  			})
  3600  
  3601  		require.NoError(t, err)
  3602  		require.NotEqual(t, m.StorageID(), copied.StorageID())
  3603  
  3604  		verifyMap(t, storage, typeInfo, address, copied, keyValues, sortedKeys, false)
  3605  	})
  3606  }
  3607  
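        // TestMapNestedStorables stores wrapped strings (SomeValue) and nested arrays as
        // map elements, then verifies the map with nested-element checking enabled.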
  3608  func TestMapNestedStorables(t *testing.T) {
  3609  
  3610  	t.Run("SomeValue", func(t *testing.T) {
  3611  
  3612  		const mapSize = 4096
  3613  
  3614  		typeInfo := testTypeInfo{42}
  3615  		storage := newTestPersistentStorage(t)
  3616  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3617  
  3618  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
  3619  		require.NoError(t, err)
  3620  
  3621  		keyValues := make(map[Value]Value)
  3622  		for i := uint64(0); i < mapSize; i++ {
  3623  
  3624  			ks := strings.Repeat("a", int(i))
  3625  			k := SomeValue{Value: NewStringValue(ks)}
  3626  
  3627  			vs := strings.Repeat("b", int(i))
  3628  			v := SomeValue{Value: NewStringValue(vs)}
  3629  
  3630  			keyValues[k] = v
  3631  
  3632  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3633  			require.NoError(t, err)
  3634  			require.Nil(t, existingStorable)
  3635  		}
  3636  
  3637  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, true)
  3638  	})
  3639  
  3640  	t.Run("Array", func(t *testing.T) {
  3641  
  3642  		const mapSize = 4096
  3643  
  3644  		typeInfo := testTypeInfo{42}
  3645  		storage := newTestPersistentStorage(t)
  3646  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3647  
  3648  		m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
  3649  		require.NoError(t, err)
  3650  
  3651  		keyValues := make(map[Value]Value)
  3652  		for i := uint64(0); i < mapSize; i++ {
  3653  
  3654  			// Create a nested array with one element
  3655  			array, err := NewArray(storage, address, typeInfo)
  3656  			require.NoError(t, err)
  3657  
  3658  			vs := strings.Repeat("b", int(i))
  3659  			v := SomeValue{Value: NewStringValue(vs)}
  3660  
  3661  			err = array.Append(v)
  3662  			require.NoError(t, err)
  3663  
  3664  			// Insert nested array into map
  3665  			ks := strings.Repeat("a", int(i))
  3666  			k := SomeValue{Value: NewStringValue(ks)}
  3667  
  3668  			keyValues[k] = array
  3669  
  3670  			existingStorable, err := m.Set(compare, hashInputProvider, k, array)
  3671  			require.NoError(t, err)
  3672  			require.Nil(t, existingStorable)
  3673  		}
  3674  
  3675  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, true)
  3676  	})
  3677  }
  3678  
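        // TestMapMaxInlineElement inserts two key-value pairs of maximum inlinable string
        // size and checks that the root remains a data slab of the expected size.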
  3679  func TestMapMaxInlineElement(t *testing.T) {
  3680  	t.Parallel()
  3681  
  3682  	r := newRand(t)
  3683  	maxStringSize := int(MaxInlineMapKeyOrValueSize - 2)
  3684  	typeInfo := testTypeInfo{42}
  3685  	storage := newTestPersistentStorage(t)
  3686  	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3687  
  3688  	m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
  3689  	require.NoError(t, err)
  3690  
  3691  	keyValues := make(map[Value]Value)
  3692  	for len(keyValues) < 2 {
  3693  		// String length is MaxInlineMapKeyOrValueSize - 2 to account for string encoding overhead.
  3694  		k := NewStringValue(randStr(r, maxStringSize))
  3695  		v := NewStringValue(randStr(r, maxStringSize))
  3696  		keyValues[k] = v
  3697  
  3698  		_, err := m.Set(compare, hashInputProvider, k, v)
  3699  		require.NoError(t, err)
  3700  	}
  3701  
  3702  	require.True(t, m.root.IsData())
  3703  
  3704  	// The size of a root data slab holding two elements (key+value pairs) of
  3705  	// max inlined size equals the target slab size minus the storage ID size
  3706  	// (the next-slab storage ID is omitted in a root slab).
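        	// (A storage ID encodes an 8-byte address and an 8-byte index, 16 bytes in total.)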
  3707  	require.Equal(t, targetThreshold-storageIDSize, uint64(m.root.Header().size))
  3708  
  3709  	verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  3710  }
  3711  
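        // TestMapString checks the String output of small and large maps, using a mock
        // digester so that element order follows the uint64 key values.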
  3712  func TestMapString(t *testing.T) {
  3713  
  3714  	SetThreshold(256)
  3715  	defer SetThreshold(1024)
  3716  
  3717  	t.Run("small", func(t *testing.T) {
  3718  		const mapSize = 3
  3719  
  3720  		digesterBuilder := &mockDigesterBuilder{}
  3721  		typeInfo := testTypeInfo{42}
  3722  		storage := newTestPersistentStorage(t)
  3723  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3724  
  3725  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3726  		require.NoError(t, err)
  3727  
  3728  		for i := uint64(0); i < mapSize; i++ {
  3729  			k := Uint64Value(i)
  3730  			v := Uint64Value(i)
  3731  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i)}})
  3732  
  3733  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3734  			require.NoError(t, err)
  3735  			require.Nil(t, existingStorable)
  3736  		}
  3737  
  3738  		want := `[0:0 1:1 2:2]`
  3739  		require.Equal(t, want, m.String())
  3740  	})
  3741  
  3742  	t.Run("large", func(t *testing.T) {
  3743  		const mapSize = 30
  3744  
  3745  		digesterBuilder := &mockDigesterBuilder{}
  3746  		typeInfo := testTypeInfo{42}
  3747  		storage := newTestPersistentStorage(t)
  3748  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3749  
  3750  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3751  		require.NoError(t, err)
  3752  
  3753  		for i := uint64(0); i < mapSize; i++ {
  3754  			k := Uint64Value(i)
  3755  			v := Uint64Value(i)
  3756  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i)}})
  3757  
  3758  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3759  			require.NoError(t, err)
  3760  			require.Nil(t, existingStorable)
  3761  		}
  3762  
  3763  		want := `[0:0 1:1 2:2 3:3 4:4 5:5 6:6 7:7 8:8 9:9 10:10 11:11 12:12 13:13 14:14 15:15 16:16 17:17 18:18 19:19 20:20 21:21 22:22 23:23 24:24 25:25 26:26 27:27 28:28 29:29]`
  3764  		require.Equal(t, want, m.String())
  3765  	})
  3766  }
  3767  
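        // TestMapSlabDump checks DumpMapSlabs output for a single data slab, a metadata
        // slab with children, inline and external collision groups, and overflow
        // (externally stored) values.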
  3768  func TestMapSlabDump(t *testing.T) {
  3769  
  3770  	SetThreshold(256)
  3771  	defer SetThreshold(1024)
  3772  
  3773  	t.Run("small", func(t *testing.T) {
  3774  		const mapSize = 3
  3775  
  3776  		digesterBuilder := &mockDigesterBuilder{}
  3777  		typeInfo := testTypeInfo{42}
  3778  		storage := newTestPersistentStorage(t)
  3779  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3780  
  3781  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3782  		require.NoError(t, err)
  3783  
  3784  		for i := uint64(0); i < mapSize; i++ {
  3785  			k := Uint64Value(i)
  3786  			v := Uint64Value(i)
  3787  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i)}})
  3788  
  3789  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3790  			require.NoError(t, err)
  3791  			require.Nil(t, existingStorable)
  3792  		}
  3793  
  3794  		want := []string{
  3795  			"level 1, MapDataSlab id:0x102030405060708.1 size:67 firstkey:0 elements: [0:0:0 1:1:1 2:2:2]",
  3796  		}
  3797  		dumps, err := DumpMapSlabs(m)
  3798  		require.NoError(t, err)
  3799  		require.Equal(t, want, dumps)
  3800  	})
  3801  
  3802  	t.Run("large", func(t *testing.T) {
  3803  		const mapSize = 30
  3804  
  3805  		digesterBuilder := &mockDigesterBuilder{}
  3806  		typeInfo := testTypeInfo{42}
  3807  		storage := newTestPersistentStorage(t)
  3808  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3809  
  3810  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3811  		require.NoError(t, err)
  3812  
  3813  		for i := uint64(0); i < mapSize; i++ {
  3814  			k := Uint64Value(i)
  3815  			v := Uint64Value(i)
  3816  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i)}})
  3817  
  3818  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3819  			require.NoError(t, err)
  3820  			require.Nil(t, existingStorable)
  3821  		}
  3822  
  3823  		want := []string{
  3824  			"level 1, MapMetaDataSlab id:0x102030405060708.1 size:60 firstKey:0 children: [{id:0x102030405060708.2 size:233 firstKey:0} {id:0x102030405060708.3 size:305 firstKey:13}]",
  3825  			"level 2, MapDataSlab id:0x102030405060708.2 size:233 firstkey:0 elements: [0:0:0 1:1:1 2:2:2 3:3:3 4:4:4 5:5:5 6:6:6 7:7:7 8:8:8 9:9:9 10:10:10 11:11:11 12:12:12]",
  3826  			"level 2, MapDataSlab id:0x102030405060708.3 size:305 firstkey:13 elements: [13:13:13 14:14:14 15:15:15 16:16:16 17:17:17 18:18:18 19:19:19 20:20:20 21:21:21 22:22:22 23:23:23 24:24:24 25:25:25 26:26:26 27:27:27 28:28:28 29:29:29]",
  3827  		}
  3828  		dumps, err := DumpMapSlabs(m)
  3829  		require.NoError(t, err)
  3830  		require.Equal(t, want, dumps)
  3831  	})
  3832  
  3833  	t.Run("inline collision", func(t *testing.T) {
  3834  		const mapSize = 30
  3835  
  3836  		digesterBuilder := &mockDigesterBuilder{}
  3837  		typeInfo := testTypeInfo{42}
  3838  		storage := newTestPersistentStorage(t)
  3839  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3840  
  3841  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3842  		require.NoError(t, err)
  3843  
  3844  		for i := uint64(0); i < mapSize; i++ {
  3845  			k := Uint64Value(i)
  3846  			v := Uint64Value(i)
  3847  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i % 10)}})
  3848  
  3849  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3850  			require.NoError(t, err)
  3851  			require.Nil(t, existingStorable)
  3852  		}
  3853  
  3854  		want := []string{
  3855  			"level 1, MapMetaDataSlab id:0x102030405060708.1 size:60 firstKey:0 children: [{id:0x102030405060708.2 size:255 firstKey:0} {id:0x102030405060708.3 size:263 firstKey:5}]",
  3856  			"level 2, MapDataSlab id:0x102030405060708.2 size:255 firstkey:0 elements: [0:inline[:0:0 :10:10 :20:20] 1:inline[:1:1 :11:11 :21:21] 2:inline[:2:2 :12:12 :22:22] 3:inline[:3:3 :13:13 :23:23] 4:inline[:4:4 :14:14 :24:24]]",
  3857  			"level 2, MapDataSlab id:0x102030405060708.3 size:263 firstkey:5 elements: [5:inline[:5:5 :15:15 :25:25] 6:inline[:6:6 :16:16 :26:26] 7:inline[:7:7 :17:17 :27:27] 8:inline[:8:8 :18:18 :28:28] 9:inline[:9:9 :19:19 :29:29]]",
  3858  		}
  3859  		dumps, err := DumpMapSlabs(m)
  3860  		require.NoError(t, err)
  3861  		require.Equal(t, want, dumps)
  3862  	})
  3863  
  3864  	t.Run("external collision", func(t *testing.T) {
  3865  		const mapSize = 30
  3866  
  3867  		digesterBuilder := &mockDigesterBuilder{}
  3868  		typeInfo := testTypeInfo{42}
  3869  		storage := newTestPersistentStorage(t)
  3870  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3871  
  3872  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3873  		require.NoError(t, err)
  3874  
  3875  		for i := uint64(0); i < mapSize; i++ {
  3876  			k := Uint64Value(i)
  3877  			v := Uint64Value(i)
  3878  			digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(i % 2)}})
  3879  
  3880  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3881  			require.NoError(t, err)
  3882  			require.Nil(t, existingStorable)
  3883  		}
  3884  
  3885  		want := []string{
  3886  			"level 1, MapDataSlab id:0x102030405060708.1 size:80 firstkey:0 elements: [0:external(0x102030405060708.2) 1:external(0x102030405060708.3)]",
  3887  			"collision: MapDataSlab id:0x102030405060708.2 size:141 firstkey:0 elements: [:0:0 :2:2 :4:4 :6:6 :8:8 :10:10 :12:12 :14:14 :16:16 :18:18 :20:20 :22:22 :24:24 :26:26 :28:28]",
  3888  			"collision: MapDataSlab id:0x102030405060708.3 size:141 firstkey:0 elements: [:1:1 :3:3 :5:5 :7:7 :9:9 :11:11 :13:13 :15:15 :17:17 :19:19 :21:21 :23:23 :25:25 :27:27 :29:29]",
  3889  		}
  3890  		dumps, err := DumpMapSlabs(m)
  3891  		require.NoError(t, err)
  3892  		require.Equal(t, want, dumps)
  3893  	})
  3894  
  3895  	t.Run("overflow", func(t *testing.T) {
  3896  
  3897  		digesterBuilder := &mockDigesterBuilder{}
  3898  		typeInfo := testTypeInfo{42}
  3899  		storage := newTestPersistentStorage(t)
  3900  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3901  
  3902  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3903  		require.NoError(t, err)
  3904  
  3905  		k := NewStringValue(strings.Repeat("a", int(MaxInlineMapKeyOrValueSize)))
  3906  		v := NewStringValue(strings.Repeat("b", int(MaxInlineMapKeyOrValueSize)))
  3907  		digesterBuilder.On("Digest", k).Return(mockDigester{d: []Digest{Digest(0)}})
  3908  
  3909  		existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3910  		require.NoError(t, err)
  3911  		require.Nil(t, existingStorable)
  3912  
  3913  		want := []string{
  3914  			"level 1, MapDataSlab id:0x102030405060708.1 size:69 firstkey:0 elements: [0:StorageIDStorable({[1 2 3 4 5 6 7 8] [0 0 0 0 0 0 0 2]}):StorageIDStorable({[1 2 3 4 5 6 7 8] [0 0 0 0 0 0 0 3]})]",
  3915  			"overflow: &{0x102030405060708.2 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa}",
  3916  			"overflow: &{0x102030405060708.3 bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb}",
  3917  		}
  3918  		dumps, err := DumpMapSlabs(m)
  3919  		require.NoError(t, err)
  3920  		require.Equal(t, want, dumps)
  3921  	})
  3922  }
  3923  
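        // TestMaxCollisionLimitPerDigest verifies that Set succeeds while first-level
        // collisions stay within MaxCollisionLimitPerDigest, returns a CollisionLimitError
        // (categorized as fatal) once the limit is exceeded, and still allows updates to
        // existing keys.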
  3924  func TestMaxCollisionLimitPerDigest(t *testing.T) {
  3925  	savedMaxCollisionLimitPerDigest := MaxCollisionLimitPerDigest
  3926  	defer func() {
  3927  		MaxCollisionLimitPerDigest = savedMaxCollisionLimitPerDigest
  3928  	}()
  3929  
  3930  	t.Run("collision limit 0", func(t *testing.T) {
  3931  		const mapSize = 1024
  3932  
  3933  		SetThreshold(256)
  3934  		defer SetThreshold(1024)
  3935  
  3936  		// Set the noncryptographic hash collision limit to 0,
  3937  		// meaning no collisions are allowed at the first digest level.
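        		// The first mapSize keys use distinct digests Digest(i), so they insert
        		// successfully; the second batch below reuses those digests, so every insert
        		// collides at the first level and is rejected.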
  3938  		MaxCollisionLimitPerDigest = uint32(0)
  3939  
  3940  		digesterBuilder := &mockDigesterBuilder{}
  3941  		keyValues := make(map[Value]Value, mapSize)
  3942  		for i := uint64(0); i < mapSize; i++ {
  3943  			k := Uint64Value(i)
  3944  			v := Uint64Value(i)
  3945  			keyValues[k] = v
  3946  
  3947  			digests := []Digest{Digest(i)}
  3948  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  3949  		}
  3950  
  3951  		typeInfo := testTypeInfo{42}
  3952  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  3953  		storage := newTestPersistentStorage(t)
  3954  
  3955  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  3956  		require.NoError(t, err)
  3957  
  3958  		// Insert elements within collision limits
  3959  		for k, v := range keyValues {
  3960  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3961  			require.NoError(t, err)
  3962  			require.Nil(t, existingStorable)
  3963  		}
  3964  
  3965  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  3966  
  3967  		// Insert elements exceeding collision limits
  3968  		collisionKeyValues := make(map[Value]Value, mapSize)
  3969  		for i := uint64(0); i < mapSize; i++ {
  3970  			k := Uint64Value(mapSize + i)
  3971  			v := Uint64Value(mapSize + i)
  3972  			collisionKeyValues[k] = v
  3973  
  3974  			digests := []Digest{Digest(i)}
  3975  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  3976  		}
  3977  
  3978  		for k, v := range collisionKeyValues {
  3979  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3980  			require.Equal(t, 1, errorCategorizationCount(err))
  3981  			var fatalError *FatalError
  3982  			var collisionLimitError *CollisionLimitError
  3983  			require.ErrorAs(t, err, &fatalError)
  3984  			require.ErrorAs(t, err, &collisionLimitError)
  3985  			require.ErrorAs(t, fatalError, &collisionLimitError)
  3986  			require.Nil(t, existingStorable)
  3987  		}
  3988  
  3989  		// Verify that no new elements exceeding the collision limit were inserted
  3990  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  3991  
  3992  		// Update elements within collision limits
  3993  		for k := range keyValues {
  3994  			v := Uint64Value(0)
  3995  			keyValues[k] = v
  3996  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  3997  			require.NoError(t, err)
  3998  			require.NotNil(t, existingStorable)
  3999  		}
  4000  
  4001  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  4002  	})
  4003  
  4004  	t.Run("collision limit > 0", func(t *testing.T) {
  4005  		const mapSize = 1024
  4006  
  4007  		SetThreshold(256)
  4008  		defer SetThreshold(1024)
  4009  
  4010  		// Set the noncryptographic hash collision limit to 7,
  4011  		// meaning at most 8 elements per collision group per digest at the first level.
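        		// Here 1024 keys map to digests i % 128, i.e. exactly 8 elements per first-level
        		// digest, which is the maximum allowed; the second batch below reuses the same
        		// digests, so each additional insert exceeds the limit and is rejected.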
  4012  		MaxCollisionLimitPerDigest = uint32(7)
  4013  
  4014  		digesterBuilder := &mockDigesterBuilder{}
  4015  		keyValues := make(map[Value]Value, mapSize)
  4016  		for i := uint64(0); i < mapSize; i++ {
  4017  			k := Uint64Value(i)
  4018  			v := Uint64Value(i)
  4019  			keyValues[k] = v
  4020  
  4021  			digests := []Digest{Digest(i % 128)}
  4022  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  4023  		}
  4024  
  4025  		typeInfo := testTypeInfo{42}
  4026  		address := Address{1, 2, 3, 4, 5, 6, 7, 8}
  4027  		storage := newTestPersistentStorage(t)
  4028  
  4029  		m, err := NewMap(storage, address, digesterBuilder, typeInfo)
  4030  		require.NoError(t, err)
  4031  
  4032  		// Insert elements within collision limits
  4033  		for k, v := range keyValues {
  4034  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  4035  			require.NoError(t, err)
  4036  			require.Nil(t, existingStorable)
  4037  		}
  4038  
  4039  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  4040  
  4041  		// Insert elements exceeding collision limits
  4042  		collisionKeyValues := make(map[Value]Value, mapSize)
  4043  		for i := uint64(0); i < mapSize; i++ {
  4044  			k := Uint64Value(mapSize + i)
  4045  			v := Uint64Value(mapSize + i)
  4046  			collisionKeyValues[k] = v
  4047  
  4048  			digests := []Digest{Digest(i % 128)}
  4049  			digesterBuilder.On("Digest", k).Return(mockDigester{digests})
  4050  		}
  4051  
  4052  		for k, v := range collisionKeyValues {
  4053  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  4054  			require.Equal(t, 1, errorCategorizationCount(err))
  4055  			var fatalError *FatalError
  4056  			var collisionLimitError *CollisionLimitError
  4057  			require.ErrorAs(t, err, &fatalError)
  4058  			require.ErrorAs(t, err, &collisionLimitError)
  4059  			require.ErrorAs(t, fatalError, &collisionLimitError)
  4060  			require.Nil(t, existingStorable)
  4061  		}
  4062  
  4063  		// Verify that no new elements exceeding the collision limit were inserted
  4064  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  4065  
  4066  		// Update elements within collision limits
  4067  		for k := range keyValues {
  4068  			v := Uint64Value(0)
  4069  			keyValues[k] = v
  4070  			existingStorable, err := m.Set(compare, hashInputProvider, k, v)
  4071  			require.NoError(t, err)
  4072  			require.NotNil(t, existingStorable)
  4073  		}
  4074  
  4075  		verifyMap(t, storage, typeInfo, address, m, keyValues, nil, false)
  4076  	})
  4077  }