package list

import (
	"sync/atomic"
	"unsafe"

	"github.com/benz9527/xboot/lib/id"
	"github.com/benz9527/xboot/lib/infra"
)

var _ SkipList[uint8, uint8] = (*xArenaSkl[uint8, uint8])(nil)

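// xArenaSkl is a concurrent skip-list whose nodes are allocated from a
// growable arena. Writers serialize structural changes through per-node
// optimistic version locks (driven by optVer) and re-validate pred/succ
// links before splicing; readers rely on atomic pointer loads only.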
type xArenaSkl[K infra.OrderedKey, V any] struct {
	head       *xArenaSklElement[K, V]
	arena      *autoGrowthArena[xArenaSklNode[K, V]] // recycles resources
	kcmp       infra.OrderedKeyComparator[K]         // key comparator
	rand       SklRand
	optVer     id.UUIDGen // optimistic version generator
	nodeLen    int64      // skip-list's node count
	indexCount uint64     // skip-list's index count
	levels     int32      // current max level among the skip-list's indexes
}

func (skl *xArenaSkl[K, V]) atomicLoadHead() *xArenaSklElement[K, V] {
	return (*xArenaSklElement[K, V])(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(&skl.head))))
}

// traverse locates the target key and stores the nodes encountered during
// the index traversal.
func (skl *xArenaSkl[K, V]) traverse(
	lvl int32,
	key K,
	aux []*xArenaSklNode[K, V],
) *xArenaSklNode[K, V] {
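	// aux layout: aux[0:sklMaxLevel] receives the predecessor at each level,
	// aux[sklMaxLevel:2*sklMaxLevel] the successor at each level.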
	for /* vertical */ forward, l := skl.atomicLoadHead().nodeRef, lvl-1; l >= 0; l-- {
		nIdx := forward.atomicLoadNextNode(l)
		for /* horizontal */ nIdx != nil {
			if res := skl.kcmp(key, nIdx.elementRef.key); /* horizontal next */ res > 0 {
				forward = nIdx
				nIdx = forward.atomicLoadNextNode(l)
			} else if /* found */ res == 0 {
				aux[l] = forward          /* pred */
				aux[sklMaxLevel+l] = nIdx /* succ */
				return nIdx
			} else /* not found, vertical next */ {
				break
			}
		}

		aux[l] = forward          /* pred */
		aux[sklMaxLevel+l] = nIdx /* succ */
	}
	return nil
}

// rmTraverse locates the target key for removal and stores the nodes
// encountered during the index traversal.
// It returns the level index at which the target key was found, or -1 if
// the key is absent.
func (skl *xArenaSkl[K, V]) rmTraverse(
	weight K,
	aux []*xArenaSklNode[K, V],
) (foundAt int32) {
	// foundAt is the index of the first (highest) level at which the
	// target node was found.
	foundAt = -1
	forward := skl.atomicLoadHead().nodeRef
	for /* vertical */ l := skl.Levels() - 1; l >= 0; l-- {
		nIdx := forward.atomicLoadNextNode(l)
		for /* horizontal */ nIdx != nil && skl.kcmp(weight, nIdx.elementRef.key) > 0 {
			forward = nIdx
			nIdx = forward.atomicLoadNextNode(l)
		}

		aux[l] = forward
		aux[sklMaxLevel+l] = nIdx

		if foundAt == -1 && nIdx != nil && skl.kcmp(weight, nIdx.elementRef.key) == 0 {
			foundAt = l
		}
		// Downward to next level.
	}
	return
}

// Classic Skip-List basic APIs

// Len returns the skip-list's node count.
func (skl *xArenaSkl[K, V]) Len() int64 {
	return atomic.LoadInt64(&skl.nodeLen)
}

// IndexCount returns the skip-list's index count.
func (skl *xArenaSkl[K, V]) IndexCount() uint64 {
	return atomic.LoadUint64(&skl.indexCount)
}

// Levels returns the current max level among the skip-list's indexes.
func (skl *xArenaSkl[K, V]) Levels() int32 {
	return atomic.LoadInt32(&skl.levels)
}

// Insert adds the val by a key into the skip-list.
// Only works for a unique-element skip-list.
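//
// A minimal usage sketch (assuming an already constructed *xArenaSkl named
// skl; construction is handled elsewhere in this package):
//
//	if err := skl.Insert(10, "v1"); err != nil {
//		// e.g. ErrXSklIsFull, or ErrXSklDisabledValReplace when called as
//		// skl.Insert(10, "v2", true) against an existing key.
//	}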
func (skl *xArenaSkl[K, V]) Insert(key K, val V, ifNotPresent ...bool) error {
	if skl.Len() >= sklMaxSize {
		return ErrXSklIsFull
	}

	var (
		aux     = make([]*xArenaSklNode[K, V], 2*sklMaxLevel)
		oldLvls = skl.Levels()
		newLvls = skl.rand(int(oldLvls), skl.Len()) // avoid loop call
		ver     = skl.optVer.Number()
	)

	if len(ifNotPresent) <= 0 {
		ifNotPresent = insertReplaceDisabled
	}

	for {
		if node := skl.traverse(max(oldLvls, newLvls), key, aux); node != nil {
			if /* conc rm */ atomicIsSet(&node.flags, nodeRemovingFlagBit) {
				continue
			} else if /* conc d-check */ skl.Len() >= sklMaxSize {
				return ErrXSklIsFull
			}

			if ifNotPresent[0] {
				return ErrXSklDisabledValReplace
			}
			// Replace the value in place; the node count is unchanged.
			node.elementRef.val.Store(val)
			return nil
		}
		// Node not present. Add this node into the skip-list.
		var (
			pred, succ, prev *xArenaSklNode[K, V]
			isValid          = true
			lockedLevels     = int32(-1)
		)
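		// Lock pred at each level bottom-up; lockedLevels records the highest
		// level locked so far, so unlockArenaNodes can release exactly those
		// locks when the optimistic check fails below.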
		for l := int32(0); isValid && l < newLvls; l++ {
			pred, succ = aux[l], aux[sklMaxLevel+l]
			if /* lock */ pred != prev {
				pred.lock(ver)
				lockedLevels = l
				prev = pred
			}
			// Check the index and data node:
			//      +------+       +------+      +------+
			// ...  | pred |------>|  new |----->| succ | ...
			//      +------+       +------+      +------+
			// 1. Neither pred nor succ is being removed.
			// 2. pred's next node at this level is succ.
			isValid = !atomicIsSet(&pred.flags, nodeRemovingFlagBit) &&
				(succ == nil || !atomicIsSet(&succ.flags, nodeRemovingFlagBit)) &&
				pred.atomicLoadNextNode(l) == succ
		}
		if /* conc insert */ !isValid {
			unlockArenaNodes(ver, lockedLevels, aux[0:sklMaxLevel]...)
			continue
		} else if /* conc d-check */ skl.Len() >= sklMaxSize {
			unlockArenaNodes(ver, lockedLevels, aux[0:sklMaxLevel]...)
			return ErrXSklIsFull
		}
		// node := skl.arena.allocateXConcSklNode(uint32(newLvls))
		// node.init(key, val, skl.loadXNodeMode(), skl.vcmp, skl.arena.xNodeArena)
		e := newXArenaSklDataElement[K, V](key, val, uint32(newLvls), skl.arena)
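		// Wire the new element into the level-0 doubly linked element list,
		// between the level-0 predecessor's and successor's elements.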
		e.prev = aux[0].elementRef
		aux[0].elementRef.next = e
		if aux[sklMaxLevel] != nil {
			e.next = aux[sklMaxLevel].elementRef
			aux[sklMaxLevel].elementRef.prev = e
		}
		for /* linking */ l := int32(0); l < newLvls; l++ {
			//      +------+       +------+      +------+
			// ...  | pred |------>|  new |----->| succ | ...
			//      +------+       +------+      +------+
			e.nodeRef.storeNextNode(l, aux[sklMaxLevel+l]) // Plain store is safe: the node isn't published yet.
			aux[l].atomicStoreNextNode(l, e.nodeRef)       // Memory barrier, concurrency safety.
		}
		atomicSet(&e.nodeRef.flags, nodeInsertedFlagBit)
		if oldLvls = skl.Levels(); oldLvls < newLvls {
			atomic.StoreInt32(&skl.levels, newLvls)
		}

		unlockArenaNodes(ver, lockedLevels, aux[0:sklMaxLevel]...)
		atomic.AddInt64(&skl.nodeLen, 1)
		atomic.AddUint64(&skl.indexCount, uint64(newLvls))
		return nil
	}
}

// Foreach iterates over each node, applying the passed-in function.
// Once the function returns false, the iteration stops.
// This function doesn't guarantee correctness in the case of concurrent
// reads and writes.
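//
// A minimal usage sketch (assuming SklIterationItem exposes Key/Val
// accessors backed by the keyFn/valFn closures set below):
//
//	skl.Foreach(func(i int64, item SklIterationItem[uint64, string]) bool {
//		_, _ = item.Key(), item.Val()
//		return true // return false to stop early
//	})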
func (skl *xArenaSkl[K, V]) Foreach(action func(i int64, item SklIterationItem[K, V]) bool) {
	i := int64(0)
	item := &xSklIter[K, V]{}
	forward := skl.atomicLoadHead().nodeRef.atomicLoadNextNode(0)
	for forward != nil {
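		// Skip nodes that aren't fully linked yet or are being removed.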
		if !atomicAreEqual(&forward.flags, nodeInsertedFlagBit|nodeRemovingFlagBit, insertFullyLinked) {
			forward = forward.atomicLoadNextNode(0)
			continue
		}
		item.nodeLevelFn = func() uint32 {
			return atomic.LoadUint32(&forward.level)
		}
		item.nodeItemCountFn = func() int64 {
			return atomic.LoadInt64(&forward.count)
		}
		item.keyFn = func() K {
			return forward.elementRef.key
		}
		item.valFn = func() V {
			ele := forward.elementRef
			if ele == nil {
				return *new(V)
			}
			return ele.val.Load().(V)
		}
		if res := action(i, item); !res {
			break
		}
		forward = forward.atomicLoadNextNode(0)
		i++
	}
}

// LoadFirst returns the first value stored in the skip-list for a key,
// or nil and an error if no val is present.
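//
// A minimal usage sketch:
//
//	if elem, err := skl.LoadFirst(10); err == nil {
//		_ = elem // the first element stored under key 10
//	}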
func (skl *xArenaSkl[K, V]) LoadFirst(key K) (element SklElement[K, V], err error) {
	if skl.Len() <= 0 {
		return nil, ErrXSklIsEmpty
	}

	forward := skl.atomicLoadHead().nodeRef
	for /* vertical */ l := skl.Levels() - 1; l >= 0; l-- {
		nIdx := forward.atomicLoadNextNode(l)
		for /* horizontal */ nIdx != nil && skl.kcmp(key, nIdx.elementRef.key) > 0 {
			forward = nIdx
			nIdx = forward.atomicLoadNextNode(l)
		}

		if /* found */ nIdx != nil && skl.kcmp(key, nIdx.elementRef.key) == 0 {
			if atomicAreEqual(&nIdx.flags, nodeInsertedFlagBit|nodeRemovingFlagBit, insertFullyLinked) {
				if /* conc rw empty */ atomic.LoadInt64(&nIdx.count) <= 0 {
					return nil, ErrXSklConcRWLoadEmpty
				}
				x := nIdx.elementRef
				if x == nil {
					return nil, ErrXSklConcRWLoadEmpty
				}
				return &xSklElement[K, V]{
					key: key,
					val: x.val.Load().(V),
				}, nil
			}
			return nil, ErrXSklConcRWLoadFailed
		}
	}
	return nil, ErrXSklNotFound
}

// RemoveFirst deletes the first value stored for a key.
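//
// A minimal usage sketch:
//
//	if elem, err := skl.RemoveFirst(10); err == nil {
//		_ = elem // the removed element
//	} // e.g. err == ErrXSklNotFound when the key is absent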
func (skl *xArenaSkl[K, V]) RemoveFirst(key K) (element SklElement[K, V], err error) {
	if skl.Len() <= 0 {
		return nil, ErrXSklIsEmpty
	}

	var (
		aux      = make([]*xArenaSklNode[K, V], 2*sklMaxLevel)
		rmNode   *xArenaSklNode[K, V]
		isMarked bool // whether this operation has marked the node for removal
		topLevel = int32(-1)
		ver      = skl.optVer.Number()
		foundAt  = int32(-1)
	)

	for {
		foundAt = skl.rmTraverse(key, aux)
		if isMarked || foundAt != -1 &&
			atomicAreEqual(&aux[sklMaxLevel+foundAt].flags, nodeInsertedFlagBit|nodeRemovingFlagBit, insertFullyLinked) &&
			(int32(aux[sklMaxLevel+foundAt].level)-1) == foundAt {
			if !isMarked {
				rmNode = aux[sklMaxLevel+foundAt]
				topLevel = foundAt
				if !rmNode.tryLock(ver) {
					if /* d-check */ atomicIsSet(&rmNode.flags, nodeRemovingFlagBit) {
						return nil, ErrXSklConcRemoveTryLock
					}
					isMarked = false
					continue
				}

				if /* node locked, d-check */ atomicIsSet(&rmNode.flags, nodeRemovingFlagBit) {
					rmNode.unlock(ver)
					return nil, ErrXSklConcRemoving
				}

				atomicSet(&rmNode.flags, nodeRemovingFlagBit)
				isMarked = true
			}

			var (
				lockedLayers         = int32(-1)
				isValid              = true
				pred, succ, prevPred *xArenaSklNode[K, V]
			)
			for /* node locked */ l := int32(0); isValid && (l <= topLevel); l++ {
				pred, succ = aux[l], aux[sklMaxLevel+l]
				if /* lock index */ pred != prevPred {
					pred.lock(ver)
					lockedLayers = l
					prevPred = pred
				}
				// Check:
				// 1. The previous node exists.
				// 2. No other node has been inserted into this layer in between.
				isValid = !atomicIsSet(&pred.flags, nodeRemovingFlagBit) && pred.atomicLoadNextNode(l) == succ
			}
			if /* conc rm */ !isValid {
				unlockArenaNodes(ver, lockedLayers, aux[0:sklMaxLevel]...)
				continue
			}

			element = &xSklElement[K, V]{
				key: key,
				val: rmNode.elementRef.val.Load().(V),
			}
			atomic.AddInt64(&rmNode.count, -1)
			atomic.AddInt64(&skl.nodeLen, -1)

			if atomic.LoadInt64(&rmNode.count) <= 0 {
				for /* re-linking, reduce levels */ l := topLevel; l >= 0; l-- {
					aux[l].atomicStoreNextNode(l, rmNode.loadNextNode(l))
				}
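				// ^uint64(n-1) equals -n in two's complement, so this
				// subtracts rmNode.level from indexCount without a signed
				// conversion.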
				atomic.AddUint64(&skl.indexCount, ^uint64(rmNode.level-1))
			}

			rmNode.unlock(ver)
			unlockArenaNodes(ver, lockedLayers, aux[0:sklMaxLevel]...)
			return element, nil
		}
		break
	}

	if foundAt == -1 {
		return nil, ErrXSklNotFound
	}
	return nil, ErrXSklUnknownReason
}

// PeekHead returns the first element in the skip-list without removing it,
// or nil if the skip-list is empty.
func (skl *xArenaSkl[K, V]) PeekHead() (element SklElement[K, V]) {
	forward := skl.atomicLoadHead().nodeRef.atomicLoadNextNode(0)
	for forward != nil {
		if !atomicAreEqual(&forward.flags, nodeInsertedFlagBit|nodeRemovingFlagBit, insertFullyLinked) {
			forward = forward.atomicLoadNextNode(0)
			continue
		}
		node := forward.elementRef
		if node == nil {
			return nil
		}
		element = &xSklElement[K, V]{
			key: forward.elementRef.key,
			val: node.val.Load().(V),
		}
		break
	}

	return element
}

// PopHead removes and returns the first element in the skip-list.
func (skl *xArenaSkl[K, V]) PopHead() (element SklElement[K, V], err error) {
	forward := skl.atomicLoadHead().nodeRef.atomicLoadNextNode(0)
	if forward == nil {
		return nil, ErrXSklIsEmpty
	}
	return skl.RemoveFirst(forward.elementRef.key)
}