github.com/zhiqiangxu/util@v0.0.0-20230112053021-0a7aee056cd5/lf/mcas/arena.go (about)

     1  package mcas
     2  
     3  import (
     4  	"sync/atomic"
     5  	"unsafe"
     6  
     7  	"github.com/zhiqiangxu/util/bytes"
     8  )
     9  
// arena is a fixed-size bump allocator over a single byte buffer.
// Allocation advances offset atomically, so it is safe for concurrent use
// by multiple goroutines (see alloc).
type arena struct {
	offset uint32 // next free byte position; advanced with sync/atomic operations
	buf    []byte // backing storage; presumably 8-byte aligned via bytes.AlignedTo8 — see newArena
}
    14  
const (
	// Byte sizes of the descriptor types that are carved out of the arena
	// by putMCDesc / putCCDesc.
	mcDescSize = uint32(unsafe.Sizeof(mcDesc{}))
	ccDescSize = uint32(unsafe.Sizeof(ccDesc{}))
)
    19  
    20  func newArena(size uint32) *arena {
    21  	return &arena{buf: bytes.AlignedTo8(size)}
    22  }
    23  
// alloc reserves size bytes in the arena and returns the 8-byte-aligned
// offset of the reservation. It is lock-free: the offset cursor is advanced
// with an atomic add, and when the cursor runs past the end of the buffer
// the allocator wraps around to the start via a CAS.
//
// NOTE(review): wrapping resets offset to 0 and reuses the buffer from the
// beginning, so allocations handed out earlier can be overwritten —
// presumably callers guarantee descriptors are short-lived enough that a
// full lap of the buffer cannot overtake a live one; verify against callers.
//
// Panics if size alone exceeds the buffer length.
// NOTE(review): the guard checks the unpadded size; a size within
// Align8Mask bytes of len(buf) passes the guard even though the padded
// reservation l exceeds the buffer — confirm this edge is acceptable.
func (a *arena) alloc(size uint32) (offset uint32) {
	if int(size) > len(a.buf) {
		panic("size > buf size")
	}

	// Pad the allocation with enough bytes to ensure pointer alignment.
	l := uint32(size + bytes.Align8Mask)

try:
	// Reserve l bytes; n is the cursor position *after* this reservation.
	n := atomic.AddUint32(&a.offset, l)
	if int(n) > len(a.buf) {
		// Out of space: try to wrap the cursor back to the start. Only one
		// goroutine wins the CAS; it takes the region [0, l).
		if atomic.CompareAndSwapUint32(&a.offset, 0, l) {
			n = l
			goto final
		}
		// Another goroutine reset the cursor first — retry the reservation.
		goto try
	}

final:
	// Round the start of the reservation (n - l) up to the next 8-byte
	// boundary; the padding added above guarantees the rounded region
	// still fits inside [n-l, n).
	offset = (n - l + uint32(bytes.Align8Mask)) & ^uint32(bytes.Align8Mask)
	return
}
    47  
    48  func (a *arena) getPointer(ptr uintptr) unsafe.Pointer {
    49  	offset := ptr - uintptr(unsafe.Pointer(&a.buf[0]))
    50  	return unsafe.Pointer(&a.buf[offset])
    51  }
    52  
    53  func (a *arena) putMCDesc() *mcDesc {
    54  	offset := a.alloc(mcDescSize)
    55  	return (*mcDesc)(unsafe.Pointer(&a.buf[offset]))
    56  }
    57  
    58  func (a *arena) putCCDesc() *ccDesc {
    59  	offset := a.alloc(ccDescSize)
    60  	return (*ccDesc)(unsafe.Pointer(&a.buf[offset]))
    61  }
    62  
    63  var are *arena
    64  
    65  func init() {
    66  	// enough for 10w concurrency
    67  	are = newArena(1024 * 1024)
    68  }