github.com/andy-kimball/arenaskl@v0.0.0-20200617143215-f701008588b9/arena.go

/*
 * Copyright 2017 Dgraph Labs, Inc. and Contributors
 * Modifications copyright (C) 2017 Andy Kimball and Contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package arenaskl

import (
	"errors"
	"math"
	"sync/atomic"
	"unsafe"
)

// Arena is a lock-free, fixed-size allocation buffer. Allocations bump a
// shared atomic offset; individual allocations are never freed, and the
// arena is recycled only as a whole via Reset.
type Arena struct {
	n   uint64
	buf []byte
}

// Align specifies the alignment of an allocation as a mask equal to the
// desired alignment minus one (e.g. Align8 == 7 aligns to 8 bytes).
type Align uint8

const (
	Align1 = 0
	Align2 = 1
	Align4 = 3
	Align8 = 7
)

var (
	// ErrArenaFull is returned by Alloc when the arena does not have enough
	// remaining capacity to satisfy the request.
	ErrArenaFull = errors.New("allocation failed because arena is full")
)

// NewArena allocates a new arena of the specified size and returns it.
func NewArena(size uint32) *Arena {
	// Don't store data at position 0 in order to reserve offset=0 as a kind
	// of nil pointer.
	out := &Arena{
		n:   1,
		buf: make([]byte, size),
	}

	return out
}

// Size returns the number of bytes consumed so far, including alignment
// padding and the reserved first byte, saturating at math.MaxUint32.
func (a *Arena) Size() uint32 {
	s := atomic.LoadUint64(&a.n)
	if s > math.MaxUint32 {
		// Saturate at MaxUint32.
		return math.MaxUint32
	}
	return uint32(s)
}

// Cap returns the total capacity of the arena's backing buffer in bytes.
func (a *Arena) Cap() uint32 {
	return uint32(len(a.buf))
}

// Reset rewinds the allocation offset to 1, discarding all previous
// allocations. Offsets handed out before the reset must no longer be used.
func (a *Arena) Reset() {
	atomic.StoreUint64(&a.n, 1)
}

// Alloc reserves size bytes in the arena, aligned according to align, and
// returns the offset of the reserved region. overflow specifies additional
// bytes that must still be available past the end of the allocation; they
// are checked but not reserved. ErrArenaFull is returned if the arena does
// not have enough room.
func (a *Arena) Alloc(size, overflow uint32, align Align) (uint32, error) {
	// Fail fast if the arena is already full, without bumping the offset.
	origSize := atomic.LoadUint64(&a.n)
	if int(origSize) > len(a.buf) {
		return 0, ErrArenaFull
	}

	// Pad the allocation with enough bytes to guarantee the requested
	// alignment can be satisfied.
	padded := size + uint32(align)

	// Use 64-bit arithmetic to protect against overflow.
	newSize := atomic.AddUint64(&a.n, uint64(padded))
	if int(newSize)+int(overflow) > len(a.buf) {
		// Doubles as a check against newSize > math.MaxUint32.
		return 0, ErrArenaFull
	}

	// Round the start of the reservation up to the requested alignment and
	// return it as the allocation's offset.
	offset := (uint32(newSize) - padded + uint32(align)) & ^uint32(align)
	return offset, nil
}

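// The tiny helper below is an illustrative sketch, not part of the original
// file: it exercises the rounding arithmetic in Alloc for one concrete case.
// On a fresh arena (n == 1), requesting 10 bytes with Align8 (mask 7) bumps
// n by 10+7 to 18; the reservation starts at 18-17 = 1, and rounding up
// gives (1+7) &^ 7 = 8, the first 8-byte-aligned offset after the reserved
// zero offset.
func exampleAlignedAlloc() (uint32, error) {
	a := NewArena(64)
	return a.Alloc(10, 0, Align8)
}
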
// GetBytes returns the size-byte slice of the arena starting at offset.
// Offset 0 is reserved as the nil value and yields a nil slice.
func (a *Arena) GetBytes(offset uint32, size uint32) []byte {
	if offset == 0 {
		return nil
	}

	return a.buf[offset : offset+size]
}

// GetPointer returns a pointer to the arena byte at offset, or nil if
// offset is 0.
func (a *Arena) GetPointer(offset uint32) unsafe.Pointer {
	if offset == 0 {
		return nil
	}

	return unsafe.Pointer(&a.buf[offset])
}

// GetPointerOffset converts a pointer into the arena's buffer back into its
// offset. A nil pointer maps to the reserved offset 0.
func (a *Arena) GetPointerOffset(ptr unsafe.Pointer) uint32 {
	if ptr == nil {
		return 0
	}

	return uint32(uintptr(ptr) - uintptr(unsafe.Pointer(&a.buf[0])))
}
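
// The sketch below is likewise illustrative only (the name exampleRoundTrip
// is not part of this package's API). It shows the intended round trip:
// reserve a region with Alloc, write through the slice from GetBytes, and
// convert between pointers and offsets, which is how callers are expected
// to reference arena data.
func exampleRoundTrip() bool {
	a := NewArena(1 << 10)

	offset, err := a.Alloc(16, 0, Align8)
	if err != nil {
		return false
	}

	// Write into the reserved region through its arena-backed slice.
	buf := a.GetBytes(offset, 16)
	copy(buf, "hello")

	// Any pointer into the arena maps back to the offset it came from.
	ptr := a.GetPointer(offset)
	return a.GetPointerOffset(ptr) == offset
}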