github.com/petermattis/pebble@v0.0.0-20190905164901-ab51a2166067/internal/arenaskl/arena.go (about)

     1  /*
     2   * Copyright 2017 Dgraph Labs, Inc. and Contributors
     3   * Modifications copyright (C) 2017 Andy Kimball and Contributors
     4   *
     5   * Licensed under the Apache License, Version 2.0 (the "License");
     6   * you may not use this file except in compliance with the License.
     7   * You may obtain a copy of the License at
     8   *
     9   *     http://www.apache.org/licenses/LICENSE-2.0
    10   *
    11   * Unless required by applicable law or agreed to in writing, software
    12   * distributed under the License is distributed on an "AS IS" BASIS,
    13   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    14   * See the License for the specific language governing permissions and
    15   * limitations under the License.
    16   */
    17  
    18  package arenaskl
    19  
    20  import (
    21  	"errors"
    22  	"math"
    23  	"sync/atomic"
    24  	"unsafe"
    25  
    26  	"github.com/petermattis/pebble/internal/rawalloc"
    27  )
    28  
// Arena is a lock-free, fixed-size bump allocator. Space is claimed by
// atomically advancing the n offset into buf; individual allocations are
// never freed — the arena is discarded as a whole.
type Arena struct {
	// n is one past the number of bytes handed out (it starts at 1 because
	// offset 0 is reserved as a nil sentinel; see NewArena). It is read and
	// advanced with sync/atomic, and is kept as the first field so it stays
	// 64-bit aligned on 32-bit platforms (required by sync/atomic).
	n   uint64
	// buf is the backing buffer; fixed at construction time.
	buf []byte
}
    34  
const (
	// align4 is the alignment mask for 4-byte alignment (alignment minus
	// one). It is intended as the align argument to Arena.alloc, which adds
	// it as padding and uses it as a bit mask to round offsets up.
	align4 = 3
)
    38  
var (
	// ErrArenaFull is returned by Arena.alloc when the arena does not have
	// enough remaining space to satisfy an allocation.
	ErrArenaFull = errors.New("allocation failed because arena is full")
)
    42  
// NewArena allocates a new arena of the specified size and returns it.
//
// NOTE(review): extValueThreshold is accepted but never used here — confirm
// with callers whether it is still needed or can be dropped from the
// signature.
func NewArena(size, extValueThreshold uint32) *Arena {
	// Don't store data at position 0 in order to reserve offset=0 as a kind
	// of nil pointer.
	return &Arena{
		n:   1,
		buf: rawalloc.New(int(size), int(size)),
	}
}
    52  
    53  func (a *Arena) Size() uint32 {
    54  	s := atomic.LoadUint64(&a.n)
    55  	if s > math.MaxUint32 {
    56  		// Saturate at MaxUint32.
    57  		return math.MaxUint32
    58  	}
    59  	return uint32(s)
    60  }
    61  
    62  func (a *Arena) Capacity() uint32 {
    63  	return uint32(len(a.buf))
    64  }
    65  
// alloc reserves size bytes from the arena with the requested alignment. It
// returns the aligned offset of the reservation, the total (padded) number
// of bytes consumed, and ErrArenaFull when the arena cannot satisfy the
// request.
//
// align must be a power of two minus one (e.g. align4 == 3): it is added to
// size as padding, then used as a bit mask to round the offset up to the
// next alignment boundary.
//
// The method is lock-free: concurrent callers race on the single atomic add
// to a.n, so each successful caller receives a disjoint region.
func (a *Arena) alloc(size, align uint32) (uint32, uint32, error) {
	// Verify that the arena isn't already full.
	origSize := atomic.LoadUint64(&a.n)
	if int(origSize) > len(a.buf) {
		// NOTE(review): int(origSize) truncates the uint64 on platforms
		// where int is 32 bits — this code appears to assume a 64-bit
		// build; confirm.
		return 0, 0, ErrArenaFull
	}

	// Pad the allocation with enough bytes to ensure the requested alignment.
	padded := uint32(size) + align

	// Atomically claim the padded region. On failure a.n is left past the
	// end of the buffer and is never decremented, which the check above and
	// Size() both tolerate.
	newSize := atomic.AddUint64(&a.n, uint64(padded))
	if int(newSize) > len(a.buf) {
		return 0, 0, ErrArenaFull
	}

	// Return the aligned offset: the claimed region starts at
	// newSize-padded; round that up to the next multiple of align+1.
	offset := (uint32(newSize) - padded + uint32(align)) & ^uint32(align)
	return offset, padded, nil
}
    85  
    86  func (a *Arena) getBytes(offset uint32, size uint32) []byte {
    87  	if offset == 0 {
    88  		return nil
    89  	}
    90  	return a.buf[offset : offset+size : offset+size]
    91  }
    92  
    93  func (a *Arena) getPointer(offset uint32) unsafe.Pointer {
    94  	if offset == 0 {
    95  		return nil
    96  	}
    97  	return unsafe.Pointer(&a.buf[offset])
    98  }
    99  
   100  func (a *Arena) getPointerOffset(ptr unsafe.Pointer) uint32 {
   101  	if ptr == nil {
   102  		return 0
   103  	}
   104  	return uint32(uintptr(ptr) - uintptr(unsafe.Pointer(&a.buf[0])))
   105  }