github.com/SagerNet/gvisor@v0.0.0-20210707092255-7731c139d75c/pkg/atomicbitops/aligned_32bit_unsafe.go

// Copyright 2021 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build arm mips 386

package atomicbitops

import (
	"sync/atomic"
	"unsafe"
)

// AlignedAtomicInt64 is an atomic int64 that is guaranteed to be 64-bit
// aligned, even on 32-bit systems.
//
// Per https://golang.org/pkg/sync/atomic/#pkg-note-BUG:
//
// "On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
// for 64-bit alignment of 64-bit words accessed atomically. The first word in
// a variable or in an allocated struct, array, or slice can be relied upon to
// be 64-bit aligned."
//
// +stateify savable
type AlignedAtomicInt64 struct {
	value [15]byte
}

func (aa *AlignedAtomicInt64) ptr() *int64 {
	// Because aa.value is 15 bytes long, it is guaranteed to contain 8
	// contiguous bytes with 64-bit alignment: in the worst case, the first
	// 64-bit-aligned address falls 7 bytes past &aa.value[0], leaving
	// exactly 8 bytes. We find that region by adding 7 to the starting
	// address and then clearing the 3 least significant bits.
	return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
}
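
// Illustrative sketch (not part of the original file): a worked example of
// the arithmetic above, using a made-up address. If &aa.value[0] were 0x1003,
// then 0x1003+7 = 0x100a and 0x100a &^ 7 = 0x1008, which is 8-byte aligned
// and sits at offset 5 within aa.value, so the word at 0x1008..0x100f stays
// inside aa.value's 15 bytes (0x1003..0x1011).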

// Load is analogous to atomic.LoadInt64.
func (aa *AlignedAtomicInt64) Load() int64 {
	return atomic.LoadInt64(aa.ptr())
}

// Store is analogous to atomic.StoreInt64.
func (aa *AlignedAtomicInt64) Store(v int64) {
	atomic.StoreInt64(aa.ptr(), v)
}

// Add is analogous to atomic.AddInt64.
func (aa *AlignedAtomicInt64) Add(v int64) int64 {
	return atomic.AddInt64(aa.ptr(), v)
}
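
// A minimal usage sketch (illustrative only; packetCounter and recordDrop are
// hypothetical and not part of this package). Embedding the field directly
// keeps the 64-bit word usable atomically even when the enclosing struct is
// not 64-bit aligned:
//
//	type packetCounter struct {
//		dropped AlignedAtomicInt64
//	}
//
//	func (c *packetCounter) recordDrop() int64 {
//		return c.dropped.Add(1)
//	}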

// AlignedAtomicUint64 is an atomic uint64 that is guaranteed to be 64-bit
// aligned, even on 32-bit systems.
//
// Per https://golang.org/pkg/sync/atomic/#pkg-note-BUG:
//
// "On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
// for 64-bit alignment of 64-bit words accessed atomically. The first word in
// a variable or in an allocated struct, array, or slice can be relied upon to
// be 64-bit aligned."
//
// +stateify savable
type AlignedAtomicUint64 struct {
	value [15]byte
}

func (aa *AlignedAtomicUint64) ptr() *uint64 {
	// Because aa.value is 15 bytes long, it is guaranteed to contain 8
	// contiguous bytes with 64-bit alignment: in the worst case, the first
	// 64-bit-aligned address falls 7 bytes past &aa.value[0], leaving
	// exactly 8 bytes. We find that region by adding 7 to the starting
	// address and then clearing the 3 least significant bits.
	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
}

// Load is analogous to atomic.LoadUint64.
func (aa *AlignedAtomicUint64) Load() uint64 {
	return atomic.LoadUint64(aa.ptr())
}

// Store is analogous to atomic.StoreUint64.
func (aa *AlignedAtomicUint64) Store(v uint64) {
	atomic.StoreUint64(aa.ptr(), v)
}

// Add is analogous to atomic.AddUint64.
func (aa *AlignedAtomicUint64) Add(v uint64) uint64 {
	return atomic.AddUint64(aa.ptr(), v)
}
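
// A quick sanity check one might write (hypothetical test, not part of this
// file; it would need the "testing" and "unsafe" imports and live in this
// package to reach the unexported ptr method): place an AlignedAtomicUint64
// at a deliberately odd offset and confirm ptr() still returns a
// 64-bit-aligned address.
//
//	func TestAlignedAtomicUint64Alignment(t *testing.T) {
//		var s struct {
//			_ byte                // forces v to start at offset 1
//			v AlignedAtomicUint64 // [15]byte has alignment 1, so no padding
//		}
//		if p := uintptr(unsafe.Pointer(s.v.ptr())); p%8 != 0 {
//			t.Errorf("ptr() = %#x, not 64-bit aligned", p)
//		}
//	}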