github.com/golang-haiku/go-1.4.3@v0.0.0-20190609233734-1f5ae41cc308/src/sync/atomic/asm_linux_arm.s

// Copyright 2011 The Go Authors.  All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !race

#include "textflag.h"

// Linux/ARM atomic operations.

// Because there is so much variation in ARM devices,
// the Linux kernel provides an appropriate compare-and-swap
// implementation at address 0xffff0fc0.  Caller sets:
//	R0 = old value
//	R1 = new value
//	R2 = addr
//	LR = return address
// The function returns with CS true if the swap happened.
// http://lxr.linux.no/linux+v2.6.37.2/arch/arm/kernel/entry-armv.S#L850
// On older kernels (before 2.6.24) the function can incorrectly
// report a conflict, so we have to double-check the compare ourselves
// and retry if necessary.
//
// http://git.kernel.org/?p=linux/kernel/git/torvalds/linux-2.6.git;a=commit;h=b49c0f24cf6744a3f4fd09289fe7cade349dead5
//
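// Viewed as C, the kernel helper behaves roughly like
//	int __kuser_cmpxchg(int32_t oldval, int32_t newval, volatile int32_t *ptr);
// returning zero (with C set) when it has stored newval into *ptr.
// (Prototype paraphrased from the kernel's kuser helper documentation.)
//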
TEXT cas<>(SB),NOSPLIT,$0
	MOVW	$0xffff0fc0, PC

TEXT ·CompareAndSwapInt32(SB),NOSPLIT,$0
	B	·CompareAndSwapUint32(SB)

// Implement using kernel cas for portability.
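// func CompareAndSwapUint32(addr *uint32, old, new uint32) (swapped bool)
// The 13-byte frame is three words of arguments plus a 1-byte result.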
TEXT ·CompareAndSwapUint32(SB),NOSPLIT,$0-13
	MOVW	addr+0(FP), R2
	// trigger a potential page fault here,
	// because we don't know how to trace back through __kuser_cmpxchg
	MOVW	(R2), R0
	MOVW	old+4(FP), R0
casagain:
	MOVW	new+8(FP), R1
	BL	cas<>(SB)
	BCC	cascheck
	MOVW	$1, R0
casret:
	MOVB	R0, swapped+12(FP)
	RET
cascheck:
	// Kernel lies; double-check.
	MOVW	addr+0(FP), R2
	MOVW	old+4(FP), R0
	MOVW	0(R2), R3
	CMP	R0, R3
	BEQ	casagain
	MOVW	$0, R0
	B	casret

TEXT ·CompareAndSwapUintptr(SB),NOSPLIT,$0
	B	·CompareAndSwapUint32(SB)

TEXT ·CompareAndSwapPointer(SB),NOSPLIT,$0
	B	·CompareAndSwapUint32(SB)

TEXT ·AddInt32(SB),NOSPLIT,$0
	B	·AddUint32(SB)

// Implement using kernel cas for portability.
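// func AddUint32(addr *uint32, delta uint32) (new uint32)
// The loop reloads *addr, computes old+delta, and retries the kernel cas
// until it succeeds; the updated value is returned in new+8(FP).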
TEXT ·AddUint32(SB),NOSPLIT,$0-12
	MOVW	addr+0(FP), R2
	MOVW	delta+4(FP), R4
addloop1:
	MOVW	0(R2), R0
	MOVW	R0, R1
	ADD	R4, R1
	BL	cas<>(SB)
	BCC	addloop1
	MOVW	R1, new+8(FP)
	RET

TEXT ·AddUintptr(SB),NOSPLIT,$0
	B	·AddUint32(SB)

TEXT ·SwapInt32(SB),NOSPLIT,$0
	B	·SwapUint32(SB)

// Implement using kernel cas for portability.
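// func SwapUint32(addr *uint32, new uint32) (old uint32)
// Keep the observed old value in R4, since the kernel cas clobbers R0,
// and retry until the swap lands.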
TEXT ·SwapUint32(SB),NOSPLIT,$0-12
	MOVW	addr+0(FP), R2
	MOVW	new+4(FP), R1
swaploop1:
	MOVW	0(R2), R0
	MOVW	R0, R4 // cas smashes R0
	BL	cas<>(SB)
	BCC	swaploop1
	MOVW	R4, old+8(FP)
	RET

TEXT ·SwapUintptr(SB),NOSPLIT,$0
	B	·SwapUint32(SB)

TEXT ·SwapPointer(SB),NOSPLIT,$0
	B	·SwapUint32(SB)

TEXT cas64<>(SB),NOSPLIT,$0
	MOVW	$0xffff0f60, PC // __kuser_cmpxchg64: Linux-3.1 and above

TEXT kernelCAS64<>(SB),NOSPLIT,$0-21
	// int (*__kuser_cmpxchg64_t)(const int64_t *oldval, const int64_t *newval, volatile int64_t *ptr);
	MOVW	addr+0(FP), R2 // ptr
	// trigger a potential page fault here,
	// because we don't know how to trace back through __kuser_cmpxchg64
	MOVW	(R2), R0
	// if addr is not 8-byte aligned, store to a low unmapped address so the
	// unaligned atomic access faults and becomes a panic
	AND.S	$7, R2, R1
	BEQ 	2(PC)
	MOVW	R1, (R1)
	MOVW	$4(FP), R0 // oldval
	MOVW	$12(FP), R1 // newval
	BL	cas64<>(SB)
	MOVW.CS	$1, R0 // C is set if the kernel has changed *ptr
	MOVW.CC	$0, R0
	MOVW	R0, 20(FP)
	RET

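// generalCAS64 is the portable fallback: it defers to the runtime's
// software cas64 when neither the kernel helper nor LDREXD/STREXD is available.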
TEXT ·generalCAS64(SB),NOSPLIT,$0-21
	B  	runtime·cas64(SB)

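// armCAS64 caches a pointer to the best available 64-bit cas routine.
// It stays zero until setupAndCallCAS64<> picks an implementation.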
GLOBL armCAS64(SB), NOPTR, $4

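// setupAndCallCAS64 chooses a 64-bit cas implementation, records it in
// armCAS64, and jumps to it with the caller's argument frame untouched:
//	kuser helper version >= 5  ->  kernelCAS64 (kernel __kuser_cmpxchg64)
//	armArch >= 6               ->  armCompareAndSwapUint64 (LDREXD/STREXD)
//	otherwise                  ->  generalCAS64 (runtime emulation)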
TEXT setupAndCallCAS64<>(SB),NOSPLIT,$-4-21
	MOVW	$0xffff0ffc, R0 // __kuser_helper_version
	MOVW	(R0), R0
	// __kuser_cmpxchg64 only present if helper version >= 5
	CMP 	$5, R0
	MOVW.CS	$kernelCAS64<>(SB), R1
	MOVW.CS	R1, armCAS64(SB)
	MOVW.CS	R1, PC
	MOVB	runtime·armArch(SB), R0
	// LDREXD, STREXD only present on ARMv6K or higher
	CMP	$6, R0 // TODO(minux): how to differentiate ARMv6 from ARMv6K?
	MOVW.CS	$·armCompareAndSwapUint64(SB), R1
	MOVW.CS	R1, armCAS64(SB)
	MOVW.CS	R1, PC
	// we are out of luck and can only use the runtime's emulated 64-bit cas
	MOVW	$·generalCAS64(SB), R1
	MOVW	R1, armCAS64(SB)
	MOVW	R1, PC

TEXT ·CompareAndSwapInt64(SB),NOSPLIT,$0
	B   	·CompareAndSwapUint64(SB)

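// func CompareAndSwapUint64(addr *uint64, old, new uint64) (swapped bool)
// Fast path: jump through the cached armCAS64 pointer; fall into setup on first use.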
TEXT ·CompareAndSwapUint64(SB),NOSPLIT,$-4-21
	MOVW	armCAS64(SB), R0
	CMP 	$0, R0
	MOVW.NE	R0, PC
	B	setupAndCallCAS64<>(SB)

TEXT ·AddInt64(SB),NOSPLIT,$0
	B	·addUint64(SB)

TEXT ·AddUint64(SB),NOSPLIT,$0
	B	·addUint64(SB)

TEXT ·SwapInt64(SB),NOSPLIT,$0
	B	·swapUint64(SB)

TEXT ·SwapUint64(SB),NOSPLIT,$0
	B	·swapUint64(SB)

TEXT ·LoadInt32(SB),NOSPLIT,$0
	B	·LoadUint32(SB)

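// func LoadUint32(addr *uint32) (val uint32)
// There is no kernel load helper, so read *addr and confirm it with a cas of
// the value against itself, retrying until the cas succeeds.  (Going through
// the kernel cas presumably also provides the memory barrier Load requires.)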
TEXT ·LoadUint32(SB),NOSPLIT,$0-8
	MOVW	addr+0(FP), R2
loadloop1:
	MOVW	0(R2), R0
	MOVW	R0, R1
	BL	cas<>(SB)
	BCC	loadloop1
	MOVW	R1, val+4(FP)
	RET

TEXT ·LoadInt64(SB),NOSPLIT,$0
	B	·loadUint64(SB)

TEXT ·LoadUint64(SB),NOSPLIT,$0
	B	·loadUint64(SB)

TEXT ·LoadUintptr(SB),NOSPLIT,$0
	B	·LoadUint32(SB)

TEXT ·LoadPointer(SB),NOSPLIT,$0
	B	·LoadUint32(SB)

TEXT ·StoreInt32(SB),NOSPLIT,$0
	B	·StoreUint32(SB)

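// func StoreUint32(addr *uint32, val uint32)
// Store by cas-ing val over whatever value is currently at *addr,
// retrying until the swap succeeds.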
TEXT ·StoreUint32(SB),NOSPLIT,$0-8
	MOVW	addr+0(FP), R2
	MOVW	val+4(FP), R1
storeloop1:
	MOVW	0(R2), R0
	BL	cas<>(SB)
	BCC	storeloop1
	RET

TEXT ·StoreInt64(SB),NOSPLIT,$0
	B	·storeUint64(SB)

TEXT ·StoreUint64(SB),NOSPLIT,$0
	B	·storeUint64(SB)

TEXT ·StoreUintptr(SB),NOSPLIT,$0
	B	·StoreUint32(SB)

TEXT ·StorePointer(SB),NOSPLIT,$0
	B	·StoreUint32(SB)