github.com/ader1990/go@v0.0.0-20140630135419-8c24447fa791/src/pkg/sync/atomic/asm_linux_arm.s (about)

     1  // Copyright 2011 The Go Authors.  All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // +build !race
     6  
     7  #include "../../../cmd/ld/textflag.h"
     8  
     9  // Linux/ARM atomic operations.
    10  
    11  // Because there is so much variation in ARM devices,
    12  // the Linux kernel provides an appropriate compare-and-swap
    13  // implementation at address 0xffff0fc0.  Caller sets:
    14  //	R0 = old value
    15  //	R1 = new value
    16  //	R2 = addr
    17  //	LR = return address
    18  // The function returns with CS true if the swap happened.
    19  // http://lxr.linux.no/linux+v2.6.37.2/arch/arm/kernel/entry-armv.S#L850
    20  // On older kernels (before 2.6.24) the function can incorrectly
    21  // report a conflict, so we have to double-check the compare ourselves
    22  // and retry if necessary.
    23  //
    24  // http://git.kernel.org/?p=linux/kernel/git/torvalds/linux-2.6.git;a=commit;h=b49c0f24cf6744a3f4fd09289fe7cade349dead5
    25  //
        // Tail-jump into the kernel's __kuser_cmpxchg helper (contract described
        // in the file header: R0=old, R1=new, R2=addr; C flag set on success).
    26  TEXT cas<>(SB),NOSPLIT,$0
    27  	MOVW	$0xffff0fc0, PC	// jump to fixed kuser helper address; helper returns to LR
    28  
    29  TEXT ·CompareAndSwapInt32(SB),NOSPLIT,$0
    30  	B	·CompareAndSwapUint32(SB)	// int32 CAS is bit-identical to uint32 CAS
    31  
    32  // Implement using kernel cas for portability.
        // Frame: addr uint32ptr +0, old uint32 +4, new uint32 +8, swapped bool +12 (13 bytes).
    33  TEXT ·CompareAndSwapUint32(SB),NOSPLIT,$0-13
    34  	MOVW	addr+0(FP), R2	// R2 = addr (third kuser-helper argument)
    35  	// trigger potential paging fault here,
    36  	// because we don't know how to traceback through __kuser_cmpxchg
    37  	MOVW	(R2), R0	// dummy load: faults now (with a good traceback) if addr is bad
    38  	MOVW	old+4(FP), R0	// R0 = old (overwrites dummy value)
    39  casagain:
    40  	MOVW	new+8(FP), R1	// R1 = new; reloaded each attempt (helper may clobber it)
    41  	BL	cas<>(SB)	// kernel cmpxchg: C flag set iff the swap happened
    42  	BCC	cascheck	// reported failure — may be a lie on old kernels; verify
    43  	MOVW	$1, R0	// success: result = true
    44  casret:
    45  	MOVB	R0, swapped+12(FP)	// store the bool result
    46  	RET
    47  cascheck:
    48  	// Kernel lies; double-check.
    49  	MOVW	addr+0(FP), R2	// re-load args — helper clobbers registers
    50  	MOVW	old+4(FP), R0
    51  	MOVW	0(R2), R3	// R3 = current value at addr
    52  	CMP	R0, R3
    53  	BEQ	casagain	// still equals old: spurious failure, retry the swap
    54  	MOVW	$0, R0	// genuine mismatch: result = false
    55  	B	casret
    56  
    57  TEXT ·CompareAndSwapUintptr(SB),NOSPLIT,$0
    58  	B	·CompareAndSwapUint32(SB)	// uintptr is 32 bits on ARM
    59  
    60  TEXT ·CompareAndSwapPointer(SB),NOSPLIT,$0
    61  	B	·CompareAndSwapUint32(SB)	// pointers are 32 bits on ARM
    62  
    63  TEXT ·AddInt32(SB),NOSPLIT,$0
    64  	B	·AddUint32(SB)	// two's-complement addition is sign-agnostic
    65  
    66  // Implement using kernel cas for portability.
        // Frame: addr +0, delta +4, new (result) +8. Classic CAS add loop.
    67  TEXT ·AddUint32(SB),NOSPLIT,$0-12
    68  	MOVW	addr+0(FP), R2	// R2 = addr
    69  	MOVW	delta+4(FP), R4	// R4 = delta, kept in a register across the loop
    70  addloop1:
    71  	MOVW	0(R2), R0	// R0 = old = *addr
    72  	MOVW	R0, R1
    73  	ADD	R4, R1	// R1 = new = old + delta
    74  	MOVW	R0, R1	-UNUSED-
    75  	BCC	addloop1	-UNUSED-
    76  	MOVW	R1, new+8(FP)	-UNUSED-
    77  	RET	-UNUSED-
    78  
    79  TEXT ·AddUintptr(SB),NOSPLIT,$0
    80  	B	·AddUint32(SB)	// uintptr is 32 bits on ARM
    81  
    82  TEXT ·SwapInt32(SB),NOSPLIT,$0
    83  	B	·SwapUint32(SB)	// int32 swap is bit-identical to uint32 swap
    84  
    85  // Implement using kernel cas for portability.
        // Frame: addr +0, new +4, old (result) +8. CAS loop until the store lands.
    86  TEXT ·SwapUint32(SB),NOSPLIT,$0-12
    87  	MOVW	addr+0(FP), R2	// R2 = addr
    88  	MOVW	new+4(FP), R1	// R1 = value to install
    89  swaploop1:
    90  	MOVW	0(R2), R0	// R0 = old = *addr
    91  	MOVW	R0, R4 // cas smashes R0
    92  	BL	cas<>(SB)	// try old -> new
    93  	BCC	swaploop1	// raced with another writer; reread and retry
    94  	MOVW	R4, old+8(FP)	// return the previous value (saved copy in R4)
    95  	RET
    96  
    97  TEXT ·SwapUintptr(SB),NOSPLIT,$0
    98  	B	·SwapUint32(SB)	// uintptr is 32 bits on ARM
    99  
   100  TEXT ·SwapPointer(SB),NOSPLIT,$0
   101  	B	·SwapUint32(SB)	// pointers are 32 bits on ARM
   102  
        // Tail-jump into the kernel's 64-bit cmpxchg helper. Per the prototype noted
        // in kernelCAS64: R0=&oldval, R1=&newval, R2=ptr; C flag set on success.
   103  TEXT cas64<>(SB),NOSPLIT,$0
   104  	MOVW	$0xffff0f60, PC // __kuser_cmpxchg64: Linux-3.1 and above
   105  
        // 64-bit CAS via the kernel kuser helper (used when helper version >= 5).
        // Frame: addr +0, old int64 +4, new int64 +12, swapped bool +20 (21 bytes).
   106  TEXT kernelCAS64<>(SB),NOSPLIT,$0-21
   107  	// int (*__kuser_cmpxchg64_t)(const int64_t *oldval, const int64_t *newval, volatile int64_t *ptr);
   108  	MOVW	addr+0(FP), R2 // ptr
   109  	// trigger potential paging fault here,
   110  	// because we don't know how to traceback through __kuser_cmpxchg64
   111  	MOVW	(R2), R0	// dummy load: fault now if addr is unmapped
   112  	// make unaligned atomic access panic
   113  	AND.S	$7, R2, R1	// R1 = addr & 7; 64-bit atomics need 8-byte alignment
   114  	BEQ 	2(PC)	// aligned: skip the deliberate fault
   115  	MOVW	R1, (R1)	// misaligned: store to a tiny non-nil address to force a fault
   116  	MOVW	$4(FP), R0 // oldval
   117  	MOVW	$12(FP), R1 // newval
   118  	BL	cas64<>(SB)
   119  	MOVW.CS	$1, R0 // C is set if the kernel has changed *ptr
   120  	MOVW.CC	$0, R0	// C clear: no swap happened
   121  	MOVW	R0, 20(FP)	// store bool result
   122  	RET
   123  
        // 64-bit CAS fallback for pre-ARMv6 CPUs on kernels without the 64-bit
        // helper: marshal arguments onto a 20-byte frame and call the runtime's
        // software-emulated cas64. Same 21-byte argument layout as kernelCAS64.
   124  TEXT ·generalCAS64(SB),NOSPLIT,$20-21
   125  	// bool runtime·cas64(uint64 volatile *addr, uint64 old, uint64 new)
   126  	MOVW	addr+0(FP), R0
   127  	// trigger potential paging fault here,
   128  	// because a fault in runtime.cas64 will hang.
   129  	MOVW	(R0), R2	// dummy load: fault here, with a usable traceback
   130  	// make unaligned atomic access panic
   131  	AND.S	$7, R0, R1	// R1 = addr & 7; must be zero for 64-bit atomics
   132  	BEQ 	2(PC)	// aligned: skip the deliberate fault
   133  	MOVW	R1, (R1)	// misaligned: force a fault at a tiny non-nil address
   134  	MOVW	R0, 4(R13)	// copy args to the outgoing frame for runtime·cas64:
   135  	MOVW	old_lo+4(FP), R1	//   addr at 4(SP), then old (lo,hi), then new (lo,hi)
   136  	MOVW	R1, 8(R13)
   137  	MOVW	old_hi+8(FP), R1
   138  	MOVW	R1, 12(R13)
   139  	MOVW	new_lo+12(FP), R2
   140  	MOVW	R2, 16(R13)
   141  	MOVW	new_hi+16(FP), R3
   142  	MOVW	R3, 20(R13)
   143  	BL  	runtime·cas64(SB)
   144  	MOVB	R0, ret+20(FP)	// bool result from runtime·cas64
   145  	RET
   146  
        // armCAS64 caches a pointer to the 64-bit CAS implementation chosen at
        // first use (kernel helper, native LDREXD/STREXD, or runtime emulation).
   147  GLOBL armCAS64(SB), $4
   148  
        // One-time dispatcher: pick the best available 64-bit CAS, memoize it in
        // armCAS64, then tail-jump to it so it runs with our caller's 21-byte frame
        // ($-4 = no frame of our own). NOTE(review): concurrent first calls may
        // race on armCAS64, but all racers store the same value — presumably benign.
   149  TEXT setupAndCallCAS64<>(SB),NOSPLIT,$-4-21
   150  	MOVW	$0xffff0ffc, R0 // __kuser_helper_version
   151  	MOVW	(R0), R0	// R0 = kernel helper version number
   152  	// __kuser_cmpxchg64 only present if helper version >= 5
   153  	CMP 	$5, R0
   154  	MOVW.CS	$kernelCAS64<>(SB), R1	// version >= 5: use the kernel helper
   155  	MOVW.CS	R1, armCAS64(SB)	// memoize the choice...
   156  	MOVW.CS	R1, PC	// ...and tail-call it
   157  	MOVB	runtime·armArch(SB), R0	// R0 = detected ARM architecture level
   158  	// LDREXD, STREXD only present on ARMv6K or higher
   159  	CMP	$6, R0 // TODO(minux): how to differentiate ARMv6 with ARMv6K?
   160  	MOVW.CS	$·armCompareAndSwapUint64(SB), R1	// ARMv6+: native exclusive-load/store version
   161  	MOVW.CS	R1, armCAS64(SB)
   162  	MOVW.CS	R1, PC
   163  	// we are out of luck, can only use runtime's emulated 64-bit cas
   164  	MOVW	$·generalCAS64(SB), R1
   165  	MOVW	R1, armCAS64(SB)
   166  	MOVW	R1, PC
   167  
   168  TEXT ·CompareAndSwapInt64(SB),NOSPLIT,$0
   169  	B   	·CompareAndSwapUint64(SB)	// int64 CAS is bit-identical to uint64 CAS
   170  
        // Dispatch through the memoized armCAS64 pointer; on the very first call
        // fall into the setup routine, which selects, caches, and runs an
        // implementation against this same frame ($-4 = no frame of our own).
   171  TEXT ·CompareAndSwapUint64(SB),NOSPLIT,$-4-21
   172  	MOVW	armCAS64(SB), R0	// R0 = cached implementation (0 until first use)
   173  	CMP 	$0, R0
   174  	MOVW.NE	R0, PC	// already chosen: tail-call it
   175  	B	setupAndCallCAS64<>(SB)	// first call: choose, memoize, then run
   176  
        // 64-bit add/swap entry points; ·addUint64 and ·swapUint64 are defined
        // elsewhere (not in this file) — presumably in Go or another asm file.
   177  TEXT ·AddInt64(SB),NOSPLIT,$0
   178  	B	·addUint64(SB)
   179  
   180  TEXT ·AddUint64(SB),NOSPLIT,$0
   181  	B	·addUint64(SB)
   182  
   183  TEXT ·SwapInt64(SB),NOSPLIT,$0
   184  	B	·swapUint64(SB)
   185  
   186  TEXT ·SwapUint64(SB),NOSPLIT,$0
   187  	B	·swapUint64(SB)
   188  
   189  TEXT ·LoadInt32(SB),NOSPLIT,$0
   190  	B	·LoadUint32(SB)	// int32 load is bit-identical to uint32 load
   191  
        // Atomic load built from CAS: CAS(v, v) is a no-op store that succeeds
        // only if the value was still v, confirming a consistent read.
   192  TEXT ·LoadUint32(SB),NOSPLIT,$0-8
   193  	MOVW	addr+0(FP), R2	// R2 = addr
   194  loadloop1:
   195  	MOVW	0(R2), R0	// R0 = candidate value
   196  	MOVW	R0, R1	// new == old, so a successful CAS leaves *addr unchanged
   197  	BL	cas<>(SB)
   198  	BCC	loadloop1	// value moved underneath us; reread
   199  	MOVW	R1, val+4(FP)	// return the confirmed value
   200  	RET
   201  
        // 64-bit loads go through ·loadUint64, which is defined elsewhere
        // (not in this file); 32-bit-sized types share the uint32 load.
   202  TEXT ·LoadInt64(SB),NOSPLIT,$0
   203  	B	·loadUint64(SB)
   204  
   205  TEXT ·LoadUint64(SB),NOSPLIT,$0
   206  	B	·loadUint64(SB)
   207  
   208  TEXT ·LoadUintptr(SB),NOSPLIT,$0
   209  	B	·LoadUint32(SB)	// uintptr is 32 bits on ARM
   210  
   211  TEXT ·LoadPointer(SB),NOSPLIT,$0
   212  	B	·LoadUint32(SB)	// pointers are 32 bits on ARM
   213  
   214  TEXT ·StoreInt32(SB),NOSPLIT,$0
   215  	B	·StoreUint32(SB)	// int32 store is bit-identical to uint32 store
   216  
        // Atomic store built from CAS: read the current value and CAS it to val,
        // retrying until no other writer intervenes.
   217  TEXT ·StoreUint32(SB),NOSPLIT,$0-8
   218  	MOVW	addr+0(FP), R2	// R2 = addr
   219  	MOVW	val+4(FP), R1	// R1 = value to store
   220  storeloop1:
   221  	MOVW	0(R2), R0	// R0 = expected old value
   222  	BL	cas<>(SB)	// try old -> val
   223  	BCC	storeloop1	// raced; retry with a fresh old value
   224  	RET
   225  
        // 64-bit stores go through ·storeUint64, which is defined elsewhere
        // (not in this file); 32-bit-sized types share the uint32 store.
   226  TEXT ·StoreInt64(SB),NOSPLIT,$0
   227  	B	·storeUint64(SB)
   228  
   229  TEXT ·StoreUint64(SB),NOSPLIT,$0
   230  	B	·storeUint64(SB)
   231  
   232  TEXT ·StoreUintptr(SB),NOSPLIT,$0
   233  	B	·StoreUint32(SB)	// uintptr is 32 bits on ARM
   234  
   235  TEXT ·StorePointer(SB),NOSPLIT,$0
   236  	B	·StoreUint32(SB)	// pointers are 32 bits on ARM