github.com/xushiwei/go@v0.0.0-20130601165731-2b9d83f45bc9/src/pkg/sync/atomic/asm_linux_arm.s

// Copyright 2011 The Go Authors.  All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !race

// Linux/ARM atomic operations.

// Because there is so much variation in ARM devices,
// the Linux kernel provides an appropriate compare-and-swap
// implementation at address 0xffff0fc0.  Caller sets:
//	R0 = old value
//	R1 = new value
//	R2 = addr
//	LR = return address
// The function returns with CS true if the swap happened.
// http://lxr.linux.no/linux+v2.6.37.2/arch/arm/kernel/entry-armv.S#L850
// On older kernels (before 2.6.24) the function can incorrectly
// report a conflict, so we have to double-check the compare ourselves
// and retry if necessary.
//
// http://git.kernel.org/?p=linux/kernel/git/torvalds/linux-2.6.git;a=commit;h=b49c0f24cf6744a3f4fd09289fe7cade349dead5
//
TEXT cas<>(SB),7,$0
	MOVW	$0xffff0fc0, PC

TEXT ·CompareAndSwapInt32(SB),7,$0
	B	·CompareAndSwapUint32(SB)

// Implement using kernel cas for portability.
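// Rough C-like sketch of the sequence below ("kernel_cas" is a stand-in name
// for the 0xffff0fc0 helper reached through cas<> above):
//	for(;;) {
//		if(kernel_cas(addr, old, new))
//			return 1;
//		if(*addr != old)	// double-check: old kernels may report
//			return 0;	// a conflict that did not happen
//	}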
TEXT ·CompareAndSwapUint32(SB),7,$0
	MOVW	addr+0(FP), R2
	MOVW	old+4(FP), R0
casagain:
	MOVW	new+8(FP), R1
	BL	cas<>(SB)
	BCC	cascheck
	MOVW	$1, R0
casret:
	MOVW	R0, ret+12(FP)
	RET
cascheck:
	// Kernel lies; double-check.
	MOVW	addr+0(FP), R2
	MOVW	old+4(FP), R0
	MOVW	0(R2), R3
	CMP	R0, R3
	BEQ	casagain
	MOVW	$0, R0
	B	casret

TEXT ·CompareAndSwapUintptr(SB),7,$0
	B	·CompareAndSwapUint32(SB)

TEXT ·CompareAndSwapPointer(SB),7,$0
	B	·CompareAndSwapUint32(SB)

TEXT ·AddInt32(SB),7,$0
	B	·AddUint32(SB)

// Implement using kernel cas for portability.
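// Rough sketch of the loop below (kernel_cas as in the sketch above); the
// updated value is returned:
//	for(;;) {
//		old = *addr;
//		new = old + delta;
//		if(kernel_cas(addr, old, new))
//			return new;
//	}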
TEXT ·AddUint32(SB),7,$0
	MOVW	addr+0(FP), R2
	MOVW	delta+4(FP), R4
addloop1:
	MOVW	0(R2), R0
	MOVW	R0, R1
	ADD	R4, R1
	BL	cas<>(SB)
	BCC	addloop1
	MOVW	R1, ret+8(FP)
	RET

TEXT ·AddUintptr(SB),7,$0
	B	·AddUint32(SB)

TEXT cas64<>(SB),7,$0
	MOVW	$0xffff0f60, PC // __kuser_cmpxchg64: Linux-3.1 and above

TEXT kernelCAS64<>(SB),7,$0
	// int (*__kuser_cmpxchg64_t)(const int64_t *oldval, const int64_t *newval, volatile int64_t *ptr);
	MOVW	addr+0(FP), R2 // ptr
	// make unaligned atomic access panic
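	// (R1 = addr&7; if it is non-zero, the store below writes to that
	// small unmapped address and faults, so a misaligned 64-bit atomic
	// crashes instead of silently being non-atomic.)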
	AND.S	$7, R2, R1
	BEQ	2(PC)
	MOVW	R1, (R1)
	MOVW	$4(FP), R0 // oldval
	MOVW	$12(FP), R1 // newval
	BL	cas64<>(SB)
	MOVW.CS	$1, R0 // C is set if the kernel has changed *ptr
	MOVW.CC	$0, R0
	MOVW	R0, 20(FP)
	RET

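// generalCAS64 is the last-resort path: it repacks the arguments into its
// outgoing argument slots and calls the runtime's emulated 64-bit cas.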
TEXT generalCAS64<>(SB),7,$20
	// bool runtime·cas64(uint64 volatile *addr, uint64 *old, uint64 new)
	MOVW	addr+0(FP), R0
	// make unaligned atomic access panic
	AND.S	$7, R0, R1
	BEQ	2(PC)
	MOVW	R1, (R1)
	MOVW	R0, 4(R13)
	MOVW	$4(FP), R1 // oldval
	MOVW	R1, 8(R13)
	MOVW	newlo+12(FP), R2
	MOVW	R2, 12(R13)
	MOVW	newhi+16(FP), R3
	MOVW	R3, 16(R13)
	BL	runtime·cas64(SB)
	MOVW	R0, 20(FP)
	RET

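// armCAS64 caches a pointer to the best available 64-bit cas routine;
// it is filled in lazily by setupAndCallCAS64 below.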
GLOBL armCAS64(SB), $4

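// Pick the 64-bit cas implementation, remember it in armCAS64, and tail-call
// it for the current request.  Roughly:
//	if(*(int*)0xffff0ffc >= 5)	// __kuser_helper_version
//		use kernelCAS64		// kernel __kuser_cmpxchg64 helper
//	else if(runtime·armArch >= 6)	// LDREXD/STREXD available
//		use ·armCompareAndSwapUint64
//	else
//		use generalCAS64	// runtime-emulated cas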
TEXT setupAndCallCAS64<>(SB),7,$-4
	MOVW	$0xffff0ffc, R0 // __kuser_helper_version
	MOVW	(R0), R0
	// __kuser_cmpxchg64 only present if helper version >= 5
	CMP	$5, R0
	MOVW.CS	$kernelCAS64<>(SB), R1
	MOVW.CS	R1, armCAS64(SB)
	MOVW.CS	R1, PC
	MOVB	runtime·armArch(SB), R0
	// LDREXD, STREXD only present on ARMv6K or higher
	CMP	$6, R0 // TODO(minux): how to differentiate ARMv6 from ARMv6K?
	MOVW.CS	$·armCompareAndSwapUint64(SB), R1
	MOVW.CS	R1, armCAS64(SB)
	MOVW.CS	R1, PC
	// we are out of luck, can only use runtime's emulated 64-bit cas
	MOVW	$generalCAS64<>(SB), R1
	MOVW	R1, armCAS64(SB)
	MOVW	R1, PC

TEXT ·CompareAndSwapInt64(SB),7,$0
	B	·CompareAndSwapUint64(SB)

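// On the first call armCAS64 is still nil, so dispatch through
// setupAndCallCAS64; after that, jump straight to the cached routine.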
TEXT ·CompareAndSwapUint64(SB),7,$-4
	MOVW	armCAS64(SB), R0
	CMP	$0, R0
	MOVW.NE	R0, PC
	B	setupAndCallCAS64<>(SB)

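// addUint64 (like loadUint64 and storeUint64 below) is a Go helper built on
// CompareAndSwapUint64; in this tree it lives in 64bit_linux_arm.go.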
TEXT ·AddInt64(SB),7,$0
	B	·addUint64(SB)

TEXT ·AddUint64(SB),7,$0
	B	·addUint64(SB)

TEXT ·LoadInt32(SB),7,$0
	B	·LoadUint32(SB)

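// Rough sketch of the loop below: read the value and confirm it with a cas
// of the value against itself (kernel_cas as in the sketch near the top),
// retrying until the cas succeeds:
//	for(;;) {
//		v = *addr;
//		if(kernel_cas(addr, v, v))
//			return v;
//	}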
TEXT ·LoadUint32(SB),7,$0
	MOVW	addr+0(FP), R2
loadloop1:
	MOVW	0(R2), R0
	MOVW	R0, R1
	BL	cas<>(SB)
	BCC	loadloop1
	MOVW	R1, val+4(FP)
	RET

TEXT ·LoadInt64(SB),7,$0
	B	·loadUint64(SB)

TEXT ·LoadUint64(SB),7,$0
	B	·loadUint64(SB)

TEXT ·LoadUintptr(SB),7,$0
	B	·LoadUint32(SB)

TEXT ·LoadPointer(SB),7,$0
	B	·LoadUint32(SB)

TEXT ·StoreInt32(SB),7,$0
	B	·StoreUint32(SB)

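// Rough sketch of the loop below: cas from whatever is currently stored to
// the new value, retrying until it succeeds:
//	for(;;) {
//		old = *addr;
//		if(kernel_cas(addr, old, val))
//			return;
//	}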
TEXT ·StoreUint32(SB),7,$0
	MOVW	addr+0(FP), R2
	MOVW	val+4(FP), R1
storeloop1:
	MOVW	0(R2), R0
	BL	cas<>(SB)
	BCC	storeloop1
	RET

TEXT ·StoreInt64(SB),7,$0
	B	·storeUint64(SB)

TEXT ·StoreUint64(SB),7,$0
	B	·storeUint64(SB)

TEXT ·StoreUintptr(SB),7,$0
	B	·StoreUint32(SB)

TEXT ·StorePointer(SB),7,$0
	B	·StoreUint32(SB)