// Source: github.com/varialus/godfly@v0.0.0-20130904042352-1934f9f095ab/src/pkg/sync/atomic/asm_linux_arm.s

     1  // Copyright 2011 The Go Authors.  All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // +build !race
     6  
     7  #include "../../../cmd/ld/textflag.h"
     8  
     9  // Linux/ARM atomic operations.
    10  
    11  // Because there is so much variation in ARM devices,
    12  // the Linux kernel provides an appropriate compare-and-swap
    13  // implementation at address 0xffff0fc0.  Caller sets:
    14  //	R0 = old value
    15  //	R1 = new value
    16  //	R2 = addr
    17  //	LR = return address
    18  // The function returns with CS true if the swap happened.
    19  // http://lxr.linux.no/linux+v2.6.37.2/arch/arm/kernel/entry-armv.S#L850
    20  // On older kernels (before 2.6.24) the function can incorrectly
    21  // report a conflict, so we have to double-check the compare ourselves
    22  // and retry if necessary.
    23  //
    24  // http://git.kernel.org/?p=linux/kernel/git/torvalds/linux-2.6.git;a=commit;h=b49c0f24cf6744a3f4fd09289fe7cade349dead5
    25  //
         // cas<> tail-jumps to the kernel's __kuser_cmpxchg helper at the
         // fixed address 0xffff0fc0 (contract described in the comment block
         // above: R0 = old, R1 = new, R2 = addr, LR = return address).
         // Writing the address into PC is the jump; the helper returns
         // directly to our caller via LR with the C flag set iff the swap
         // happened.  Only R0 and the flags are treated as smashed by the
         // helper (see the "cas smashes R0" note in SwapUint32 below).
     26  TEXT cas<>(SB),NOSPLIT,$0
     27  	MOVW	$0xffff0fc0, PC
    28  
         // CompareAndSwapInt32 is bit-identical to the uint32 version, so it
         // tail-calls it (B, not BL: the callee reuses our argument frame and
         // returns straight to our caller).
     29  TEXT ·CompareAndSwapInt32(SB),NOSPLIT,$0
     30  	B	·CompareAndSwapUint32(SB)
    31  
     32  // Implement using kernel cas for portability.
         //
         // func CompareAndSwapUint32(addr *uint32, old, new uint32) (swapped bool)
         // Frame $0-13: addr at +0, old at +4, new at +8, bool result at +12.
     33  TEXT ·CompareAndSwapUint32(SB),NOSPLIT,$0-13
     34  	MOVW	addr+0(FP), R2
     35  	// trigger potential paging fault here,
     36  	// because we don't know how to traceback through __kuser_cmpxchg
     37  	MOVW	(R2), R0
     38  	MOVW	old+4(FP), R0
     39  casagain:
     40  	MOVW	new+8(FP), R1
     41  	BL	cas<>(SB)	// C set iff the kernel performed the swap
     42  	BCC	cascheck	// C clear: real conflict, or a pre-2.6.24 kernel lie
     43  	MOVW	$1, R0	// swapped = true
     44  casret:
     45  	MOVW	R0, ret+12(FP)
     46  	RET
     47  cascheck:
     48  	// Kernel lies; double-check.
         // Old kernels (before 2.6.24, see the commit referenced in the
         // header) can report a conflict even when *addr == old.  Only
         // report failure once we have actually observed *addr != old;
         // otherwise the failure was spurious, so retry the cas.
     49  	MOVW	addr+0(FP), R2
     50  	MOVW	old+4(FP), R0
     51  	MOVW	0(R2), R3
     52  	CMP	R0, R3
     53  	BEQ	casagain	// still equal: spurious failure, retry
     54  	MOVW	$0, R0	// swapped = false
     55  	B	casret
    56  
         // uintptr and unsafe.Pointer are 32 bits on ARM, so both reduce to
         // the uint32 compare-and-swap.
     57  TEXT ·CompareAndSwapUintptr(SB),NOSPLIT,$0
     58  	B	·CompareAndSwapUint32(SB)
     59  
     60  TEXT ·CompareAndSwapPointer(SB),NOSPLIT,$0
     61  	B	·CompareAndSwapUint32(SB)
    62  
         // Two's-complement addition is the same for int32 and uint32.
     63  TEXT ·AddInt32(SB),NOSPLIT,$0
     64  	B	·AddUint32(SB)
    65  
     66  // Implement using kernel cas for portability.
         //
         // func AddUint32(addr *uint32, delta uint32) (new uint32)
         // Frame $0-12: addr at +0, delta at +4, result at +8.
         // R2 (addr) and R4 (delta) are relied upon to survive the helper
         // call; only R0 and the flags are treated as smashed by cas<>.
     67  TEXT ·AddUint32(SB),NOSPLIT,$0-12
     68  	MOVW	addr+0(FP), R2
     69  	MOVW	delta+4(FP), R4
     70  addloop1:
     71  	MOVW	0(R2), R0	// R0 = current value (cas "old")
     72  	MOVW	R0, R1
     73  	ADD	R4, R1	// R1 = old + delta (cas "new")
     74  	BL	cas<>(SB)
     75  	BCC	addloop1	// lost the race: reload and retry
     76  	MOVW	R1, ret+8(FP)	// return the value we installed
     77  	RET
    78  
         // uintptr is 32 bits on ARM.
     79  TEXT ·AddUintptr(SB),NOSPLIT,$0
     80  	B	·AddUint32(SB)
     81  
         // Swap is type-agnostic at the bit level; reuse the uint32 version.
     82  TEXT ·SwapInt32(SB),NOSPLIT,$0
     83  	B	·SwapUint32(SB)
    84  
     85  // Implement using kernel cas for portability.
         //
         // func SwapUint32(addr *uint32, new uint32) (old uint32)
         // Frame $0-12: addr at +0, new at +4, old result at +8.
         // Loop: read the current value and cas it to `new`; on success the
         // saved copy is the value that was atomically replaced.
     86  TEXT ·SwapUint32(SB),NOSPLIT,$0-12
     87  	MOVW	addr+0(FP), R2
     88  	MOVW	new+4(FP), R1
     89  swaploop1:
     90  	MOVW	0(R2), R0
     91  	MOVW	R0, R4 // cas smashes R0
     92  	BL	cas<>(SB)
     93  	BCC	swaploop1	// lost the race: reload and retry
     94  	MOVW	R4, old+8(FP)	// return the previous value
     95  	RET
    96  
         // uintptr and unsafe.Pointer are 32 bits on ARM.
     97  TEXT ·SwapUintptr(SB),NOSPLIT,$0
     98  	B	·SwapUint32(SB)
     99  
    100  TEXT ·SwapPointer(SB),NOSPLIT,$0
    101  	B	·SwapUint32(SB)
   102  
         // cas64<> tail-jumps to the kernel's 64-bit compare-and-swap helper.
         // Only callable once setupAndCallCAS64<> has verified that the
         // kernel helper version is >= 5 (i.e. the helper exists).
    103  TEXT cas64<>(SB),NOSPLIT,$0
    104  	MOVW	$0xffff0f60, PC // __kuser_cmpxchg64: Linux-3.1 and above
   105  
         // kernelCAS64<> implements 64-bit compare-and-swap via the kernel's
         // __kuser_cmpxchg64 helper.
         // Frame $0-21: addr at +0, old (uint64) at +4..11, new at +12..19,
         // bool result at +20 — the same frame as ·CompareAndSwapUint64,
         // which dispatches here.
    106  TEXT kernelCAS64<>(SB),NOSPLIT,$0-21
    107  	// int (*__kuser_cmpxchg64_t)(const int64_t *oldval, const int64_t *newval, volatile int64_t *ptr);
    108  	MOVW	addr+0(FP), R2 // ptr
    109  	// trigger potential paging fault here,
    110  	// because we don't know how to traceback through __kuser_cmpxchg64
    111  	MOVW	(R2), R0
    112  	// make unaligned atomic access panic
    113  	AND.S	$7, R2, R1	// R1 = addr & 7; sets Z
    114  	BEQ 	2(PC)	// aligned: skip the deliberate fault
    115  	MOVW	R1, (R1)	// R1 is in 1..7 here: store to a low unmapped address faults
    116  	MOVW	$4(FP), R0 // oldval
    117  	MOVW	$12(FP), R1 // newval
    118  	BL	cas64<>(SB)
    119  	MOVW.CS	$1, R0 // C is set if the kernel has changed *ptr
    120  	MOVW.CC	$0, R0
    121  	MOVW	R0, 20(FP)	// store the bool result
    122  	RET
   123  
         // generalCAS64<> is the fallback 64-bit compare-and-swap for kernels
         // and CPUs with no native support: it marshals its arguments onto
         // the outgoing stack frame and calls the runtime's emulated cas64.
         // Frame $20-21: 20 bytes of outgoing call space (args at 4..16(R13)),
         // same 21-byte incoming argument frame as ·CompareAndSwapUint64.
    124  TEXT generalCAS64<>(SB),NOSPLIT,$20-21
    125  	// bool runtime·cas64(uint64 volatile *addr, uint64 *old, uint64 new)
    126  	MOVW	addr+0(FP), R0
    127  	// make unaligned atomic access panic
    128  	AND.S	$7, R0, R1	// R1 = addr & 7; sets Z
    129  	BEQ 	2(PC)	// aligned: skip the deliberate fault
    130  	MOVW	R1, (R1)	// R1 is in 1..7 here: store to a low unmapped address faults
    131  	MOVW	R0, 4(R13)	// arg 0: addr
    132  	MOVW	$4(FP), R1 // oldval
    133  	MOVW	R1, 8(R13)	// arg 1: pointer to old in our frame
    134  	MOVW	newlo+12(FP), R2
    135  	MOVW	R2, 12(R13)	// arg 2 (low word): new
    136  	MOVW	newhi+16(FP), R3
    137  	MOVW	R3, 16(R13)	// arg 2 (high word)
    138  	BL  	runtime·cas64(SB)
         // NOTE(review): the result is taken from R0, so runtime·cas64 is
         // relied upon to return its bool in R0 on ARM — confirm against the
         // matching runtime for this tree.
    139  	MOVW	R0, 20(FP)
    140  	RET
   141  
         // armCAS64 caches the 64-bit cas implementation chosen by
         // setupAndCallCAS64<> (a 4-byte function pointer; zero until the
         // first CompareAndSwapUint64 call).
    142  GLOBL armCAS64(SB), $4
    143  
         // setupAndCallCAS64<> picks the best available 64-bit cas, records
         // it in armCAS64, and tail-jumps to it (MOVW Rn, PC), still on the
         // caller's ·CompareAndSwapUint64 argument frame.  Preference order:
         //   1. kernel __kuser_cmpxchg64 (helper version >= 5, Linux 3.1+);
         //   2. native LDREXD/STREXD (·armCompareAndSwapUint64) on ARMv6+;
         //   3. the runtime's emulated cas (generalCAS64<>).
         // After each CMP, CS is the unsigned >= condition; a taken
         // MOVW.CS Rn, PC jumps away, so falling through means "not chosen".
         // NOTE(review): upstream Go declares this frame as $-4-21; verify
         // the $-21 here against the original tree (this copy may be garbled).
    144  TEXT setupAndCallCAS64<>(SB),NOSPLIT,$-21
    145  	MOVW	$0xffff0ffc, R0 // __kuser_helper_version
    146  	MOVW	(R0), R0
    147  	// __kuser_cmpxchg64 only present if helper version >= 5
    148  	CMP 	$5, R0
    149  	MOVW.CS	$kernelCAS64<>(SB), R1
    150  	MOVW.CS	R1, armCAS64(SB)	// cache the choice for future calls
    151  	MOVW.CS	R1, PC	// tail-jump to it
    152  	MOVB	runtime·armArch(SB), R0
    153  	// LDREXD, STREXD only present on ARMv6K or higher
    154  	CMP	$6, R0 // TODO(minux): how to differentiate ARMv6 with ARMv6K?
    155  	MOVW.CS	$·armCompareAndSwapUint64(SB), R1
    156  	MOVW.CS	R1, armCAS64(SB)
    157  	MOVW.CS	R1, PC
    158  	// we are out of luck, can only use runtime's emulated 64-bit cas
    159  	MOVW	$generalCAS64<>(SB), R1
    160  	MOVW	R1, armCAS64(SB)
    161  	MOVW	R1, PC
   162  
         // Bit-identical to the uint64 version.
    163  TEXT ·CompareAndSwapInt64(SB),NOSPLIT,$0
    164  	B   	·CompareAndSwapUint64(SB)
    165  
         // func CompareAndSwapUint64(addr *uint64, old, new uint64) (swapped bool)
         // Fast path: if an implementation has already been cached in
         // armCAS64, jump straight to it (it runs on our argument frame).
         // Otherwise fall through to the one-time setup, which selects an
         // implementation and then performs this call itself.
    166  TEXT ·CompareAndSwapUint64(SB),NOSPLIT,$-21
    167  	MOVW	armCAS64(SB), R0
    168  	CMP 	$0, R0
    169  	MOVW.NE	R0, PC	// cached: tail-jump to the chosen cas64
    170  	B	setupAndCallCAS64<>(SB)
   171  
         // 64-bit add and swap are forwarded to the lowercase helpers
         // ·addUint64 / ·swapUint64, which are defined elsewhere (not in this
         // file) — presumably built on the cas64 dispatch above; confirm in
         // the sibling source files.
    172  TEXT ·AddInt64(SB),NOSPLIT,$0
    173  	B	·addUint64(SB)
    174  
    175  TEXT ·AddUint64(SB),NOSPLIT,$0
    176  	B	·addUint64(SB)
    177  
    178  TEXT ·SwapInt64(SB),NOSPLIT,$0
    179  	B	·swapUint64(SB)
    180  
    181  TEXT ·SwapUint64(SB),NOSPLIT,$0
    182  	B	·swapUint64(SB)
   183  
         // Bit-identical to the uint32 version.
    184  TEXT ·LoadInt32(SB),NOSPLIT,$0
    185  	B	·LoadUint32(SB)
    186  
         // func LoadUint32(addr *uint32) (val uint32)
         // Frame $0-8: addr at +0, result at +4.
         // Implemented as cas(addr, v, v): read the current value, then swap
         // it with itself.  When the cas succeeds, R1 holds a value that was
         // atomically observed at addr; on failure (C clear) retry.
    187  TEXT ·LoadUint32(SB),NOSPLIT,$0-8
    188  	MOVW	addr+0(FP), R2
    189  loadloop1:
    190  	MOVW	0(R2), R0
    191  	MOVW	R0, R1	// new == old: swap value with itself
    192  	BL	cas<>(SB)
    193  	BCC	loadloop1
    194  	MOVW	R1, val+4(FP)
    195  	RET
   196  
         // 64-bit loads go through ·loadUint64, defined elsewhere (not in
         // this file); 32-bit-sized types reuse LoadUint32.
    197  TEXT ·LoadInt64(SB),NOSPLIT,$0
    198  	B	·loadUint64(SB)
    199  
    200  TEXT ·LoadUint64(SB),NOSPLIT,$0
    201  	B	·loadUint64(SB)
    202  
    203  TEXT ·LoadUintptr(SB),NOSPLIT,$0
    204  	B	·LoadUint32(SB)
    205  
    206  TEXT ·LoadPointer(SB),NOSPLIT,$0
    207  	B	·LoadUint32(SB)
   208  
         // Bit-identical to the uint32 version.
    209  TEXT ·StoreInt32(SB),NOSPLIT,$0
    210  	B	·StoreUint32(SB)
    211  
         // func StoreUint32(addr *uint32, val uint32)
         // Frame $0-8: addr at +0, val at +4; no result.
         // Implemented as a cas loop: read the current value as "old" and
         // cas it to val, retrying until the store lands atomically.
    212  TEXT ·StoreUint32(SB),NOSPLIT,$0-8
    213  	MOVW	addr+0(FP), R2
    214  	MOVW	val+4(FP), R1
    215  storeloop1:
    216  	MOVW	0(R2), R0	// R0 = current value (cas "old")
    217  	BL	cas<>(SB)
    218  	BCC	storeloop1	// lost the race: reload and retry
    219  	RET
   220  
         // 64-bit stores go through ·storeUint64, defined elsewhere (not in
         // this file); 32-bit-sized types reuse StoreUint32.
    221  TEXT ·StoreInt64(SB),NOSPLIT,$0
    222  	B	·storeUint64(SB)
    223  
    224  TEXT ·StoreUint64(SB),NOSPLIT,$0
    225  	B	·storeUint64(SB)
    226  
    227  TEXT ·StoreUintptr(SB),NOSPLIT,$0
    228  	B	·StoreUint32(SB)
    229  
    230  TEXT ·StorePointer(SB),NOSPLIT,$0
    231  	B	·StoreUint32(SB)