...
Run Format

Text file src/sync/atomic/asm_linux_arm.s

Documentation: sync/atomic

     1	// Copyright 2011 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	// +build !race
     6	
     7	#include "textflag.h"
     8	
     9	// Linux/ARM atomic operations.
    10	
    11	// Because there is so much variation in ARM devices,
    12	// the Linux kernel provides an appropriate compare-and-swap
    13	// implementation at address 0xffff0fc0.  Caller sets:
    14	//	R0 = old value
    15	//	R1 = new value
    16	//	R2 = addr
    17	//	LR = return address
    18	// The function returns with CS true if the swap happened.
    19	// http://lxr.linux.no/linux+v2.6.37.2/arch/arm/kernel/entry-armv.S#L850
    20	// On older kernels (before 2.6.24) the function can incorrectly
    21	// report a conflict, so we have to double-check the compare ourselves
    22	// and retry if necessary.
    23	//
    24	// http://git.kernel.org/?p=linux/kernel/git/torvalds/linux-2.6.git;a=commit;h=b49c0f24cf6744a3f4fd09289fe7cade349dead5
    25	//
// cas<> tail-jumps into the kernel's __kuser_cmpxchg helper by
// writing its fixed address into R15 (the hardware PC); the helper
// returns straight to LR, so nothing after the MOVW ever executes.
// See the calling convention documented above: R0 = old, R1 = new,
// R2 = addr, LR = return address; CS set on a successful swap.
TEXT cas<>(SB),NOSPLIT,$0
	MOVW	$0xffff0fc0, R15
    28	
// CompareAndSwapInt32 shares the uint32 implementation: CAS compares
// and stores raw 32-bit patterns, so signedness is irrelevant.
TEXT ·CompareAndSwapInt32(SB),NOSPLIT,$0
	B	·CompareAndSwapUint32(SB)
    31	
// Implement using kernel cas for portability.
// Frame (13 bytes): addr+0(FP), old+4(FP), new+8(FP), and the
// one-byte result swapped+12(FP) (1 if the swap happened, else 0).
TEXT ·CompareAndSwapUint32(SB),NOSPLIT,$0-13
	MOVW	addr+0(FP), R2
	// trigger potential paging fault here,
	// because we don't know how to traceback through __kuser_cmpxchg
	MOVW	(R2), R0
	MOVW	old+4(FP), R0
casagain:
	MOVW	new+8(FP), R1
	BL	cas<>(SB)
	BCC	cascheck	// carry clear: kernel reported failure; verify it
	MOVW	$1, R0	// swap happened
casret:
	MOVB	R0, swapped+12(FP)
	RET
cascheck:
	// Kernel lies; double-check.
	// Kernels before 2.6.24 can report a spurious conflict (see the
	// commit referenced above): retry only while *addr still equals
	// old; otherwise the failure is genuine.
	MOVW	addr+0(FP), R2
	MOVW	old+4(FP), R0
	MOVW	0(R2), R3
	CMP	R0, R3
	BEQ	casagain
	MOVW	$0, R0	// report failure
	B	casret
    56	
// CompareAndSwapUintptr: uintptr is 32 bits on ARM, so reuse the
// uint32 implementation.
TEXT ·CompareAndSwapUintptr(SB),NOSPLIT,$0
	B	·CompareAndSwapUint32(SB)
    59	
// AddInt32 shares the uint32 implementation: two's-complement
// addition is the same bit operation for signed and unsigned.
TEXT ·AddInt32(SB),NOSPLIT,$0
	B	·AddUint32(SB)
    62	
// Implement using kernel cas for portability.
// AddUint32 atomically adds delta to *addr and returns the new value:
// load the current value, compute current+delta, CAS it in, and
// retry until the CAS succeeds (carry set).
TEXT ·AddUint32(SB),NOSPLIT,$0-12
	MOVW	addr+0(FP), R2
	MOVW	delta+4(FP), R4	// keep delta in R4 across the cas<> calls
addloop1:
	MOVW	0(R2), R0	// R0 = current value (the CAS "old")
	MOVW	R0, R1
	ADD	R4, R1	// R1 = current + delta (the CAS "new")
	BL	cas<>(SB)
	BCC	addloop1	// lost a race; reload and retry
	MOVW	R1, new+8(FP)	// return the value we installed
	RET
    75	
// AddUintptr: uintptr is 32 bits on ARM, so reuse the uint32
// implementation.
TEXT ·AddUintptr(SB),NOSPLIT,$0
	B	·AddUint32(SB)
    78	
// SwapInt32 shares the uint32 implementation; the exchange is a raw
// 32-bit operation, so signedness does not matter.
TEXT ·SwapInt32(SB),NOSPLIT,$0
	B	·SwapUint32(SB)
    81	
// Implement using kernel cas for portability.
// SwapUint32 atomically stores new into *addr and returns the
// previous value: CAS(current -> new) in a loop until it succeeds.
TEXT ·SwapUint32(SB),NOSPLIT,$0-12
	MOVW	addr+0(FP), R2
	MOVW	new+4(FP), R1
swaploop1:
	MOVW	0(R2), R0	// R0 = current value (the CAS "old")
	MOVW	R0, R4 // cas smashes R0
	BL	cas<>(SB)
	BCC	swaploop1	// lost a race; retry
	MOVW	R4, old+8(FP)	// return the value we swapped out
	RET
    93	
// SwapUintptr: uintptr is 32 bits on ARM, so reuse the uint32
// implementation.
TEXT ·SwapUintptr(SB),NOSPLIT,$0
	B	·SwapUint32(SB)
    96	
// cas64<> tail-jumps into the kernel's __kuser_cmpxchg64 helper at
// its fixed address; as with cas<>, the helper returns directly to
// the caller's LR.
TEXT cas64<>(SB),NOSPLIT,$0
	MOVW	$0xffff0f60, R15 // R15 = hardware PC. __kuser_cmpxchg64: Linux-3.1 and above
    99	
// kernelCAS64<> implements 64-bit compare-and-swap through the
// kernel __kuser_cmpxchg64 helper. Frame: addr+0(FP),
// oldval+4(FP) (int64), newval+12(FP) (int64), ret+20(FP).
TEXT kernelCAS64<>(SB),NOSPLIT,$0-21
	// int (*__kuser_cmpxchg64_t)(const int64_t *oldval, const int64_t *newval, volatile int64_t *ptr);
	MOVW	addr+0(FP), R2 // ptr
	// trigger potential paging fault here,
	// because we don't know how to traceback through __kuser_cmpxchg64
	MOVW	(R2), R0
	// make unaligned atomic access panic
	AND.S	$7, R2, R1	// R1 = addr & 7; zero iff 8-byte aligned
	BEQ 	2(PC)	// aligned: skip the deliberate fault
	MOVW	R1, (R1)	// store to a near-nil address to fault on misalignment
	// The helper takes *pointers* to old and new, so pass the
	// addresses of the argument slots in our frame.
	MOVW	$oldval+4(FP), R0
	MOVW	$newval+12(FP), R1
	BL	cas64<>(SB)
	MOVW.CS	$1, R0 // C is set if the kernel has changed *ptr
	MOVW.CC	$0, R0
	MOVW	R0, ret+20(FP)
	RET
   117	
// generalCAS64 is the last-resort 64-bit CAS: it forwards to the
// runtime's software-emulated Cas64.
TEXT ·generalCAS64(SB),NOSPLIT,$0-21
	B  	runtime∕internal∕atomic·Cas64(SB)
   120	
   121	GLOBL armCAS64(SB), NOPTR, $4
   122	
// setupAndCallCAS64<> selects the best available 64-bit CAS routine,
// caches its address in armCAS64, and tail-jumps to it; because the
// frame is $-4 (no frame), the caller's arguments are still in place
// for the chosen routine. Preference order, best first:
//   1. kernel __kuser_cmpxchg64 helper (helper version >= 5)
//   2. native LDREXD/STREXD (·armCompareAndSwapUint64, ARMv6K+)
//   3. runtime's emulated ·generalCAS64
// Each MOVW.CS ... R15 is a conditional tail-jump: taken only when
// the preceding test succeeded; otherwise fall through to the next
// candidate.
TEXT setupAndCallCAS64<>(SB),NOSPLIT,$-4-21
	MOVW	$0xffff0ffc, R0 // __kuser_helper_version
	MOVW	(R0), R0
	// __kuser_cmpxchg64 only present if helper version >= 5
	CMP 	$5, R0
	MOVW.CS	$kernelCAS64<>(SB), R1
	MOVW.CS	R1, armCAS64(SB)
	MOVW.CS	R1, R15 // R15 = hardware PC
	MOVB	runtime·armArch(SB), R0
	// LDREXD, STREXD only present on ARMv6K or higher
	CMP	$6, R0 // TODO(minux): how to differentiate ARMv6 with ARMv6K?
	MOVW.CS	$·armCompareAndSwapUint64(SB), R1
	MOVW.CS	R1, armCAS64(SB)
	MOVW.CS	R1, R15
	// we are out of luck, can only use runtime's emulated 64-bit cas
	MOVW	$·generalCAS64(SB), R1
	MOVW	R1, armCAS64(SB)
	MOVW	R1, R15
   141	
// CompareAndSwapInt64 shares the uint64 implementation; CAS operates
// on raw 64-bit patterns, so signedness is irrelevant.
TEXT ·CompareAndSwapInt64(SB),NOSPLIT,$0
	B   	·CompareAndSwapUint64(SB)
   144	
// CompareAndSwapUint64 dispatches through the armCAS64 cache: if a
// routine was already selected, tail-jump straight to it; otherwise
// run the one-time setup, which selects, caches, and performs this
// call. Frameless ($-4) so the FP arguments pass through unchanged.
TEXT ·CompareAndSwapUint64(SB),NOSPLIT,$-4-21
	MOVW	armCAS64(SB), R0
	CMP 	$0, R0	// cache still empty?
	MOVW.NE	R0, R15 // R15 = hardware PC
	B	setupAndCallCAS64<>(SB)
   150	
// AddInt64 forwards to ·addUint64, defined elsewhere in the package.
TEXT ·AddInt64(SB),NOSPLIT,$0
	B	·addUint64(SB)
   153	
// AddUint64 forwards to ·addUint64, defined elsewhere in the package.
TEXT ·AddUint64(SB),NOSPLIT,$0
	B	·addUint64(SB)
   156	
// SwapInt64 forwards to ·swapUint64, defined elsewhere in the package.
TEXT ·SwapInt64(SB),NOSPLIT,$0
	B	·swapUint64(SB)
   159	
// SwapUint64 forwards to ·swapUint64, defined elsewhere in the package.
TEXT ·SwapUint64(SB),NOSPLIT,$0
	B	·swapUint64(SB)
   162	
// LoadInt32 shares the uint32 implementation; the load is a raw
// 32-bit operation, so signedness does not matter.
TEXT ·LoadInt32(SB),NOSPLIT,$0
	B	·LoadUint32(SB)
   165	
// LoadUint32 performs an atomic load by CASing the observed value
// with itself: a successful CAS(v -> v) proves v was read atomically
// and leaves *addr unchanged.
TEXT ·LoadUint32(SB),NOSPLIT,$0-8
	MOVW	addr+0(FP), R2
loadloop1:
	MOVW	0(R2), R0	// R0 = candidate value
	MOVW	R0, R1	// new = old, so the CAS is a no-op on success
	BL	cas<>(SB)
	BCC	loadloop1	// value changed underneath us; retry
	MOVW	R1, val+4(FP)
	RET
   175	
// LoadInt64 forwards to ·loadUint64, defined elsewhere in the package.
TEXT ·LoadInt64(SB),NOSPLIT,$0
	B	·loadUint64(SB)
   178	
// LoadUint64 forwards to ·loadUint64, defined elsewhere in the package.
TEXT ·LoadUint64(SB),NOSPLIT,$0
	B	·loadUint64(SB)
   181	
// LoadUintptr: uintptr is 32 bits on ARM, so reuse the uint32
// implementation.
TEXT ·LoadUintptr(SB),NOSPLIT,$0
	B	·LoadUint32(SB)
   184	
// LoadPointer: pointers are 32 bits on ARM, so reuse the uint32
// implementation.
TEXT ·LoadPointer(SB),NOSPLIT,$0
	B	·LoadUint32(SB)
   187	
// StoreInt32 shares the uint32 implementation; the store is a raw
// 32-bit operation, so signedness does not matter.
TEXT ·StoreInt32(SB),NOSPLIT,$0
	B	·StoreUint32(SB)
   190	
// StoreUint32 performs an atomic store by repeatedly CASing the
// currently observed value with val until the CAS succeeds.
TEXT ·StoreUint32(SB),NOSPLIT,$0-8
	MOVW	addr+0(FP), R2
	MOVW	val+4(FP), R1
storeloop1:
	MOVW	0(R2), R0	// R0 = current value (the CAS "old")
	BL	cas<>(SB)
	BCC	storeloop1	// lost a race; retry
	RET
   199	
// StoreInt64 forwards to ·storeUint64, defined elsewhere in the package.
TEXT ·StoreInt64(SB),NOSPLIT,$0
	B	·storeUint64(SB)
   202	
// StoreUint64 forwards to ·storeUint64, defined elsewhere in the package.
TEXT ·StoreUint64(SB),NOSPLIT,$0
	B	·storeUint64(SB)
   205	
// StoreUintptr: uintptr is 32 bits on ARM, so reuse the uint32
// implementation.
TEXT ·StoreUintptr(SB),NOSPLIT,$0
	B	·StoreUint32(SB)

View as plain text