author		Ondřej Surý <ondrej@sury.org>	2011-04-26 09:55:32 +0200
committer	Ondřej Surý <ondrej@sury.org>	2011-04-26 09:55:32 +0200
commit		7b15ed9ef455b6b66c6b376898a88aef5d6a9970 (patch)
tree		3ef530baa80cdf29436ba981f5783be6b4d2202b /src/pkg/sync/atomic/asm_arm.s
parent		50104cc32a498f7517a51c8dc93106c51c7a54b4 (diff)
download	golang-7b15ed9ef455b6b66c6b376898a88aef5d6a9970.tar.gz

Imported Upstream version 2011.04.13 (tag: upstream/2011.04.13)
Diffstat (limited to 'src/pkg/sync/atomic/asm_arm.s')
-rw-r--r--	src/pkg/sync/atomic/asm_arm.s	41
1 file changed, 41 insertions(+), 0 deletions(-)
diff --git a/src/pkg/sync/atomic/asm_arm.s b/src/pkg/sync/atomic/asm_arm.s
index 1ae0a995e..3363bbcf1 100644
--- a/src/pkg/sync/atomic/asm_arm.s
+++ b/src/pkg/sync/atomic/asm_arm.s
@@ -25,6 +25,7 @@ casfail:
RET
TEXT ·armCompareAndSwapUint64(SB),7,$0
+ BL fastCheck64<>(SB)
MOVW valptr+0(FP), R1
MOVW oldlo+4(FP), R2
MOVW oldhi+8(FP), R3
@@ -62,6 +63,7 @@ addloop:
RET
TEXT ·armAddUint64(SB),7,$0
+ BL fastCheck64<>(SB)
MOVW valptr+0(FP), R1
MOVW deltalo+4(FP), R2
MOVW deltahi+8(FP), R3
@@ -76,3 +78,42 @@ add64loop:
MOVW R4, retlo+12(FP)
MOVW R5, rethi+16(FP)
RET
+
+// Check for broken 64-bit LDREXD as found in QEMU.
+// LDREXD followed by immediate STREXD should succeed.
+// If it fails, try a few times just to be sure (maybe our thread got
+// rescheduled between the two instructions) and then panic.
+// A bug in some copies of QEMU makes STREXD never succeed,
+// which will make uses of the 64-bit atomic operations loop forever.
+// If things are working, set ok64<> to avoid future checks.
+// https://bugs.launchpad.net/qemu/+bug/670883.
+TEXT check64<>(SB),7,$8
+ MOVW $10, R1
+loop:
+ LDREXD (SP), R2
+ STREXD R2, (SP), R0
+ CMP $0, R0
+ BEQ ok
+ SUB $1, R1
+ CMP $0, R1
+ BNE loop
+ // Must be buggy QEMU.
+ BL ·panic64(SB)
+ok:
+ RET
+
+// Fast, cached version of check. No frame, just MOVW CMP RET after first time.
+TEXT fastCheck64<>(SB),7,$-4
+ MOVW ok64<>(SB), R0
+ CMP $0, R0 // have we been here before?
+ RET.NE
+ B slowCheck64<>(SB)
+
+TEXT slowCheck64<>(SB),7,$0
+ BL check64<>(SB)
+ // Still here, must be okay.
+ MOVW $1, R0
+ MOVW R0, ok64<>(SB)
+ RET
+
+GLOBL ok64<>(SB), $4
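
For readers who don't speak Plan 9 ARM assembly, here is a minimal Go sketch of the logic check64<> implements. The tryStorePair helper is hypothetical, standing in for the LDREXD/STREXD pair; on working hardware the exclusive store eventually succeeds, while the buggy QEMU fails it every time.

package main

import "fmt"

// tryStorePair is a hypothetical stand-in for the LDREXD/STREXD pair;
// it reports whether the exclusive store succeeded (R0 == 0 in the
// assembly). Here it simulates a working CPU.
func tryStorePair() bool { return true }

// check64 mirrors check64<>(SB): retry the exclusive load/store a few
// times, since a single failure may only mean the thread was
// rescheduled between the two instructions, and panic if it never
// succeeds.
func check64() {
	for tries := 10; tries > 0; tries-- {
		if tryStorePair() {
			return // the "ok" label in the assembly
		}
	}
	// Ten straight failures: must be buggy QEMU.
	panic("sync/atomic: 64-bit exclusive stores never succeed")
}

func main() {
	check64()
	fmt.Println("64-bit LDREXD/STREXD looks healthy")
}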
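
The fastCheck64<>/slowCheck64<> split is a check-once pattern: after the first successful probe, the hot path is just a load, a compare, and a conditional return (the MOVW/CMP/RET.NE sequence). A sketch of the same pattern in Go, with check64 reduced to a stub:

package main

import "fmt"

// ok64 plays the role of the ok64<> global: zero until the first
// successful check. The benign race here matches the assembly; at
// worst the slow path runs more than once, with the same outcome.
var ok64 uint32

// check64 stands in for the slow probe from the previous sketch.
func check64() { /* probe LDREXD/STREXD, panic if broken */ }

// fastCheck64 mirrors fastCheck64<> plus slowCheck64<>: consult the
// cached flag first and fall through to the real check only once.
func fastCheck64() {
	if ok64 != 0 { // have we been here before?
		return
	}
	check64()
	// Still here, must be okay.
	ok64 = 1
}

func main() {
	fastCheck64() // slow path: runs check64, then sets the flag
	fastCheck64() // fast path: load, compare, return
	fmt.Println("ok64 =", ok64)
}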
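
The effect of the two added BL instructions is that the first 64-bit atomic operation a program performs on ARM runs the probe. Under an affected QEMU, a program like the following panics at startup with a clear message instead of spinning forever in the 64-bit retry loops:

package main

import (
	"fmt"
	"sync/atomic"
)

func main() {
	var n uint64
	// On ARM this entry point now begins with BL fastCheck64<>(SB).
	atomic.AddUint64(&n, 1)
	fmt.Println("n =", n)
}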