Diffstat (limited to 'src/pkg/sync/atomic/asm_arm.s')
-rw-r--r--  src/pkg/sync/atomic/asm_arm.s  16
1 file changed, 16 insertions(+), 0 deletions(-)
diff --git a/src/pkg/sync/atomic/asm_arm.s b/src/pkg/sync/atomic/asm_arm.s
index 4faf5b5d9..a0525881e 100644
--- a/src/pkg/sync/atomic/asm_arm.s
+++ b/src/pkg/sync/atomic/asm_arm.s
@@ -29,6 +29,10 @@ casfail:
TEXT ·armCompareAndSwapUint64(SB),7,$0
BL fastCheck64<>(SB)
MOVW addr+0(FP), R1
+ // make unaligned atomic access panic
+ AND.S $7, R1, R2
+ BEQ 2(PC)
+ MOVW R2, (R2)
MOVW oldlo+4(FP), R2
MOVW oldhi+8(FP), R3
MOVW newlo+12(FP), R4
@@ -67,6 +71,10 @@ addloop:
TEXT ·armAddUint64(SB),7,$0
BL fastCheck64<>(SB)
MOVW addr+0(FP), R1
+ // make unaligned atomic access panic
+ AND.S $7, R1, R2
+ BEQ 2(PC)
+ MOVW R2, (R2)
MOVW deltalo+4(FP), R2
MOVW deltahi+8(FP), R3
add64loop:
@@ -84,6 +92,10 @@ add64loop:
TEXT ·armLoadUint64(SB),7,$0
BL fastCheck64<>(SB)
MOVW addr+0(FP), R1
+ // make unaligned atomic access panic
+ AND.S $7, R1, R2
+ BEQ 2(PC)
+ MOVW R2, (R2)
load64loop:
LDREXD (R1), R2 // loads R2 and R3
STREXD R2, (R1), R0 // stores R2 and R3
@@ -96,6 +108,10 @@ load64loop:
TEXT ·armStoreUint64(SB),7,$0
BL fastCheck64<>(SB)
MOVW addr+0(FP), R1
+ // make unaligned atomic access panic
+ AND.S $7, R1, R2
+ BEQ 2(PC)
+ MOVW R2, (R2)
MOVW vallo+4(FP), R2
MOVW valhi+8(FP), R3
store64loop:
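
Each of the four 64-bit entry points gets the same three-instruction guard. AND.S $7, R1, R2 extracts the low three bits of the operand address and sets the condition flags; BEQ 2(PC) skips the next instruction when the address is 8-byte aligned; otherwise MOVW R2, (R2) stores through an address in the range 1..7, which faults and crashes the program before LDREXD/STREXD is reached with an unaligned address. Seen from Go, the effect is roughly the sketch below. This is an illustration only, not part of the change: buf, off, and p are names invented here, the misaligned pointer is manufactured with unsafe, and the crash is expected only on 32-bit ARM builds that use this asm_arm.s.

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

func main() {
	// Back the counter with a byte buffer so we can deliberately misalign it.
	buf := make([]byte, 16)
	base := uintptr(unsafe.Pointer(&buf[0]))
	off := uintptr(1)
	if (base+off)%8 == 0 {
		off = 2 // ensure the resulting address is not 8-byte aligned
	}
	p := (*uint64)(unsafe.Pointer(&buf[off]))

	// On 32-bit ARM the guard added in the diff above makes this fault
	// instead of performing an unaligned 64-bit atomic access.
	fmt.Println(atomic.AddUint64(p, 1))
}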