author		CyrIng <labs@cyring.fr>		2024-05-05 16:51:40 +0200
committer	CyrIng <labs@cyring.fr>		2024-05-05 16:51:40 +0200
commit		0a21f0163c055ff77f358f30199f8d3036bddc2d (patch)
tree		364b8767fd7e7dffdbe133d214caade75cab70dc
parent		25434d9ddfa2c3e1b7c7faa806d3258d50813fed (diff)
[AArch64] Providing Exclusive Load/Store to bitwise operations
-rw-r--r--	aarch64/bitasm.h	99
1 file changed, 84 insertions(+), 15 deletions(-)
diff --git a/aarch64/bitasm.h b/aarch64/bitasm.h
index 7669ddf..d41e7a2 100644
--- a/aarch64/bitasm.h
+++ b/aarch64/bitasm.h
@@ -399,50 +399,119 @@ ASM_RDTSC_PMCx1(x14, x15, ASM_RDTSC, mem_tsc, __VA_ARGS__)
_ret; \
})
+#define _BITWISEAND_PRE_INST_FULL_LOCK \
+ "1:" "\n\t" \
+ "ldxr x11, [%[addr]]" "\n\t" \
+
+#define _BITWISEAND_PRE_INST_LOCK_LESS \
+ "ldr x11, [%[addr]]" "\n\t" \
+
+#define _BITWISEAND_POST_INST_FULL_LOCK \
+ "stxr w9, x11, [%[addr]]" "\n\t" \
+ "cbnz w9, 1b" "\n\t" \
+ "dmb ish"
+
+#define _BITWISEAND_POST_INST_LOCK_LESS \
+ "# NOP"
+
+#define _BITWISEAND_CLOBBERS_FULL_LOCK \
+ : "cc", "memory", "%w9", "%x10", "%x11" \
+
+#define _BITWISEAND_CLOBBERS_LOCK_LESS \
+ : "cc", "memory", "%x10", "%x11" \
+
#define _BITWISEAND(_lock, _opl, _opr) \
({ \
volatile Bit64 _dest __attribute__ ((aligned (8))); \
\
__asm__ volatile \
( \
- "and x10, %[opl], %[opr]" "\n\t" \
- "str x10, %[dest]" \
+ _BITWISEAND_PRE_INST_##_lock \
+ "and x10, x11, %[opr]" "\n\t" \
+ "str x10, %[dest]" "\n\t" \
+ _BITWISEAND_POST_INST_##_lock \
: [dest] "=m" (_dest) \
- : [opl] "Jr" (_opl), \
- [opr] "Jr" (_opr) \
- : "memory", "%x10" \
+ : [addr] "r" (&_opl), \
+ [opr] "Lr" (_opr) \
+ _BITWISEAND_CLOBBERS_##_lock \
); \
_dest; \
})
+#define _BITWISEOR_PRE_INST_FULL_LOCK \
+ "1:" "\n\t" \
+ "ldxr x11, [%[addr]]" "\n\t" \
+
+#define _BITWISEOR_PRE_INST_LOCK_LESS \
+ "ldr x11, [%[addr]]" "\n\t" \
+
+#define _BITWISEOR_POST_INST_FULL_LOCK \
+ "stxr w9, x11, [%[addr]]" "\n\t" \
+ "cbnz w9, 1b" "\n\t" \
+ "dmb ish"
+
+#define _BITWISEOR_POST_INST_LOCK_LESS \
+ "# NOP"
+
+#define _BITWISEOR_CLOBBERS_FULL_LOCK \
+ : "cc", "memory", "%w9", "%x10", "%x11" \
+
+#define _BITWISEOR_CLOBBERS_LOCK_LESS \
+ : "cc", "memory", "%x10", "%x11" \
+
#define _BITWISEOR(_lock, _opl, _opr) \
({ \
volatile Bit64 _dest __attribute__ ((aligned (8))); \
\
__asm__ volatile \
( \
- "orr x10, %[opl], %[opr]" "\n\t" \
- "str x10, %[dest]" \
+ _BITWISEOR_PRE_INST_##_lock \
+ "orr x10, x11, %[opr]" "\n\t" \
+ "str x10, %[dest]" "\n\t" \
+ _BITWISEOR_POST_INST_##_lock \
: [dest] "=m" (_dest) \
- : [opl] "Jr" (_opl), \
- [opr] "Jr" (_opr) \
- : "memory", "%x10" \
+ : [addr] "r" (&_opl), \
+ [opr] "Lr" (_opr) \
+ _BITWISEOR_CLOBBERS_##_lock \
); \
_dest; \
})
+#define _BITWISEXOR_PRE_INST_FULL_LOCK \
+ "1:" "\n\t" \
+ "ldxr x11, [%[addr]]" "\n\t" \
+
+#define _BITWISEXOR_PRE_INST_LOCK_LESS \
+ "ldr x11, [%[addr]]" "\n\t" \
+
+#define _BITWISEXOR_POST_INST_FULL_LOCK \
+ "stxr w9, x11, [%[addr]]" "\n\t" \
+ "cbnz w9, 1b" "\n\t" \
+ "dmb ish"
+
+#define _BITWISEXOR_POST_INST_LOCK_LESS \
+ "# NOP"
+
+#define _BITWISEXOR_CLOBBERS_FULL_LOCK \
+ : "cc", "memory", "%w9", "%x10", "%x11" \
+
+#define _BITWISEXOR_CLOBBERS_LOCK_LESS \
+ : "cc", "memory", "%x10", "%x11" \
+
#define _BITWISEXOR(_lock, _opl, _opr) \
({ \
volatile Bit64 _dest __attribute__ ((aligned (8))); \
\
__asm__ volatile \
( \
- "eor x10, %[opl], %[opr]" "\n\t" \
- "str x10, %[dest]" \
+ _BITWISEXOR_PRE_INST_##_lock \
+ "eor x10, x11, %[opr]" "\n\t" \
+ "str x10, %[dest]" "\n\t" \
+ _BITWISEXOR_POST_INST_##_lock \
: [dest] "=m" (_dest) \
- : [opl] "Jr" (_opl), \
- [opr] "Jr" (_opr) \
- : "memory", "%x10" \
+ : [addr] "r" (&_opl), \
+ [opr] "Lr" (_opr) \
+ _BITWISEXOR_CLOBBERS_##_lock \
); \
_dest; \
})
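
Below is a minimal, self-contained sketch of the LDXR/STXR retry idiom that the FULL_LOCK variants above introduce, for illustration only. The wrapper name bitwise_and_full_lock and the use of uint64_t for Bit64 are assumptions (not part of the patch), and the plain store of the result is placed after the exclusive pair here to keep the LDXR/STXR window minimal, whereas the patch keeps it inside.

	#include <stdint.h>

	typedef uint64_t Bit64;	/* assumption: Bit64 is a plain 64-bit word */

	/* Hypothetical wrapper mirroring _BITWISEAND(FULL_LOCK, ...) */
	static inline Bit64 bitwise_and_full_lock(volatile Bit64 *addr, Bit64 opr)
	{
		volatile Bit64 dest __attribute__ ((aligned (8)));

		__asm__ volatile
		(
		"1:"	"\n\t"
			"ldxr	x11, [%[addr]]"		"\n\t"	/* exclusive load of *addr          */
			"and	x10, x11, %[opr]"	"\n\t"	/* compute the bitwise result       */
			"stxr	w9, x11, [%[addr]]"	"\n\t"	/* store the unchanged value back;  */
								/* w9 != 0 if *addr was touched     */
			"cbnz	w9, 1b"			"\n\t"	/* retry until the snapshot is clean*/
			"dmb	ish"			"\n\t"	/* inner-shareable barrier, as in   */
								/* the patch                        */
			"str	x10, %[dest]"			/* publish the result               */
			: [dest] "=m" (dest)
			: [addr] "r" (addr),
			  [opr]  "r" (opr)
			: "cc", "memory", "x9", "x10", "x11"
		);
		return dest;
	}

With the patch applied, _BITWISEAND(FULL_LOCK, word, mask) expands to essentially this sequence through the _BITWISEAND_PRE_INST_FULL_LOCK / _BITWISEAND_POST_INST_FULL_LOCK token pasting, while _BITWISEAND(LOCK_LESS, word, mask) degenerates to a plain LDR/AND/STR with no exclusive pair and no barrier. The OR and XOR macros follow the same pattern with ORR and EOR.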