Diffstat (limited to 'includes/stg/SMP.h')
-rw-r--r--   includes/stg/SMP.h   37
1 file changed, 17 insertions(+), 20 deletions(-)
diff --git a/includes/stg/SMP.h b/includes/stg/SMP.h
index 21ab0e1214..0e806b6716 100644
--- a/includes/stg/SMP.h
+++ b/includes/stg/SMP.h
@@ -14,7 +14,7 @@
#ifndef SMP_H
#define SMP_H
-#if defined(arm_HOST_ARCH) && defined(arm_HOST_ARCH_PRE_ARMv6)
+#if arm_HOST_ARCH && defined(arm_HOST_ARCH_PRE_ARMv6)
void arm_atomic_spin_lock(void);
void arm_atomic_spin_unlock(void);
#endif
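For reference, a hedged sketch (not part of this patch) of why these spin-lock hooks exist: pre-ARMv6 hardware has no native atomic read-modify-write instructions, so word-sized atomic operations can be emulated by serialising them through the global lock declared above. The helper name and types below are hypothetical.

/* Hypothetical emulation of an atomic exchange via the global spin lock;
 * only arm_atomic_spin_lock/arm_atomic_spin_unlock come from SMP.h. */
static unsigned long
example_xchg(volatile unsigned long *p, unsigned long w)
{
    unsigned long old;
    arm_atomic_spin_lock();      /* serialise all emulated atomic ops */
    old = *p;                    /* read the current value            */
    *p  = w;                     /* store the new value               */
    arm_atomic_spin_unlock();
    return old;
}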
@@ -187,15 +187,14 @@ EXTERN_INLINE void
write_barrier(void) {
#if defined(NOSMP)
return;
-#elif defined(i386_HOST_ARCH) || defined(x86_64_HOST_ARCH)
+#elif i386_HOST_ARCH || x86_64_HOST_ARCH
__asm__ __volatile__ ("" : : : "memory");
-#elif defined(powerpc_HOST_ARCH) || defined(powerpc64_HOST_ARCH) \
- || defined(powerpc64le_HOST_ARCH)
+#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH
__asm__ __volatile__ ("lwsync" : : : "memory");
-#elif defined(sparc_HOST_ARCH)
+#elif sparc_HOST_ARCH
/* Sparc in TSO mode does not require store/store barriers. */
__asm__ __volatile__ ("" : : : "memory");
-#elif defined(arm_HOST_ARCH) || defined(aarch64_HOST_ARCH)
+#elif arm_HOST_ARCH || aarch64_HOST_ARCH
__asm__ __volatile__ ("dmb st" : : : "memory");
#else
#error memory barriers unimplemented on this architecture
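For reference, a hedged usage sketch (not from this patch): write_barrier() is a store/store barrier, typically used on the producer side so that an object's fields become visible before the pointer to the object is published; that is why x86 and SPARC TSO only need a compiler barrier while ARM and POWER need a real fence. The struct and the shared slot below are hypothetical.

/* Hypothetical producer-side publish; only write_barrier() is from SMP.h. */
struct node { int payload; };
static struct node *volatile shared_node;

static void
publish(struct node *n, int v)
{
    n->payload = v;     /* initialise the object's fields ...             */
    write_barrier();    /* ... force those stores to be visible first ... */
    shared_node = n;    /* ... then publish the pointer                   */
}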
@@ -206,18 +205,17 @@ EXTERN_INLINE void
store_load_barrier(void) {
#if defined(NOSMP)
return;
-#elif defined(i386_HOST_ARCH)
+#elif i386_HOST_ARCH
__asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory");
-#elif defined(x86_64_HOST_ARCH)
+#elif x86_64_HOST_ARCH
__asm__ __volatile__ ("lock; addq $0,0(%%rsp)" : : : "memory");
-#elif defined(powerpc_HOST_ARCH) || defined(powerpc64_HOST_ARCH) \
- || defined(powerpc64le_HOST_ARCH)
+#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH
__asm__ __volatile__ ("sync" : : : "memory");
-#elif defined(sparc_HOST_ARCH)
+#elif sparc_HOST_ARCH
__asm__ __volatile__ ("membar #StoreLoad" : : : "memory");
-#elif defined(arm_HOST_ARCH)
+#elif arm_HOST_ARCH
__asm__ __volatile__ ("dmb" : : : "memory");
-#elif defined(aarch64_HOST_ARCH)
+#elif aarch64_HOST_ARCH
__asm__ __volatile__ ("dmb sy" : : : "memory");
#else
#error memory barriers unimplemented on this architecture
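For reference, a hedged sketch (not from this patch): store_load_barrier() orders an earlier store before a later load, the one ordering that x86 TSO does not provide for free, which is why the x86 variants above use a locked add to the stack rather than a plain compiler barrier. The classic place it matters is a flag handshake like the one below; both flags are hypothetical.

/* Two threads each set their own flag and then check the other's;
 * without a store/load barrier both loads could complete before either
 * store is visible, and both threads could "enter" at once.
 * Only store_load_barrier() comes from SMP.h. */
static volatile int flag_a, flag_b;

static int
thread_a_may_enter(void)
{
    flag_a = 1;              /* announce intent                           */
    store_load_barrier();    /* the store above before the load below     */
    return flag_b == 0;      /* proceed only if B has not announced       */
}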
@@ -228,19 +226,18 @@ EXTERN_INLINE void
load_load_barrier(void) {
#if defined(NOSMP)
return;
-#elif defined(i386_HOST_ARCH)
+#elif i386_HOST_ARCH
__asm__ __volatile__ ("" : : : "memory");
-#elif defined(x86_64_HOST_ARCH)
+#elif x86_64_HOST_ARCH
__asm__ __volatile__ ("" : : : "memory");
-#elif defined(powerpc_HOST_ARCH) || defined(powerpc64_HOST_ARCH) \
- || defined(powerpc64le_HOST_ARCH)
+#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH
__asm__ __volatile__ ("lwsync" : : : "memory");
-#elif defined(sparc_HOST_ARCH)
+#elif sparc_HOST_ARCH
/* Sparc in TSO mode does not require load/load barriers. */
__asm__ __volatile__ ("" : : : "memory");
-#elif defined(arm_HOST_ARCH)
+#elif arm_HOST_ARCH
__asm__ __volatile__ ("dmb" : : : "memory");
-#elif defined(aarch64_HOST_ARCH)
+#elif aarch64_HOST_ARCH
__asm__ __volatile__ ("dmb sy" : : : "memory");
#else
#error memory barriers unimplemented on this architecture
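For reference, a hedged consumer-side sketch (not from this patch) pairing with the write_barrier() example above: after loading a published pointer, load_load_barrier() keeps the subsequent loads of the object's fields from being reordered before it, which is why it is a real fence on ARM, AArch64 and POWER but a no-op on x86 and SPARC TSO. The names reuse the hypothetical ones from the earlier sketch.

/* Hypothetical consumer pairing with publish(); only load_load_barrier()
 * is from SMP.h. */
static int
consume(void)
{
    struct node *n = shared_node;   /* load the published pointer         */
    if (n == NULL)
        return -1;                  /* nothing published yet               */
    load_load_barrier();            /* order the field load after the      */
                                    /* pointer load                        */
    return n->payload;              /* safe to read the initialised field  */
}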