author     Jean Girardet <jean.girardet@atos.net>  2016-10-10 11:04:24 +0300
committer  Ivan Maidanski <ivmai@mail.ru>          2016-10-10 11:04:24 +0300
commit     d93573cd0a9cf507e19002459ca0420e68839e34 (patch)
tree       f0627e07356494d298e95fa356bae322f0c5cfa4
parent     d5e5ac5af63c98492922128740e0ee0a64720b71 (diff)
download   libatomic_ops-d93573cd0a9cf507e19002459ca0420e68839e34.tar.gz
Support AIX/ppc (gcc)
* src/atomic_ops/sysdeps/gcc/powerpc.h (AO_PPC_L, AO_PPC_BR_A): New
internal macros (undefined at the end of file); definition depends on
_AIX macro presence.
* src/atomic_ops/sysdeps/gcc/powerpc.h (AO_load_acquire,
AO_test_and_set, AO_compare_and_swap, AO_fetch_compare_and_swap,
AO_fetch_and_add): Use AO_PPC_L for label definitions; use AO_PPC_BR_A
to specify jump target.
-rw-r--r--  src/atomic_ops/sysdeps/gcc/powerpc.h  |  50
1 file changed, 34 insertions(+), 16 deletions(-)
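Before the patch itself, a minimal sketch (not part of the commit) of what the two new helper macros expand to; the macro bodies mirror the hunk below, and C string-literal concatenation is what stitches the pieces back into a single inline-asm template:

#ifdef _AIX
  /* AIX assembler: local labels are unavailable, so labels vanish and    */
  /* branches use an address relative to the current location ("$").      */
# define AO_PPC_L(label)            /* empty */
# define AO_PPC_BR_A(labelBF, addr) addr
#else
  /* ELF targets: keep the numeric local label and the "1b"/"1f" form.    */
# define AO_PPC_L(label)            label ": "
# define AO_PPC_BR_A(labelBF, addr) labelBF
#endif

/* Sample template line from the patch:                                   */
/*   "bne- " AO_PPC_BR_A("1b", "$-16") "\n"                               */
/* expands to "bne- $-16\n" when _AIX is defined and to "bne- 1b\n"       */
/* otherwise; AO_PPC_L("1") likewise becomes nothing or "1: ".            */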
diff --git a/src/atomic_ops/sysdeps/gcc/powerpc.h b/src/atomic_ops/sysdeps/gcc/powerpc.h
index fdf6ffb..6650338 100644
--- a/src/atomic_ops/sysdeps/gcc/powerpc.h
+++ b/src/atomic_ops/sysdeps/gcc/powerpc.h
@@ -76,6 +76,16 @@ AO_lwsync(void)
# define AO_T_IS_INT
#endif
+#ifdef _AIX
+ /* Labels are not supported on AIX. */
+ /* ppc64 instructions have the same size as 32-bit ones. */
+# define AO_PPC_L(label) /* empty */
+# define AO_PPC_BR_A(labelBF, addr) addr
+#else
+# define AO_PPC_L(label) label ": "
+# define AO_PPC_BR_A(labelBF, addr) labelBF
+#endif
+
/* We explicitly specify load_acquire, since it is important, and can */
/* be implemented relatively cheaply. It could be implemented */
/* with an ordinary load followed by a lwsync. But the general wisdom */
@@ -90,8 +100,8 @@ AO_load_acquire(const volatile AO_t *addr)
__asm__ __volatile__ (
AO_PPC_LD "%U1%X1 %0,%1\n"
"cmpw %0,%0\n"
- "bne- 1f\n"
- "1: isync\n"
+ "bne- " AO_PPC_BR_A("1f", "$+4") "\n"
+ AO_PPC_L("1") "isync\n"
: "=r" (result)
: "m"(*addr) : "memory", AO_PPC_LOAD_CLOBBER);
return result;
@@ -119,12 +129,15 @@ AO_test_and_set(volatile AO_TS_t *addr) {
AO_t temp = 1; /* locked value */
__asm__ __volatile__(
- "1: " AO_PPC_LxARX " %0,0,%1\n" /* load and reserve */
+ AO_PPC_L("1") AO_PPC_LxARX " %0,0,%1\n"
+ /* load and reserve */
AO_PPC_CMPx "i %0, 0\n" /* if load is */
- "bne 2f\n" /* non-zero, return already set */
+ "bne " AO_PPC_BR_A("2f", "$+12") "\n"
+ /* non-zero, return already set */
AO_PPC_STxCXd " %2,0,%1\n" /* else store conditional */
- "bne- 1b\n" /* retry if lost reservation */
- "2:\n" /* oldval is zero if we set */
+ "bne- " AO_PPC_BR_A("1b", "$-16") "\n"
+ /* retry if lost reservation */
+ AO_PPC_L("2") "\n" /* oldval is zero if we set */
: "=&r"(oldval)
: "r"(addr), "r"(temp)
: "memory", "cr0");
@@ -167,13 +180,14 @@ AO_test_and_set_full(volatile AO_TS_t *addr) {
int result = 0;
__asm__ __volatile__(
- "1: " AO_PPC_LxARX " %0,0,%2\n" /* load and reserve */
+ AO_PPC_L("1") AO_PPC_LxARX " %0,0,%2\n" /* load and reserve */
AO_PPC_CMPx " %0, %4\n" /* if load is not equal to */
- "bne 2f\n" /* old, fail */
+ "bne " AO_PPC_BR_A("2f", "$+16") "\n" /* old, fail */
AO_PPC_STxCXd " %3,0,%2\n" /* else store conditional */
- "bne- 1b\n" /* retry if lost reservation */
+ "bne- " AO_PPC_BR_A("1b", "$-16") "\n"
+ /* retry if lost reservation */
"li %1,1\n" /* result = 1; */
- "2:\n"
+ AO_PPC_L("2") "\n"
: "=&r"(oldval), "=&r"(result)
: "r"(addr), "r"(new_val), "r"(old), "1"(result)
: "memory", "cr0");
@@ -218,12 +232,13 @@ AO_fetch_compare_and_swap(volatile AO_t *addr, AO_t old_val, AO_t new_val)
AO_t fetched_val;
__asm__ __volatile__(
- "1: " AO_PPC_LxARX " %0,0,%1\n" /* load and reserve */
+ AO_PPC_L("1") AO_PPC_LxARX " %0,0,%1\n" /* load and reserve */
AO_PPC_CMPx " %0, %3\n" /* if load is not equal to */
- "bne 2f\n" /* old_val, fail */
+ "bne " AO_PPC_BR_A("2f", "$+12") "\n" /* old_val, fail */
AO_PPC_STxCXd " %2,0,%1\n" /* else store conditional */
- "bne- 1b\n" /* retry if lost reservation */
- "2:\n"
+ "bne- " AO_PPC_BR_A("1b", "$-16") "\n"
+ /* retry if lost reservation */
+ AO_PPC_L("2") "\n"
: "=&r"(fetched_val)
: "r"(addr), "r"(new_val), "r"(old_val)
: "memory", "cr0");
@@ -270,10 +285,11 @@ AO_fetch_and_add(volatile AO_t *addr, AO_t incr) {
AO_t newval;
__asm__ __volatile__(
- "1: " AO_PPC_LxARX " %0,0,%2\n" /* load and reserve */
+ AO_PPC_L("1") AO_PPC_LxARX " %0,0,%2\n" /* load and reserve */
"add %1,%0,%3\n" /* increment */
AO_PPC_STxCXd " %1,0,%2\n" /* store conditional */
- "bne- 1b\n" /* retry if lost reservation */
+ "bne- " AO_PPC_BR_A("1b", "$-12") "\n"
+ /* retry if lost reservation */
: "=&r"(oldval), "=&r"(newval)
: "r"(addr), "r"(incr)
: "memory", "cr0");
@@ -309,7 +325,9 @@ AO_fetch_and_add_full(volatile AO_t *addr, AO_t incr) {
/* TODO: Implement double-wide operations if available. */
+#undef AO_PPC_BR_A
#undef AO_PPC_CMPx
+#undef AO_PPC_L
#undef AO_PPC_LD
#undef AO_PPC_LOAD_CLOBBER
#undef AO_PPC_LxARX
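
As a rough check of the hard-coded AIX branch offsets (an illustration under assumptions, not code from the patch): PowerPC instructions are fixed at 4 bytes, so writing out AO_test_and_set with _AIX defined and the 32-bit mnemonics substituted gives the sequence below. The function name AO_test_and_set_expanded is hypothetical, and the AO_* types are assumed to come from atomic_ops.h.

AO_INLINE AO_TS_VAL_t
AO_test_and_set_expanded(volatile AO_TS_t *addr) {
  AO_t oldval;
  AO_t temp = 1;                    /* locked value */
  __asm__ __volatile__(
    "lwarx %0,0,%1\n"               /* offset  0: load and reserve        */
    "cmpwi %0, 0\n"                 /* offset  4: already set?            */
    "bne $+12\n"                    /* offset  8: yes -> offset 20 ("2f") */
    "stwcx. %2,0,%1\n"              /* offset 12: else store conditional  */
    "bne- $-16\n"                   /* offset 16: lost reservation ->     */
                                    /*            retry at offset 0 ("1b")*/
                                    /* offset 20: fall-through ("2:")     */
    : "=&r"(oldval)
    : "r"(addr), "r"(temp)
    : "memory", "cr0");
  return (AO_TS_VAL_t)oldval;
}
/* The same arithmetic yields $+4 in AO_load_acquire (branch to the next  */
/* instruction), $+16/$-16 in AO_compare_and_swap (one extra "li" before  */
/* the exit point), $+12/$-16 in AO_fetch_compare_and_swap, and $-12 in   */
/* AO_fetch_and_add.                                                      */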