diff options
author | ylavic <ylavic@13f79535-47bb-0310-9956-ffa450edef68> | 2023-02-14 10:25:34 +0000 |
---|---|---|
committer | ylavic <ylavic@13f79535-47bb-0310-9956-ffa450edef68> | 2023-02-14 10:25:34 +0000 |
commit | 24b1a6436f789ae16be4d7a2a7a68cb9325315e5 (patch) | |
tree | 3cbca57c0f6d67d86d4c24274c4b382de8452d10 | |
parent | 87c8321e094af6973c7c7404998b2b6792646954 (diff) | |
download | libapr-24b1a6436f789ae16be4d7a2a7a68cb9325315e5.tar.gz |
atomic: No raw 64bit load/store on 32bit systems or anything but x86_64 or s390x.
Raw 64 bit load and store need two instructions on 32bit systems (tearing) so
they are not atomic, and only x86(_64) and s390(x) have strong memory ordering
guarantees. Always use builtin functions for the cases where raw load/store
don't work as expected.
* atomic/unix/builtins.c, atomic/unix/builtins64.c:
Use an accept-list rather than a reject-list to define WEAK_MEMORY_ORDERING.
Test APR_SIZEOF_VOIDP < 8 to force usage of __sync builtins for _read{32,64}
and _set{32,64} on 32bit systems when __atomic_{load,store} builtins are not
available.
git-svn-id: https://svn.apache.org/repos/asf/apr/apr/trunk@1907637 13f79535-47bb-0310-9956-ffa450edef68
-rw-r--r-- | atomic/unix/builtins.c | 7 | ||||
-rw-r--r-- | atomic/unix/builtins64.c | 11 |
2 files changed, 10 insertions, 8 deletions
diff --git a/atomic/unix/builtins.c b/atomic/unix/builtins.c index fbbb141e8..0a085a0b7 100644 --- a/atomic/unix/builtins.c +++ b/atomic/unix/builtins.c @@ -18,10 +18,11 @@ #ifdef USE_ATOMICS_BUILTINS -#if defined(__arm__) || defined(__powerpc__) || defined(__powerpc64__) -#define WEAK_MEMORY_ORDERING 1 -#else +#if defined(__i386__) || defined(__x86_64__) \ + || defined(__s390__) || defined(__s390x__) #define WEAK_MEMORY_ORDERING 0 +#else +#define WEAK_MEMORY_ORDERING 1 #endif APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p) diff --git a/atomic/unix/builtins64.c b/atomic/unix/builtins64.c index d76de2472..5d07c3f78 100644 --- a/atomic/unix/builtins64.c +++ b/atomic/unix/builtins64.c @@ -18,17 +18,18 @@ #ifdef USE_ATOMICS_BUILTINS64 -#if defined(__arm__) || defined(__powerpc__) || defined(__powerpc64__) -#define WEAK_MEMORY_ORDERING 1 -#else +#if defined(__i386__) || defined(__x86_64__) \ + || defined(__s390__) || defined(__s390x__) #define WEAK_MEMORY_ORDERING 0 +#else +#define WEAK_MEMORY_ORDERING 1 #endif APR_DECLARE(apr_uint64_t) apr_atomic_read64(volatile apr_uint64_t *mem) { #if HAVE__ATOMIC_BUILTINS64 return __atomic_load_n(mem, __ATOMIC_SEQ_CST); -#elif WEAK_MEMORY_ORDERING +#elif WEAK_MEMORY_ORDERING || APR_SIZEOF_VOIDP < 8 /* No __sync_load() available => apr_atomic_add64(mem, 0) */ return __sync_fetch_and_add(mem, 0); #else @@ -40,7 +41,7 @@ APR_DECLARE(void) apr_atomic_set64(volatile apr_uint64_t *mem, apr_uint64_t val) { #if HAVE__ATOMIC_BUILTINS64 __atomic_store_n(mem, val, __ATOMIC_SEQ_CST); -#elif WEAK_MEMORY_ORDERING +#elif WEAK_MEMORY_ORDERING || APR_SIZEOF_VOIDP < 8 /* No __sync_store() available => apr_atomic_xchg64(mem, val) */ __sync_synchronize(); __sync_lock_test_and_set(mem, val); |