diff options
author | ylavic <ylavic@13f79535-47bb-0310-9956-ffa450edef68> | 2021-10-29 15:04:55 +0000 |
---|---|---|
committer | ylavic <ylavic@13f79535-47bb-0310-9956-ffa450edef68> | 2021-10-29 15:04:55 +0000 |
commit | ca229155fffd5b16e86a3e54394a86a6045462b0 (patch) | |
tree | 25b4113ed609457e9e3cf87d6ae719853868d253 /atomic/unix/builtins64.c | |
parent | 728d093b75806ec3ecc519cd9a4e657e22cb40ea (diff) | |
download | libapr-ca229155fffd5b16e86a3e54394a86a6045462b0.tar.gz |
apr_atomic: Use __atomic builtins when available.
Unlike Intel's atomic builtins (__sync_*), the more recent __atomic builtins
provide atomic load and store for weakly ordered architectures like ARM32 or
powerpc[64], so use them when available (gcc 4.6.3+).
[Reverted by r1894619]
git-svn-id: https://svn.apache.org/repos/asf/apr/apr/trunk@1894618 13f79535-47bb-0310-9956-ffa450edef68
Diffstat (limited to 'atomic/unix/builtins64.c')
-rw-r--r-- | atomic/unix/builtins64.c | 38 |
1 file changed, 35 insertions(+), 3 deletions(-)
diff --git a/atomic/unix/builtins64.c b/atomic/unix/builtins64.c index 4a4b685c7..0ac950c15 100644 --- a/atomic/unix/builtins64.c +++ b/atomic/unix/builtins64.c @@ -20,45 +20,77 @@ APR_DECLARE(apr_uint64_t) apr_atomic_read64(volatile apr_uint64_t *mem) { +#if HAVE__ATOMIC_BUILTINS + return __atomic_load_n(mem, __ATOMIC_SEQ_CST); +#else return *mem; +#endif } APR_DECLARE(void) apr_atomic_set64(volatile apr_uint64_t *mem, apr_uint64_t val) { +#if HAVE__ATOMIC_BUILTINS + __atomic_store_n(mem, val, __ATOMIC_SEQ_CST); +#else *mem = val; +#endif } APR_DECLARE(apr_uint64_t) apr_atomic_add64(volatile apr_uint64_t *mem, apr_uint64_t val) { +#if HAVE__ATOMIC_BUILTINS + return __atomic_fetch_add(mem, val, __ATOMIC_SEQ_CST); +#else return __sync_fetch_and_add(mem, val); +#endif } APR_DECLARE(void) apr_atomic_sub64(volatile apr_uint64_t *mem, apr_uint64_t val) { +#if HAVE__ATOMIC_BUILTINS + __atomic_fetch_sub(mem, val, __ATOMIC_SEQ_CST); +#else __sync_fetch_and_sub(mem, val); +#endif } APR_DECLARE(apr_uint64_t) apr_atomic_inc64(volatile apr_uint64_t *mem) { +#if HAVE__ATOMIC_BUILTINS + return __atomic_fetch_add(mem, 1, __ATOMIC_SEQ_CST); +#else return __sync_fetch_and_add(mem, 1); +#endif } APR_DECLARE(int) apr_atomic_dec64(volatile apr_uint64_t *mem) { +#if HAVE__ATOMIC_BUILTINS + return __atomic_sub_fetch(mem, 1, __ATOMIC_SEQ_CST); +#else return __sync_sub_and_fetch(mem, 1); +#endif } -APR_DECLARE(apr_uint64_t) apr_atomic_cas64(volatile apr_uint64_t *mem, apr_uint64_t with, +APR_DECLARE(apr_uint64_t) apr_atomic_cas64(volatile apr_uint64_t *mem, apr_uint64_t val, apr_uint64_t cmp) { - return __sync_val_compare_and_swap(mem, cmp, with); +#if HAVE__ATOMIC_BUILTINS + __atomic_compare_exchange_n(mem, &cmp, val, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return cmp; +#else + return __sync_val_compare_and_swap(mem, cmp, val); +#endif } APR_DECLARE(apr_uint64_t) apr_atomic_xchg64(volatile apr_uint64_t *mem, apr_uint64_t val) { +#if HAVE__ATOMIC_BUILTINS + return 
__atomic_exchange_n(mem, val, __ATOMIC_SEQ_CST); +#else __sync_synchronize(); - return __sync_lock_test_and_set(mem, val); +#endif } #endif /* USE_ATOMICS_BUILTINS */ |