Diffstat (limited to 'runtime/caml')
-rw-r--r--  runtime/caml/atomic_refcount.h  2
-rw-r--r--  runtime/caml/domain.h           2
-rw-r--r--  runtime/caml/lf_skiplist.h      3
-rw-r--r--  runtime/caml/platform.h        31
4 files changed, 13 insertions, 25 deletions
diff --git a/runtime/caml/atomic_refcount.h b/runtime/caml/atomic_refcount.h
index 3e4a239d51..aba5ce7f67 100644
--- a/runtime/caml/atomic_refcount.h
+++ b/runtime/caml/atomic_refcount.h
@@ -21,7 +21,7 @@
 #include "camlatomic.h"
 
 Caml_inline void caml_atomic_refcount_init(atomic_uintnat* refc, uintnat n){
-  atomic_store_rel(refc, n);
+  atomic_store_release(refc, n);
 }
 
 Caml_inline uintnat caml_atomic_refcount_decr(atomic_uintnat* refcount){
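For context, a minimal sketch of how the renamed refcount helpers are used. Everything except the two caml_atomic_refcount_* calls is hypothetical, and caml_atomic_refcount_decr is assumed to return the previous count, as a fetch-and-decrement does:

    #define CAML_INTERNALS   /* runtime-internal header */
    #include <stdlib.h>
    #include "caml/atomic_refcount.h"

    typedef struct { atomic_uintnat refc; } shared_t;  /* illustrative type */

    void shared_publish(shared_t *s, uintnat owners) {
      /* release store: the count is visible before the block is shared */
      caml_atomic_refcount_init(&s->refc, owners);
    }

    void shared_release(shared_t *s) {
      /* assumed to return the old count; the last owner reclaims */
      if (caml_atomic_refcount_decr(&s->refc) == 1)
        free(s);
    }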
diff --git a/runtime/caml/domain.h b/runtime/caml/domain.h
index 17c011ecee..49194ae73d 100644
--- a/runtime/caml/domain.h
+++ b/runtime/caml/domain.h
@@ -92,7 +92,7 @@ CAMLextern atomic_uintnat caml_num_domains_running;
 
 Caml_inline intnat caml_domain_alone(void)
 {
-  return atomic_load_acq(&caml_num_domains_running) == 1;
+  return atomic_load_acquire(&caml_num_domains_running) == 1;
 }
 
 #ifdef DEBUG
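caml_domain_alone is the runtime's cheap "is any other domain running?" test; the acquire load ensures the caller sees everything published before the domain count last changed. A sketch of the usual fast path, assuming caml_plat_lock/caml_plat_unlock from platform.h; touch_state and the mutex are illustrative:

    #define CAML_INTERNALS
    #include "caml/domain.h"
    #include "caml/platform.h"

    extern void touch_state(void);   /* hypothetical shared-state update */
    static caml_plat_mutex lock;     /* assumed initialised elsewhere */

    void update_shared_state(void) {
      if (caml_domain_alone()) {
        touch_state();               /* no other domain can race: lock elided */
      } else {
        caml_plat_lock(&lock);
        touch_state();
        caml_plat_unlock(&lock);
      }
    }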
diff --git a/runtime/caml/lf_skiplist.h b/runtime/caml/lf_skiplist.h
index f35f112256..db6544c867 100644
--- a/runtime/caml/lf_skiplist.h
+++ b/runtime/caml/lf_skiplist.h
@@ -95,8 +95,7 @@ extern void caml_lf_skiplist_free_garbage(struct lf_skiplist *sk);
 #define LF_SK_UNMARK(p) ((struct lf_skipcell *)(((uintptr_t)(p)) & ~1))
 #define LF_SK_EXTRACT(from, mark_to, ptr_to) \
   { \
-    uintptr_t tmp = \
-        (uintptr_t)atomic_load_explicit(&from, memory_order_acquire); \
+    uintptr_t tmp = (uintptr_t)atomic_load_acquire(&(from)); \
     mark_to = LF_SK_IS_MARKED(tmp); \
     ptr_to = LF_SK_UNMARK(tmp); \
   }
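The skiplist packs a deletion mark into the low bit of each next pointer, so a single acquire load yields both the successor and whether the cell is logically deleted; LF_SK_EXTRACT is that load-and-split. A traversal-step sketch; the forward field follows the runtime's node layout, the rest is illustrative:

    #define CAML_INTERNALS
    #include "caml/lf_skiplist.h"

    /* One step at level 0: report the successor and whether `cell'
       has been logically deleted (mark bit set on its next link). */
    static struct lf_skipcell *next_cell(struct lf_skipcell *cell,
                                         int *deleted) {
      struct lf_skipcell *succ;
      int marked;
      LF_SK_EXTRACT(cell->forward[0], marked, succ);
      *deleted = marked;
      return succ;
    }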
diff --git a/runtime/caml/platform.h b/runtime/caml/platform.h
index 77027405be..373419e3c9 100644
--- a/runtime/caml/platform.h
+++ b/runtime/caml/platform.h
@@ -49,27 +49,16 @@ Caml_inline void cpu_relax(void) {
 #endif
 }
 
-/* Loads and stores with acquire and release semantics respectively */
+/* Loads and stores with acquire, release and relaxed semantics */
 
-Caml_inline uintnat atomic_load_acq(atomic_uintnat* p)
-{
-  return atomic_load_explicit(p, memory_order_acquire);
-}
-
-Caml_inline uintnat atomic_load_relaxed(atomic_uintnat* p)
-{
-  return atomic_load_explicit(p, memory_order_relaxed);
-}
-
-Caml_inline void atomic_store_rel(atomic_uintnat* p, uintnat v)
-{
-  atomic_store_explicit(p, v, memory_order_release);
-}
-
-Caml_inline void atomic_store_relaxed(atomic_uintnat* p, uintnat v)
-{
-  atomic_store_explicit(p, v, memory_order_relaxed);
-}
+#define atomic_load_acquire(p) \
+  atomic_load_explicit((p), memory_order_acquire)
+#define atomic_load_relaxed(p) \
+  atomic_load_explicit((p), memory_order_relaxed)
+#define atomic_store_release(p, v) \
+  atomic_store_explicit((p), (v), memory_order_release)
+#define atomic_store_relaxed(p, v) \
+  atomic_store_explicit((p), (v), memory_order_relaxed)
 
 /* Spin-wait loops */
 
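Besides renaming, the switch from Caml_inline functions to macros makes these helpers polymorphic: they now accept any C11 atomic object rather than only atomic_uintnat, which is exactly what the lf_skiplist change above needs when loading an atomic pointer. A sketch of the mixed use this enables; struct node and its fields are hypothetical, and a single producer is assumed (a real lock-free push would CAS):

    #define CAML_INTERNALS
    #include "caml/platform.h"

    struct node { struct node *next; };
    static _Atomic(struct node *) head;  /* atomic pointer, not atomic_uintnat */
    static atomic_uintnat dirty;

    void push(struct node *n) {
      n->next = atomic_load_acquire(&head);  /* same macro as for integers */
      atomic_store_release(&head, n);
      atomic_store_relaxed(&dirty, 1);
    }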
@@ -94,7 +83,7 @@ CAMLextern unsigned caml_plat_spin_wait(unsigned spins,
 
 Caml_inline uintnat atomic_load_wait_nonzero(atomic_uintnat* p) {
   SPIN_WAIT {
-    uintnat v = atomic_load_acq(p);
+    uintnat v = atomic_load_acquire(p);
     if (v) return v;
   }
 }
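atomic_load_wait_nonzero spins, with SPIN_WAIT's cpu_relax back-off, until *p becomes non-zero, and its acquire load orders everything the writer published before setting the flag. A producer/consumer sketch; the names and payload protocol are illustrative:

    #define CAML_INTERNALS
    #include "caml/platform.h"

    static atomic_uintnat ready;   /* 0 until `result' is published */
    static uintnat result;

    void produce(uintnat r) {
      result = r;
      atomic_store_release(&ready, 1);   /* pairs with the acquire below */
    }

    uintnat consume(void) {
      atomic_load_wait_nonzero(&ready);  /* spins until non-zero */
      return result;                     /* ordered after the release store */
    }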