author     amacleod <amacleod@138bc75d-0d04-0410-961f-82ee72b054a4>   2011-11-24 23:14:31 +0000
committer  amacleod <amacleod@138bc75d-0d04-0410-961f-82ee72b054a4>   2011-11-24 23:14:31 +0000
commit     7821cde17c13b06463a34e7b6888ef168f38ee8d (patch)
tree       bf092826976e956804e16b109cecf1f0d3830700 /gcc/builtins.c
parent     b5a229ff4f4b0943e3f38a8107df9e05285e3584 (diff)
download   gcc-7821cde17c13b06463a34e7b6888ef168f38ee8d.tar.gz
2011-11-24  Andrew MacLeod  <amacleod@redhat.com>

	* optabs.c (maybe_emit_atomic_exchange): New.  Try to emit an
	atomic_exchange pattern.
	(maybe_emit_sync_lock_test_and_set): New.  Try to emit an exchange
	using __sync_lock_test_and_set.
	(maybe_emit_compare_and_swap_exchange_loop): New.  Try to emit an
	exchange using a compare_and_swap loop.
	(expand_sync_lock_test_and_set): New.  Expand sync_lock_test_and_set.
	(expand_atomic_test_and_set): New.  Expand test_and_set operation.
	(expand_atomic_exchange): Use new maybe_emit_* functions.
	(expand_atomic_store): Use new maybe_emit_* functions.
	* builtins.c (expand_builtin_sync_lock_test_and_set): Call
	expand_sync_lock_test_and_set routine.
	(expand_builtin_atomic_exchange): Remove parameter from call.
	(expand_builtin_atomic_clear): Use atomic_clear pattern if present.
	(expand_builtin_atomic_test_and_set): Add target and simply call
	expand_atomic_test_and_set.
	(expand_builtin): Add target to expand_builtin_atomic_test_and_set.
	* expr.h (expand_atomic_exchange): Add parameter.
	(expand_sync_lock_test_and_set): New prototype.
	(expand_atomic_test_and_set, expand_atomic_clear): New prototypes.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@181702 138bc75d-0d04-0410-961f-82ee72b054a4
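For context, the user-facing builtins whose expansion paths this patch reworks are
__atomic_test_and_set and __atomic_clear.  The sketch below shows how they are
typically used together as a simple spin lock; the builtin names and memory-order
constants are the documented GCC ones, while the flag variable and function names
are illustrative only.

    #include <stdbool.h>

    static bool lock_flag;   /* illustrative flag; must start out cleared */

    static void
    spin_lock (void)
    {
      /* __atomic_test_and_set sets the flag and returns its previous state;
         spin until the previous state was "clear".  */
      while (__atomic_test_and_set (&lock_flag, __ATOMIC_ACQUIRE))
        ;
    }

    static void
    spin_unlock (void)
    {
      /* __atomic_clear resets the flag with release semantics.  */
      __atomic_clear (&lock_flag, __ATOMIC_RELEASE);
    }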
Diffstat (limited to 'gcc/builtins.c')
-rw-r--r--   gcc/builtins.c   36
1 file changed, 17 insertions, 19 deletions
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 0fc5a420c82..c9c02d10550 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -5227,7 +5227,7 @@ expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
- return expand_atomic_exchange (target, mem, val, MEMMODEL_ACQUIRE, true);
+ return expand_sync_lock_test_and_set (target, mem, val);
}
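The __sync_lock_test_and_set expansion above now routes through the new
expand_sync_lock_test_and_set helper.  For reference, the legacy builtin it
implements has acquire-barrier semantics only; a typical (illustrative) use is
the classic spin-lock idiom below, with the lock variable and function names
invented for the example.

    static int lock;   /* illustrative */

    void
    legacy_lock (void)
    {
      /* __sync_lock_test_and_set is an acquire barrier: it stores 1 and
         returns the previous contents of the lock word.  */
      while (__sync_lock_test_and_set (&lock, 1))
        ;
    }

    void
    legacy_unlock (void)
    {
      /* __sync_lock_release stores 0 with release semantics.  */
      __sync_lock_release (&lock);
    }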
/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
@@ -5291,7 +5291,7 @@ expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
- return expand_atomic_exchange (target, mem, val, model, false);
+ return expand_atomic_exchange (target, mem, val, model);
}
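expand_builtin_atomic_exchange now calls expand_atomic_exchange without the
extra flag argument.  At the source level this expander backs
__atomic_exchange_n; a minimal illustration (variable names and memory model
chosen arbitrarily for the example) is:

    int shared;   /* illustrative */

    int
    swap_in (int newval)
    {
      /* Atomically store NEWVAL into SHARED and return the old contents.  */
      return __atomic_exchange_n (&shared, newval, __ATOMIC_SEQ_CST);
    }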
/* Expand the __atomic_compare_exchange intrinsic:
@@ -5482,6 +5482,11 @@ expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
}
+#ifndef HAVE_atomic_clear
+# define HAVE_atomic_clear 0
+# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
+#endif
+
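The HAVE_atomic_clear / gen_atomic_clear guard added above follows a common
GCC idiom: when a target does not provide the named insn pattern, the HAVE_
macro becomes the constant 0 so the branch folds away, and the gen_ stub only
exists to keep the dead call well-formed.  A generic sketch of the idiom, with
hypothetical names, might look like:

    #include <stdlib.h>

    /* Hypothetical pattern name, for illustration only.  */
    #ifndef HAVE_frobnicate
    # define HAVE_frobnicate 0
    # define gen_frobnicate(x) (abort (), (void *) 0)
    #endif

    void
    maybe_frobnicate (void *p)
    {
      if (HAVE_frobnicate)      /* constant 0 when the pattern is absent,  */
        gen_frobnicate (p);     /* so this call is compiled away.          */
    }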
/* Expand an atomic clear operation.
void _atomic_clear (BOOL *obj, enum memmodel)
EXP is the call expression. */
@@ -5503,6 +5508,12 @@ expand_builtin_atomic_clear (tree exp)
return const0_rtx;
}
+ if (HAVE_atomic_clear)
+ {
+ emit_insn (gen_atomic_clear (mem, model));
+ return const0_rtx;
+ }
+
/* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
Failing that, a store is issued by __atomic_store. The only way this can
fail is if the bool type is larger than a word size. Unlikely, but
@@ -5519,9 +5530,9 @@ expand_builtin_atomic_clear (tree exp)
EXP is the call expression. */
static rtx
-expand_builtin_atomic_test_and_set (tree exp)
+expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
- rtx mem, ret;
+ rtx mem;
enum memmodel model;
enum machine_mode mode;
@@ -5529,20 +5540,7 @@ expand_builtin_atomic_test_and_set (tree exp)
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
model = get_memmodel (CALL_EXPR_ARG (exp, 1));
- /* Try issuing an exchange. If it is lock free, or if there is a limited
- functionality __sync_lock_test_and_set, this will utilize it. */
- ret = expand_atomic_exchange (NULL_RTX, mem, const1_rtx, model, true);
- if (ret)
- return ret;
-
- /* Otherwise, there is no lock free support for test and set. Simply
- perform a load and a store. Since this presumes a non-atomic architecture,
- also assume single threadedness and don't issue barriers either. */
-
- ret = gen_reg_rtx (mode);
- emit_move_insn (ret, mem);
- emit_move_insn (mem, const1_rtx);
- return ret;
+ return expand_atomic_test_and_set (target, mem, model);
}
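The deleted fallback above implemented test-and-set as an atomic exchange of
the constant 1 (or, failing that, a plain load/store pair); that logic is now
consolidated in expand_atomic_test_and_set.  In user-level terms the
exchange-based form is roughly equivalent to the sketch below, an illustration
of the equivalence rather than the actual lowering.

    #include <stdbool.h>

    /* Rough model of test-and-set expressed as an exchange of 1.  */
    static inline bool
    test_and_set_via_exchange (unsigned char *p, int memorder)
    {
      return __atomic_exchange_n (p, (unsigned char) 1, memorder) != 0;
    }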
@@ -6711,7 +6709,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
break;
case BUILT_IN_ATOMIC_TEST_AND_SET:
- return expand_builtin_atomic_test_and_set (exp);
+ return expand_builtin_atomic_test_and_set (exp, target);
case BUILT_IN_ATOMIC_CLEAR:
return expand_builtin_atomic_clear (exp);