author    jakub <jakub@138bc75d-0d04-0410-961f-82ee72b054a4>    2016-06-28 08:27:18 +0000
committer jakub <jakub@138bc75d-0d04-0410-961f-82ee72b054a4>    2016-06-28 08:27:18 +0000
commit    5a5ef6591c05aeaa6dfe4cc37711b5519380b37a (patch)
tree      532f4d56f6e6da5f9c6f2a7cf4ae65a7b9a8342d /gcc/builtins.c
parent    1fcfb8f7ac7dcd6c506182d5bd9d11f3caf76d98 (diff)
download  gcc-5a5ef6591c05aeaa6dfe4cc37711b5519380b37a.tar.gz
PR middle-end/66867
	* builtins.c (expand_ifn_atomic_compare_exchange_into_call,
	expand_ifn_atomic_compare_exchange): New functions.
	* internal-fn.c (expand_ATOMIC_COMPARE_EXCHANGE): New function.
	* tree.h (build_call_expr_internal_loc): Rename to ...
	(build_call_expr_internal_loc_array): ... this.  Fix up type of
	last argument.
	* internal-fn.def (ATOMIC_COMPARE_EXCHANGE): New internal fn.
	* predict.c (expr_expected_value_1): Handle IMAGPART_EXPR of
	ATOMIC_COMPARE_EXCHANGE result.
	* builtins.h (expand_ifn_atomic_compare_exchange): New prototype.
	* gimple-fold.h (optimize_atomic_compare_exchange_p,
	fold_builtin_atomic_compare_exchange): New prototypes.
	* gimple-fold.c (optimize_atomic_compare_exchange_p,
	fold_builtin_atomic_compare_exchange): New functions.
	* tree-ssa.c (execute_update_addresses_taken): If
	optimize_atomic_compare_exchange_p, ignore &var in 2nd argument
	of call when finding addressable vars, and if such var becomes
	non-addressable, call fold_builtin_atomic_compare_exchange.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@237814 138bc75d-0d04-0410-961f-82ee72b054a4
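For context, PR middle-end/66867 concerns code where __atomic_compare_exchange_n takes the address of the "expected" variable, forcing it into a stack slot; per the tree-ssa.c entry above, the new .ATOMIC_COMPARE_EXCHANGE internal function passes the expected value directly, so the variable can become non-addressable when the lock-free path is used. A minimal source-level sketch of that pattern (the function name and types below are illustrative, not taken from the PR or the testsuite):

#include <stdbool.h>

/* Before this patch, `expected' had to live in memory because the builtin
   takes its address; with the internal fn it can stay in a register.  */
bool
try_lock (int *lock)
{
  int expected = 0;
  return __atomic_compare_exchange_n (lock, &expected, 1, false,
                                      __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
}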
Diffstat (limited to 'gcc/builtins.c')
-rw-r--r--    gcc/builtins.c    117
1 file changed, 117 insertions(+), 0 deletions(-)
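As a rough sketch of the result convention the new expanders implement: the internal function's result is treated as a complex value whose imaginary part is the success flag and whose real part is the old memory contents, which is what the write_complex_part calls in the hunk below store. The struct cas_result type and cas_sketch function here are invented purely for illustration:

#include <stdbool.h>
#include <stdint.h>

/* Illustrative only: models the pair that expand_ifn_atomic_compare_exchange
   writes into the lhs via write_complex_part -- imaginary part = success
   flag, real part = previous value of *mem.  */
struct cas_result
{
  uint32_t oldval;  /* real part */
  bool ok;          /* imaginary part */
};

static struct cas_result
cas_sketch (uint32_t *mem, uint32_t expected, uint32_t desired)
{
  struct cas_result r;
  /* The documented 6-argument builtin; the patch's !flag_inline_atomics
     fallback instead calls the sized __atomic_compare_exchange_N library
     routine with the weak flag dropped and `expected' spilled to a stack
     temporary.  */
  r.ok = __atomic_compare_exchange_n (mem, &expected, desired, false,
                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  r.oldval = expected;  /* holds the old value whether or not the CAS succeeded */
  return r;
}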
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 5d234a5c827..1465c60c98f 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -5158,6 +5158,123 @@ expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
return target;
}
+/* Helper function for expand_ifn_atomic_compare_exchange - expand
+ internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
+ call. The weak parameter must be dropped to match the expected parameter
+ list and the expected argument changed from value to pointer to memory
+ slot. */
+
+static void
+expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
+{
+ unsigned int z;
+ vec<tree, va_gc> *vec;
+
+ vec_alloc (vec, 5);
+ vec->quick_push (gimple_call_arg (call, 0));
+ tree expected = gimple_call_arg (call, 1);
+ rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
+ TREE_TYPE (expected));
+ rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
+ if (expd != x)
+ emit_move_insn (x, expd);
+ tree v = make_tree (TREE_TYPE (expected), x);
+ vec->quick_push (build1 (ADDR_EXPR,
+ build_pointer_type (TREE_TYPE (expected)), v));
+ vec->quick_push (gimple_call_arg (call, 2));
+ /* Skip the boolean weak parameter. */
+ for (z = 4; z < 6; z++)
+ vec->quick_push (gimple_call_arg (call, z));
+ built_in_function fncode
+ = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
+ + exact_log2 (GET_MODE_SIZE (mode)));
+ tree fndecl = builtin_decl_explicit (fncode);
+ tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
+ fndecl);
+ tree exp = build_call_vec (boolean_type_node, fn, vec);
+ tree lhs = gimple_call_lhs (call);
+ rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
+ if (lhs)
+ {
+ rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+ if (GET_MODE (boolret) != mode)
+ boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
+ x = force_reg (mode, x);
+ write_complex_part (target, boolret, true);
+ write_complex_part (target, x, false);
+ }
+}
+
+/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
+
+void
+expand_ifn_atomic_compare_exchange (gcall *call)
+{
+ int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
+ gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
+ machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
+ rtx expect, desired, mem, oldval, boolret;
+ enum memmodel success, failure;
+ tree lhs;
+ bool is_weak;
+ source_location loc
+ = expansion_point_location_if_in_system_header (gimple_location (call));
+
+ success = get_memmodel (gimple_call_arg (call, 4));
+ failure = get_memmodel (gimple_call_arg (call, 5));
+
+ if (failure > success)
+ {
+ warning_at (loc, OPT_Winvalid_memory_model,
+ "failure memory model cannot be stronger than success "
+ "memory model for %<__atomic_compare_exchange%>");
+ success = MEMMODEL_SEQ_CST;
+ }
+
+ if (is_mm_release (failure) || is_mm_acq_rel (failure))
+ {
+ warning_at (loc, OPT_Winvalid_memory_model,
+ "invalid failure memory model for "
+ "%<__atomic_compare_exchange%>");
+ failure = MEMMODEL_SEQ_CST;
+ success = MEMMODEL_SEQ_CST;
+ }
+
+ if (!flag_inline_atomics)
+ {
+ expand_ifn_atomic_compare_exchange_into_call (call, mode);
+ return;
+ }
+
+ /* Expand the operands. */
+ mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
+
+ expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
+ desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
+
+ is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
+
+ boolret = NULL;
+ oldval = NULL;
+
+ if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
+ is_weak, success, failure))
+ {
+ expand_ifn_atomic_compare_exchange_into_call (call, mode);
+ return;
+ }
+
+ lhs = gimple_call_lhs (call);
+ if (lhs)
+ {
+ rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+ if (GET_MODE (boolret) != mode)
+ boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
+ write_complex_part (target, boolret, true);
+ write_complex_part (target, oldval, false);
+ }
+}
+
/* Expand the __atomic_load intrinsic:
TYPE __atomic_load (TYPE *object, enum memmodel)
EXP is the CALL_EXPR.