diff options
Diffstat (limited to 'gcc/tsan.c')
-rw-r--r-- | gcc/tsan.c | 137 |
1 file changed, 104 insertions, 33 deletions
diff --git a/gcc/tsan.c b/gcc/tsan.c index 47764bc568..dd8cd85647 100644 --- a/gcc/tsan.c +++ b/gcc/tsan.c @@ -1,5 +1,5 @@ /* GCC instrumentation plugin for ThreadSanitizer. - Copyright (C) 2011-2016 Free Software Foundation, Inc. + Copyright (C) 2011-2017 Free Software Foundation, Inc. Contributed by Dmitry Vyukov <dvyukov@google.com> This file is part of GCC. @@ -25,6 +25,7 @@ along with GCC; see the file COPYING3. If not see #include "backend.h" #include "rtl.h" #include "tree.h" +#include "memmodel.h" #include "gimple.h" #include "tree-pass.h" #include "ssa.h" @@ -37,9 +38,11 @@ along with GCC; see the file COPYING3. If not see #include "tree-iterator.h" #include "tree-ssa-propagate.h" #include "tree-ssa-loop-ivopts.h" +#include "tree-eh.h" #include "tsan.h" #include "asan.h" #include "builtins.h" +#include "target.h" /* Number of instrumented memory accesses in the current function. */ @@ -110,7 +113,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write) machine_mode mode; int unsignedp, reversep, volatilep = 0; base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode, - &unsignedp, &reversep, &volatilep, false); + &unsignedp, &reversep, &volatilep); /* No need to instrument accesses to decls that don't escape, they can't escape to other threads then. 
*/ @@ -126,9 +129,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write) return false; } - if (TREE_READONLY (base) - || (TREE_CODE (base) == VAR_DECL - && DECL_HARD_REGISTER (base))) + if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base))) return false; stmt = gsi_stmt (gsi); @@ -174,7 +175,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write) if ((align - 1) & bitpos) { align = (align - 1) & bitpos; - align = align & -align; + align = least_bit_hwi (align); } expr = build_fold_addr_expr (unshare_expr (base)); expr = build2 (MEM_REF, char_type_node, expr, @@ -240,7 +241,8 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write) enum tsan_atomic_action { check_last, add_seq_cst, add_acquire, weak_cas, strong_cas, - bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst + bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst, + bool_clear, bool_test_and_set }; /* Table how to map sync/atomic builtins to their corresponding @@ -274,6 +276,10 @@ static const struct tsan_map_atomic TRANSFORM (fcode, tsan_fcode, fetch_op, code) #define FETCH_OPS(fcode, tsan_fcode, code) \ TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code) +#define BOOL_CLEAR(fcode, tsan_fcode) \ + TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK) +#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \ + TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK) CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD), CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD), @@ -463,7 +469,11 @@ static const struct tsan_map_atomic LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE), LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE), LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE), - LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE) + LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE), + + BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE), + + BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE) }; /* 
Instrument an atomic builtin. */ @@ -490,11 +500,12 @@ instrument_builtin_call (gimple_stmt_iterator *gsi) case check_last: case fetch_op: last_arg = gimple_call_arg (stmt, num - 1); - if (!tree_fits_uhwi_p (last_arg) - || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST) + if (tree_fits_uhwi_p (last_arg) + && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST) return; gimple_call_set_fndecl (stmt, decl); update_stmt (stmt); + maybe_clean_eh_stmt (stmt); if (tsan_atomic_table[i].action == fetch_op) { args[1] = gimple_call_arg (stmt, 1); @@ -515,6 +526,7 @@ instrument_builtin_call (gimple_stmt_iterator *gsi) ? MEMMODEL_SEQ_CST : MEMMODEL_ACQUIRE); update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]); + maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi)); stmt = gsi_stmt (*gsi); if (tsan_atomic_table[i].action == fetch_op_seq_cst) { @@ -555,14 +567,15 @@ instrument_builtin_call (gimple_stmt_iterator *gsi) gcc_assert (num == 6); for (j = 0; j < 6; j++) args[j] = gimple_call_arg (stmt, j); - if (!tree_fits_uhwi_p (args[4]) - || memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST) + if (tree_fits_uhwi_p (args[4]) + && memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST) return; - if (!tree_fits_uhwi_p (args[5]) - || memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST) + if (tree_fits_uhwi_p (args[5]) + && memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST) return; update_gimple_call (gsi, decl, 5, args[0], args[1], args[2], args[4], args[5]); + maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi)); return; case bool_cas: case val_cas: @@ -590,6 +603,7 @@ instrument_builtin_call (gimple_stmt_iterator *gsi) MEMMODEL_SEQ_CST), build_int_cst (NULL_TREE, MEMMODEL_SEQ_CST)); + maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi)); if (tsan_atomic_table[i].action == val_cas && lhs) { tree cond; @@ -614,6 +628,60 @@ instrument_builtin_call (gimple_stmt_iterator *gsi) build_int_cst (t, 0), build_int_cst (NULL_TREE, 
MEMMODEL_RELEASE)); + maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi)); + return; + case bool_clear: + case bool_test_and_set: + if (BOOL_TYPE_SIZE != 8) + { + decl = NULL_TREE; + for (j = 1; j < 5; j++) + if (BOOL_TYPE_SIZE == (8 << j)) + { + enum built_in_function tsan_fcode + = (enum built_in_function) + (tsan_atomic_table[i].tsan_fcode + j); + decl = builtin_decl_implicit (tsan_fcode); + break; + } + if (decl == NULL_TREE) + return; + } + last_arg = gimple_call_arg (stmt, num - 1); + if (tree_fits_uhwi_p (last_arg) + && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST) + return; + t = TYPE_ARG_TYPES (TREE_TYPE (decl)); + t = TREE_VALUE (TREE_CHAIN (t)); + if (tsan_atomic_table[i].action == bool_clear) + { + update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0), + build_int_cst (t, 0), last_arg); + maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi)); + return; + } + t = build_int_cst (t, targetm.atomic_test_and_set_trueval); + update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0), + t, last_arg); + maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi)); + stmt = gsi_stmt (*gsi); + lhs = gimple_call_lhs (stmt); + if (lhs == NULL_TREE) + return; + if (targetm.atomic_test_and_set_trueval != 1 + || !useless_type_conversion_p (TREE_TYPE (lhs), + TREE_TYPE (t))) + { + tree new_lhs = make_ssa_name (TREE_TYPE (t)); + gimple_call_set_lhs (stmt, new_lhs); + if (targetm.atomic_test_and_set_trueval != 1) + g = gimple_build_assign (lhs, NE_EXPR, new_lhs, + build_int_cst (TREE_TYPE (t), 0)); + else + g = gimple_build_assign (lhs, NOP_EXPR, new_lhs); + gsi_insert_after (gsi, g, GSI_NEW_STMT); + update_stmt (stmt); + } return; default: continue; @@ -706,7 +774,7 @@ instrument_func_exit (void) Return true if func entry/exit should be instrumented. 
*/ static bool -instrument_memory_accesses (void) +instrument_memory_accesses (bool *cfg_changed) { basic_block bb; gimple_stmt_iterator gsi; @@ -715,22 +783,24 @@ instrument_memory_accesses (void) auto_vec<gimple *> tsan_func_exits; FOR_EACH_BB_FN (bb, cfun) - for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) - { - gimple *stmt = gsi_stmt (gsi); - if (is_gimple_call (stmt) - && gimple_call_internal_p (stmt) - && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT) - { - if (fentry_exit_instrument) - replace_func_exit (stmt); - else - tsan_func_exits.safe_push (stmt); - func_exit_seen = true; - } - else - fentry_exit_instrument |= instrument_gimple (&gsi); - } + { + for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) + { + gimple *stmt = gsi_stmt (gsi); + if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT)) + { + if (fentry_exit_instrument) + replace_func_exit (stmt); + else + tsan_func_exits.safe_push (stmt); + func_exit_seen = true; + } + else + fentry_exit_instrument |= instrument_gimple (&gsi); + } + if (gimple_purge_dead_eh_edges (bb)) + *cfg_changed = true; + } unsigned int i; gimple *stmt; FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt) @@ -777,9 +847,10 @@ static unsigned tsan_pass (void) { initialize_sanitizer_builtins (); - if (instrument_memory_accesses ()) + bool cfg_changed = false; + if (instrument_memory_accesses (&cfg_changed)) instrument_func_entry (); - return 0; + return cfg_changed ? TODO_cleanup_cfg : 0; } /* Inserts __tsan_init () into the list of CTORs. */ |