Diffstat (limited to 'gcc/tree-inline.c')
-rw-r--r-- | gcc/tree-inline.c | 145 |
1 file changed, 93 insertions, 52 deletions
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index ee9c895e778..39225ca6ed5 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -36,9 +36,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "cgraph.h"
 #include "intl.h"
 #include "tree-mudflap.h"
-#include "tree-flow.h"
+#include "tree-ssa.h"
 #include "function.h"
-#include "tree-flow.h"
+#include "tree-ssa.h"
 #include "tree-pretty-print.h"
 #include "except.h"
 #include "debug.h"
@@ -751,6 +751,20 @@ copy_gimple_bind (gimple stmt, copy_body_data *id)

   return new_bind;
 }

+/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
+
+static bool
+is_parm (tree decl)
+{
+  if (TREE_CODE (decl) == SSA_NAME)
+    {
+      decl = SSA_NAME_VAR (decl);
+      if (!decl)
+        return false;
+    }
+
+  return (TREE_CODE (decl) == PARM_DECL);
+}
 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
@@ -839,20 +853,24 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)

       if (TREE_CODE (*tp) == MEM_REF)
         {
-          tree ptr = TREE_OPERAND (*tp, 0);
-          tree type = remap_type (TREE_TYPE (*tp), id);
-          tree old = *tp;
-
           /* We need to re-canonicalize MEM_REFs from inline substitutions
              that can happen when a pointer argument is an ADDR_EXPR.
              Recurse here manually to allow that.  */
+          tree ptr = TREE_OPERAND (*tp, 0);
+          tree type = remap_type (TREE_TYPE (*tp), id);
+          tree old = *tp;
           walk_tree (&ptr, remap_gimple_op_r, data, NULL);
-          *tp = fold_build2 (MEM_REF, type,
-                             ptr, TREE_OPERAND (*tp, 1));
-          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
+          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
           TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
           TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
           TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
+             remapped a parameter as the property might be valid only
+             for the parameter itself.  */
+          if (TREE_THIS_NOTRAP (old)
+              && (!is_parm (TREE_OPERAND (old, 0))
+                  || (!id->transform_parameter && is_parm (ptr))))
+            TREE_THIS_NOTRAP (*tp) = 1;
           *walk_subtrees = 0;
           return NULL;
         }
@@ -1041,45 +1059,44 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
           /* Get rid of *& from inline substitutions that can happen when a
              pointer argument is an ADDR_EXPR.  */
           tree decl = TREE_OPERAND (*tp, 0);
-          tree *n;
-
-          n = (tree *) pointer_map_contains (id->decl_map, decl);
+          tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
           if (n)
             {
-              tree new_tree;
-              tree old;
               /* If we happen to get an ADDR_EXPR in n->value, strip
                  it manually here as we'll eventually get ADDR_EXPRs
                  which lie about their types pointed to.  In this case
                  build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                  but we absolutely rely on that.  As fold_indirect_ref
                  does other useful transformations, try that first, though.  */
-              tree type = TREE_TYPE (TREE_TYPE (*n));
-              if (id->do_not_unshare)
-                new_tree = *n;
-              else
-                new_tree = unshare_expr (*n);
-              old = *tp;
-              *tp = gimple_fold_indirect_ref (new_tree);
+              tree type = TREE_TYPE (*tp);
+              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
+              tree old = *tp;
+              *tp = gimple_fold_indirect_ref (ptr);
               if (! *tp)
                 {
-                  if (TREE_CODE (new_tree) == ADDR_EXPR)
+                  if (TREE_CODE (ptr) == ADDR_EXPR)
                     {
-                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
-                                                 type, new_tree);
+                      *tp
+                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                       /* ??? We should either assert here or build
                          a VIEW_CONVERT_EXPR instead of blindly leaking
                          incompatible types to our IL.  */
                       if (! *tp)
-                        *tp = TREE_OPERAND (new_tree, 0);
+                        *tp = TREE_OPERAND (ptr, 0);
                     }
                   else
                     {
-                      *tp = build1 (INDIRECT_REF, type, new_tree);
+                      *tp = build1 (INDIRECT_REF, type, ptr);
                       TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                       TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                       TREE_READONLY (*tp) = TREE_READONLY (old);
-                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
+                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
+                         have remapped a parameter as the property might be
+                         valid only for the parameter itself.  */
+                      if (TREE_THIS_NOTRAP (old)
+                          && (!is_parm (TREE_OPERAND (old, 0))
+                              || (!id->transform_parameter && is_parm (ptr))))
+                        TREE_THIS_NOTRAP (*tp) = 1;
                     }
                 }
               *walk_subtrees = 0;
@@ -1088,20 +1105,24 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
         }
       else if (TREE_CODE (*tp) == MEM_REF)
         {
-          tree ptr = TREE_OPERAND (*tp, 0);
-          tree type = remap_type (TREE_TYPE (*tp), id);
-          tree old = *tp;
-
           /* We need to re-canonicalize MEM_REFs from inline substitutions
              that can happen when a pointer argument is an ADDR_EXPR.
              Recurse here manually to allow that.  */
+          tree ptr = TREE_OPERAND (*tp, 0);
+          tree type = remap_type (TREE_TYPE (*tp), id);
+          tree old = *tp;
           walk_tree (&ptr, copy_tree_body_r, data, NULL);
-          *tp = fold_build2 (MEM_REF, type,
-                             ptr, TREE_OPERAND (*tp, 1));
-          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
+          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
           TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
           TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
           TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
+             remapped a parameter as the property might be valid only
+             for the parameter itself.  */
+          if (TREE_THIS_NOTRAP (old)
+              && (!is_parm (TREE_OPERAND (old, 0))
+                  || (!id->transform_parameter && is_parm (ptr))))
+            TREE_THIS_NOTRAP (*tp) = 1;
           *walk_subtrees = 0;
           return NULL;
         }
@@ -2239,14 +2260,14 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
    as siblings of DEST_PARENT.  */

 static void
-copy_loops (bitmap blocks_to_copy,
+copy_loops (copy_body_data *id,
             struct loop *dest_parent, struct loop *src_parent)
 {
   struct loop *src_loop = src_parent->inner;
   while (src_loop)
     {
-      if (!blocks_to_copy
-          || bitmap_bit_p (blocks_to_copy, src_loop->header->index))
+      if (!id->blocks_to_copy
+          || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
         {
           struct loop *dest_loop = alloc_loop ();

@@ -2270,8 +2291,19 @@ copy_loops (bitmap blocks_to_copy,
           place_new_loop (cfun, dest_loop);
           flow_loop_tree_node_add (dest_parent, dest_loop);

+          if (src_loop->simduid)
+            {
+              dest_loop->simduid = remap_decl (src_loop->simduid, id);
+              cfun->has_simduid_loops = true;
+            }
+          if (src_loop->force_vect)
+            {
+              dest_loop->force_vect = true;
+              cfun->has_force_vect_loops = true;
+            }
+
           /* Recurse.  */
-          copy_loops (blocks_to_copy, dest_loop, src_loop);
+          copy_loops (id, dest_loop, src_loop);
         }
       src_loop = src_loop->next;
     }
@@ -2300,7 +2332,7 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
 static tree
 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
                basic_block entry_block_map, basic_block exit_block_map,
-               bitmap blocks_to_copy, basic_block new_entry)
+               basic_block new_entry)
 {
   tree callee_fndecl = id->src_fn;
   /* Original cfun for the callee, doesn't change.  */
@@ -2364,7 +2396,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,

   /* Use aux pointers to map the original blocks to copy.  */
   FOR_EACH_BB_FN (bb, cfun_to_copy)
-    if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
+    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
       {
         basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
         bb->aux = new_bb;
@@ -2378,8 +2410,8 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   bool can_make_abormal_goto
     = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
   FOR_ALL_BB_FN (bb, cfun_to_copy)
-    if (!blocks_to_copy
-        || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
+    if (!id->blocks_to_copy
+        || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
                                                can_make_abormal_goto);

@@ -2394,12 +2426,10 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   if (loops_for_fn (src_cfun) != NULL
       && current_loops != NULL)
     {
-      copy_loops (blocks_to_copy, entry_block_map->loop_father,
+      copy_loops (id, entry_block_map->loop_father,
                   get_loop (src_cfun, 0));
       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
       loops_state_set (LOOPS_NEED_FIXUP);
-      cfun->has_force_vect_loops |= src_cfun->has_force_vect_loops;
-      cfun->has_simduid_loops |= src_cfun->has_simduid_loops;
     }
   /* If the loop tree in the source function needed fixup, mark the
      destination loop tree for fixup too.  */
@@ -2409,8 +2439,8 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,

   if (gimple_in_ssa_p (cfun))
     FOR_ALL_BB_FN (bb, cfun_to_copy)
-      if (!blocks_to_copy
-          || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
+      if (!id->blocks_to_copy
+          || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
        copy_phis_for_bb (bb, id);

   FOR_ALL_BB_FN (bb, cfun_to_copy)
@@ -2582,7 +2612,7 @@ copy_tree_body (copy_body_data *id)
 static tree
 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
            basic_block entry_block_map, basic_block exit_block_map,
-           bitmap blocks_to_copy, basic_block new_entry)
+           basic_block new_entry)
 {
   tree fndecl = id->src_fn;
   tree body;
@@ -2590,7 +2620,7 @@ copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
   /* If this body has a CFG, walk CFG and copy.  */
   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
-                        blocks_to_copy, new_entry);
+                        new_entry);
   copy_debug_stmts (id);

   return body;
@@ -3742,7 +3772,14 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
       return 0;

     case GIMPLE_ASM:
-      return asm_str_count (gimple_asm_string (stmt));
+      {
+        int count = asm_str_count (gimple_asm_string (stmt));
+        /* 1000 means infinity. This avoids overflows later
+           with very long asm statements.  */
+        if (count > 1000)
+          count = 1000;
+        return count;
+      }

     case GIMPLE_RESX:
       /* This is either going to be an external function call with one
@@ -4192,7 +4229,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
      duplicate our body before altering anything.  */
   copy_body (id, bb->count,
              GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
-             bb, return_block, NULL, NULL);
+             bb, return_block, NULL);

   /* Reset the escaped solution.  */
   if (cfun->gimple_df)
@@ -4434,6 +4471,7 @@ optimize_inline_calls (tree fn)
   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
   id.transform_new_cfg = false;
   id.transform_return_to_modify = true;
+  id.transform_parameter = true;
   id.transform_lang_insert_block = NULL;
   id.statements_to_fold = pointer_set_create ();

@@ -4739,6 +4777,7 @@ copy_gimple_seq_and_replace_locals (gimple_seq seq)
   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
   id.transform_new_cfg = false;
   id.transform_return_to_modify = false;
+  id.transform_parameter = false;
   id.transform_lang_insert_block = NULL;

   /* Walk the tree once to find local labels.  */
@@ -5198,6 +5237,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
   id.transform_new_cfg = true;
   id.transform_return_to_modify = false;
+  id.transform_parameter = false;
   id.transform_lang_insert_block = NULL;

   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
@@ -5319,7 +5359,7 @@ tree_function_versioning (tree old_decl, tree new_decl,

   /* Copy the Function's body.  */
   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
-             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
+             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);

   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
   number_blocks (new_decl);
@@ -5422,6 +5462,7 @@ maybe_inline_call_in_expr (tree exp)
   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
   id.transform_new_cfg = false;
   id.transform_return_to_modify = true;
+  id.transform_parameter = true;
   id.transform_lang_insert_block = NULL;

   /* Make sure not to unshare trees behind the front-end's back
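The guard added around TREE_THIS_NOTRAP in the three hunks above is the semantic core of this change: the flag is carried over only when the old base was not a parameter, or when parameters are not being transformed and the remapped base is still a parameter. The following is a minimal standalone C sketch of that rule, not GCC code; notrap_propagates and its boolean parameters are invented for illustration and stand in for TREE_THIS_NOTRAP, is_parm and id->transform_parameter.

/* Standalone model (not GCC code) of the new TREE_THIS_NOTRAP rule.  */
#include <stdbool.h>
#include <stdio.h>

static bool
notrap_propagates (bool old_notrap, bool old_base_is_parm,
                   bool transform_parameter, bool new_base_is_parm)
{
  /* Mirrors: TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr)))  */
  return old_notrap
         && (!old_base_is_parm
             || (!transform_parameter && new_base_is_parm));
}

int
main (void)
{
  /* Inlining (transform_parameter is true): a no-trap dereference of a
     parameter loses the flag, because the argument substituted for the
     parameter need not share the property.  */
  printf ("%d\n", notrap_propagates (true, true, true, false));  /* prints 0 */

  /* Cloning/versioning (transform_parameter is false): a parameter stays
     a parameter, so the flag may be kept.  */
  printf ("%d\n", notrap_propagates (true, true, false, true));  /* prints 1 */
  return 0;
}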