diff options
author | dberlin <dberlin@138bc75d-0d04-0410-961f-82ee72b054a4> | 2007-08-19 23:23:29 +0000 |
---|---|---|
committer | dberlin <dberlin@138bc75d-0d04-0410-961f-82ee72b054a4> | 2007-08-19 23:23:29 +0000 |
commit | a6db8f1419b14fc5afed873dbc6ddc25cab75bf7 (patch) | |
tree | baa8e18c063d5687f098c1d8745169221e56ef6a /gcc/tree-ssa-alias.c | |
parent | 662e4881030345c1e1e37a422ca3e3af928fc931 (diff) | |
download | gcc-a6db8f1419b14fc5afed873dbc6ddc25cab75bf7.tar.gz |
2007-08-19 Daniel Berlin <dberlin@dberlin.org>
Fix PR 32772
Fix PR 32716
Fix PR 32328
Fix PR 32303
* tree-flow.h (struct stmt_ann_d): Remove makes_clobbering_call.
* tree-ssa-alias.c (init_transitive_clobber_worklist): Add
on_worklist argument and avoid adding things to worklist multiple
times.
(add_to_worklist): Ditto.
(mark_aliases_call_clobbered): Mark entire structure clobbered if
single SFT is clobbered.
(set_initial_properties): Ditto.
(compute_call_clobbered): Update for changes to function
arguments.
(create_overlap_variables_for): Always create SFT for offset 0.
(create_structure_vars): Handle PHI's, since we are in SSA form at
this point.
* tree-ssa-loop-ivopts.c (get_ref_tag): Don't return subvars.
* tree-ssa-operands.c (access_can_touch_variable): Don't handle
TARGET_MEM_REF.
(add_vars_for_offset): Figure out aliases from access + points-to.
(add_virtual_operand): Use add_vars_for_offset.
(get_tmr_operands): Update for NMT changes, rewrite to be correct.
(add_call_clobber_ops): Remove makes_clobbering_call set.
(get_expr_operands): Always pass through the INDIRECT_REF
reference.
* tree-ssa-structalias.c (struct constraint_graph): Remove
variables member.
Add pe, pe_rep, pointer_label, loc_label, pointed_by, points_to,
address_taken, pt_used, number_incoming.
(FIRST_ADDR_NODE): Removed.
(merge_graph_nodes): Remove broken code for the moment.
(init_graph): New function.
(build_pred_graph): Remove code to init_graph.
Add location equivalence support.
(struct scc_info): Rename roots to deleted.
(scc_visit): Ditto.
(init_scc_info): Ditto.
(init_topo_info): Use graph->size.
(compute_topo_order): Ditto.
(do_da_constraint): Removed.
(do_sd_constraint): Remove calls to find().
set_union_with_increment should always get 0 as last arg here.
(do_complex_constraint): Replace do_da_constraint with assert.
Stop calling find.
(struct equiv_class_label): New.
(pointer_equiv_class_table): Ditto.
(location_equiv_class_table): Ditto.
(equiv_class_label_hash): Ditto.
(equiv_class_label_eq): Ditto.
(equiv_class_lookup): Ditto.
(equiv_class_ladd): Ditto.
(pointer_equiv_class): Ditto.
(location_equiv_class): Ditto.
(condense_visit): Rename and rewrite from label_visit to do only
SCC related stuff for HU.
(label_visit): Do HU work for HU.
(perform_var_substitution): Update to do HU and location
equivalence.
(free_var_substitution_info): Update to free HU and location
equivalence structures.
(find_equivalent_node): Update for pointer but not location
equivalence.
(unite_pointer_equivalences): New function.
(move_complex_constraints): Rewrite to only do moving.
(rewrite_constraints): Split out of move_complex_constraints.
(solve_graph): Use graph->size.
(process_constraint_1): Add from_call argument, use it.
Split *a = &b into two constraints.
(process_constraint): Use new process_constraint_1.
(get_constraint_for_component_ref): Handle bitmaxsize == -1 case.
(get_constraint_for): Handle non-pointer integers properly.
Remove code that used to handle structures.
(handle_ptr_arith): Fix a few bugs in pointer arithmetic handling
with unknown addends.
(handle_rhs_call): New function.
(find_func_aliases): Use handle_rhs_call.
(set_uids_in_ptset): Add an assert.
(set_used_smts): Fix bug in not considering unified vars.
(compute_tbaa_pruning): Stop initing useless iteration_obstack.
(compute_points_to_sets): Update for other function changes.
(delete_points_to_sets): Ditto.
(ipa_pta_execute): Ditto.
(pass_ipa_pta): We need to update SSA after ipa_pta.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@127629 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/tree-ssa-alias.c')
-rw-r--r-- | gcc/tree-ssa-alias.c | 107 |
1 files changed, 84 insertions, 23 deletions
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c index 3a490fcc225..103a0231fb3 100644 --- a/gcc/tree-ssa-alias.c +++ b/gcc/tree-ssa-alias.c @@ -322,7 +322,8 @@ sort_tags_by_id (const void *pa, const void *pb) static void init_transitive_clobber_worklist (VEC (tree, heap) **worklist, - VEC (int, heap) **worklist2) + VEC (int, heap) **worklist2, + bitmap on_worklist) { referenced_var_iterator rvi; tree curr; @@ -332,7 +333,9 @@ init_transitive_clobber_worklist (VEC (tree, heap) **worklist, if (MTAG_P (curr) && is_call_clobbered (curr)) { VEC_safe_push (tree, heap, *worklist, curr); - VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask); + VEC_safe_push (int, heap, *worklist2, + var_ann (curr)->escape_mask); + bitmap_set_bit (on_worklist, DECL_UID (curr)); } } } @@ -343,13 +346,15 @@ init_transitive_clobber_worklist (VEC (tree, heap) **worklist, static void add_to_worklist (tree alias, VEC (tree, heap) **worklist, - VEC (int, heap) **worklist2, - int reason) + VEC (int, heap) **worklist2, int reason, + bitmap on_worklist) { - if (MTAG_P (alias) && !is_call_clobbered (alias)) + if (MTAG_P (alias) && !is_call_clobbered (alias) + && !bitmap_bit_p (on_worklist, DECL_UID (alias))) { VEC_safe_push (tree, heap, *worklist, alias); VEC_safe_push (int, heap, *worklist2, reason); + bitmap_set_bit (on_worklist, DECL_UID (alias)); } } @@ -358,7 +363,8 @@ add_to_worklist (tree alias, VEC (tree, heap) **worklist, static void mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist, - VEC (int, heap) **worklist2) + VEC (int, heap) **worklist2, + bitmap on_worklist) { bitmap aliases; bitmap_iterator bi; @@ -375,9 +381,23 @@ mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist, EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi) { entry = referenced_var (i); - if (!unmodifiable_var_p (entry)) + /* If you clobber one part of a structure, you + clobber the entire thing. 
While this does not make + the world a particularly nice place, it is necessary + in order to allow C/C++ tricks that involve + pointer arithmetic to work. */ + if (TREE_CODE (entry) == STRUCT_FIELD_TAG) { - add_to_worklist (entry, worklist, worklist2, ta->escape_mask); + subvar_t svars; + svars = get_subvars_for_var (SFT_PARENT_VAR (entry)); + for (; svars; svars = svars->next) + if (!unmodifiable_var_p (entry)) + mark_call_clobbered (svars->var, ta->escape_mask); + } + else if (!unmodifiable_var_p (entry)) + { + add_to_worklist (entry, worklist, worklist2, ta->escape_mask, + on_worklist); mark_call_clobbered (entry, ta->escape_mask); } } @@ -528,8 +548,25 @@ set_initial_properties (struct alias_info *ai) bitmap_iterator bi; unsigned int j; EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi) - if (!unmodifiable_var_p (referenced_var (j))) - mark_call_clobbered (referenced_var (j), pi->escape_mask); + { + tree alias = referenced_var (j); + + /* If you clobber one part of a structure, you + clobber the entire thing. While this does not make + the world a particularly nice place, it is necessary + in order to allow C/C++ tricks that involve + pointer arithmetic to work. 
*/ + if (TREE_CODE (alias) == STRUCT_FIELD_TAG) + { + subvar_t svars; + svars = get_subvars_for_var (SFT_PARENT_VAR (alias)); + for (; svars; svars = svars->next) + if (!unmodifiable_var_p (alias)) + mark_call_clobbered (svars->var, pi->escape_mask); + } + else if (!unmodifiable_var_p (alias)) + mark_call_clobbered (alias, pi->escape_mask); + } } } @@ -573,21 +610,27 @@ static void compute_call_clobbered (struct alias_info *ai) { VEC (tree, heap) *worklist = NULL; - VEC(int,heap) *worklist2 = NULL; - + VEC (int,heap) *worklist2 = NULL; + bitmap on_worklist; + timevar_push (TV_CALL_CLOBBER); + on_worklist = BITMAP_ALLOC (NULL); + set_initial_properties (ai); - init_transitive_clobber_worklist (&worklist, &worklist2); + init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist); while (VEC_length (tree, worklist) != 0) { tree curr = VEC_pop (tree, worklist); int reason = VEC_pop (int, worklist2); - + + bitmap_clear_bit (on_worklist, DECL_UID (curr)); mark_call_clobbered (curr, reason); - mark_aliases_call_clobbered (curr, &worklist, &worklist2); + mark_aliases_call_clobbered (curr, &worklist, &worklist2, + on_worklist); } VEC_free (tree, heap, worklist); VEC_free (int, heap, worklist2); + BITMAP_FREE (on_worklist); compute_tag_properties (); timevar_pop (TV_CALL_CLOBBER); } @@ -3783,11 +3826,14 @@ create_overlap_variables_for (tree var) /* If this field isn't in the used portion, or it has the exact same offset and size as the last - field, skip it. */ - - if (((fo->offset <= up->minused - && fo->offset + fosize <= up->minused) - || fo->offset >= up->maxused) + field, skip it. Note that we always need the field at + offset 0 so we can properly handle pointers to the + structure. 
*/ + + if ((fo->offset != 0 + && ((fo->offset <= up->minused + && fo->offset + fosize <= up->minused) + || fo->offset >= up->maxused)) || (fo->offset == lastfooffset && fosize == lastfosize && currfotype == lastfotype)) @@ -3975,6 +4021,21 @@ create_structure_vars (void) FOR_EACH_BB (bb) { block_stmt_iterator bsi; + tree phi; + + for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi)) + { + use_operand_p use; + ssa_op_iter iter; + + FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE) + { + tree op = USE_FROM_PTR (use); + walk_tree_without_duplicates (&op, find_used_portions, + NULL); + } + } + for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) { walk_tree_without_duplicates (bsi_stmt_ptr (bsi), @@ -4013,7 +4074,7 @@ create_structure_vars (void) tree sym = referenced_var_lookup (i); if (get_subvars_for_var (sym)) { - update=true; + update = true; break; } } @@ -4024,7 +4085,7 @@ create_structure_vars (void) tree sym = referenced_var_lookup (i); if (get_subvars_for_var (sym)) { - update=true; + update = true; break; } } @@ -4036,7 +4097,7 @@ create_structure_vars (void) tree sym = referenced_var_lookup (i); if (get_subvars_for_var (sym)) { - update=true; + update = true; break; } } |