path: root/gcc/tree-ssa-operands.c
author     amacleod <amacleod@138bc75d-0d04-0410-961f-82ee72b054a4>  2004-11-25 20:24:59 +0000
committer  amacleod <amacleod@138bc75d-0d04-0410-961f-82ee72b054a4>  2004-11-25 20:24:59 +0000
commit     39b644e9c5f0bff23408b78a2af980d6b49a05ea (patch)
tree       e6a3919cd01a314a36fdb8c06e651c13ea0543de /gcc/tree-ssa-operands.c
parent     78d608e8bd7e417f8aa0f3e193d1a8624dd0424e (diff)
download   gcc-39b644e9c5f0bff23408b78a2af980d6b49a05ea.tar.gz
2004-11-25 Andrew Macleod <amacleod@redhat.com>
	PR tree-optimization/18587
	* tree-flow-inline.h (mark_call_clobbered, mark_non_addressable): Flag
	call clobbered caches as invalid.
	* tree-ssa-operands.c (ssa_call_clobbered_cache_valid): New.  Flag
	indicating whether the call clobbered operand cache is valid.
	(ssa_ro_call_cache_valid): New.  Flag indicating whether the pure/const
	call operand cache is valid.
	(clobbered_v_may_defs, clobbered_vuses, ro_call_vuses): New.  Cached
	list of operands for cached call virtual operands.
	(clobbered_aliased_loads, clobbered_aliased_stores,
	ro_call_aliased_load): New.  Flags caching whether alias bits are to be
	set in call stmts.
	(fini_ssa_operands): Remove call operand caches if present.
	(get_expr_operands, get_asm_expr_operands, get_indirect_ref_operands):
	Pass stmt annotation to add_stmt_operand.
	(get_call_expr_operands): Add call clobbered variables first.
	(add_stmt_operand): Take stmt annotation rather than stmt as a param.
	(add_call_clobber_ops, add_call_read_ops): Use the call operand cache
	if it is valid, otherwise fill the cache.
	* tree-ssa-operands.h (ssa_clobbered_cache_valid): Declare extern.
	* tree-flow.h (struct var_ann_d): Add in_vuse_list and
	in_v_may_def_list bits.
	* tree-ssa-operands.c (cleanup_v_may_defs): New.  Clear the in_list
	bits for the v_may_def elements and empty the operand build array.
	(finalize_ssa_vuses): Use cleanup_v_may_defs and remove redundant
	VUSES by checking the in_v_may_def_list bit.
	(append_v_may_def, append_vuse): Use the in_list bit rather than
	scanning the array for duplicates.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@91305 138bc75d-0d04-0410-961f-82ee72b054a4
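The heart of the patch is twofold: per-variable membership bits (in_vuse_list, in_v_may_def_list) replace the linear duplicate scan in append_vuse and append_v_may_def, and the virtual operands for call-clobbering calls are computed once and cached until something invalidates the cache. As a rough illustration of the first idea only, here is a self-contained C sketch; the var_entry type, the fixed-size build array, and the function names are invented for the example and are not GCC's varray/var_ann machinery.

/* Minimal sketch of the "membership bit" de-duplication pattern used by
   this patch: instead of scanning the build array for duplicates on every
   append (O(n) per insert), each variable carries an in_list bit that is
   set on first insertion and cleared again when the array is finalized.  */

#include <stdio.h>

struct var_entry
{
  const char *name;
  unsigned in_list : 1;   /* Analogue of in_vuse_list / in_v_may_def_list.  */
};

#define MAX_OPS 16
static struct var_entry *build_ops[MAX_OPS];
static unsigned num_ops;

/* Append VAR unless it is already in the build array.  */
static void
append_op (struct var_entry *var)
{
  if (var->in_list)
    return;                       /* Duplicate; no scan needed.  */
  var->in_list = 1;
  if (num_ops < MAX_OPS)
    build_ops[num_ops++] = var;
}

/* Consume the array and clear the bits so the next statement starts from
   a clean state (analogous to cleanup_v_may_defs).  */
static void
finalize_ops (void)
{
  for (unsigned i = 0; i < num_ops; i++)
    {
      printf ("operand: %s\n", build_ops[i]->name);
      build_ops[i]->in_list = 0;
    }
  num_ops = 0;
}

int
main (void)
{
  struct var_entry a = { "a", 0 }, b = { "b", 0 };

  append_op (&a);
  append_op (&b);
  append_op (&a);   /* Ignored: the bit is already set.  */
  finalize_ops ();  /* Prints "a" and "b" once each, clears the bits.  */
  return 0;
}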
Diffstat (limited to 'gcc/tree-ssa-operands.c')
-rw-r--r--   gcc/tree-ssa-operands.c   330
1 file changed, 241 insertions, 89 deletions
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 18af264ea0d..9138e677450 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -116,6 +116,17 @@ static GTY (()) varray_type build_vuses;
/* Array for building all the v_must_def operands. */
static GTY (()) varray_type build_v_must_defs;
+/* True if the operands for call clobbered vars are cached and valid. */
+bool ssa_call_clobbered_cache_valid;
+bool ssa_ro_call_cache_valid;
+
+/* These arrays are the cached operand vectors for call clobbered calls. */
+static GTY (()) varray_type clobbered_v_may_defs;
+static GTY (()) varray_type clobbered_vuses;
+static GTY (()) varray_type ro_call_vuses;
+static bool clobbered_aliased_loads;
+static bool clobbered_aliased_stores;
+static bool ro_call_aliased_loads;
#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt. */
@@ -136,7 +147,7 @@ static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
-static void add_stmt_operand (tree *, tree, int);
+static void add_stmt_operand (tree *, stmt_ann_t, int);
/* Return a vector of contiguous memory for NUM def operands. */
@@ -302,6 +313,18 @@ fini_ssa_operands (void)
build_v_may_defs = NULL;
build_vuses = NULL;
build_v_must_defs = NULL;
+ if (clobbered_v_may_defs)
+ {
+ ggc_free (clobbered_v_may_defs);
+ ggc_free (clobbered_vuses);
+ clobbered_v_may_defs = NULL;
+ clobbered_vuses = NULL;
+ }
+ if (ro_call_vuses)
+ {
+ ggc_free (ro_call_vuses);
+ ro_call_vuses = NULL;
+ }
}
@@ -490,6 +513,23 @@ finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
}
+/* Clear the in_list bits and empty the build array for v_may_defs. */
+
+static inline void
+cleanup_v_may_defs (void)
+{
+ unsigned x, num;
+ num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+
+ for (x = 0; x < num; x++)
+ {
+ tree t = VARRAY_TREE (build_v_may_defs, x);
+ var_ann_t ann = var_ann (t);
+ ann->in_v_may_def_list = 0;
+ }
+ VARRAY_POP_ALL (build_v_may_defs);
+}
+
/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
static vuse_optype
@@ -502,7 +542,7 @@ finalize_ssa_vuses (vuse_optype *old_ops_p)
num = VARRAY_ACTIVE_SIZE (build_vuses);
if (num == 0)
{
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return NULL;
}
@@ -522,44 +562,55 @@ finalize_ssa_vuses (vuse_optype *old_ops_p)
if (num_v_may_defs > 0)
{
- size_t i, j;
+ size_t i;
tree vuse;
for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
{
vuse = VARRAY_TREE (build_vuses, i);
- for (j = 0; j < num_v_may_defs; j++)
+ if (TREE_CODE (vuse) != SSA_NAME)
{
- if (vuse == VARRAY_TREE (build_v_may_defs, j))
- break;
- }
-
- /* If we found a useless VUSE operand, remove it from the
- operand array by replacing it with the last active element
- in the operand array (unless the useless VUSE was the
- last operand, in which case we simply remove it. */
- if (j != num_v_may_defs)
- {
- if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
- {
- VARRAY_TREE (build_vuses, i)
- = VARRAY_TREE (build_vuses,
- VARRAY_ACTIVE_SIZE (build_vuses) - 1);
+ var_ann_t ann = var_ann (vuse);
+ ann->in_vuse_list = 0;
+ if (ann->in_v_may_def_list)
+ {
+ /* If we found a useless VUSE operand, remove it from the
+ operand array by replacing it with the last active element
+ in the operand array (unless the useless VUSE was the
+ last operand, in which case we simply remove it). */
+ if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
+ {
+ VARRAY_TREE (build_vuses, i)
+ = VARRAY_TREE (build_vuses,
+ VARRAY_ACTIVE_SIZE (build_vuses) - 1);
+ }
+ VARRAY_POP (build_vuses);
+
+ /* We want to rescan the element at this index, unless
+ this was the last element, in which case the loop
+ terminates. */
+ i--;
}
- VARRAY_POP (build_vuses);
-
- /* We want to rescan the element at this index, unless
- this was the last element, in which case the loop
- terminates. */
- i--;
}
}
}
+ else
+ /* Clear out the in_list bits. */
+ for (x = 0; x < num; x++)
+ {
+ tree t = VARRAY_TREE (build_vuses, x);
+ if (TREE_CODE (t) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (t);
+ ann->in_vuse_list = 0;
+ }
+ }
+
num = VARRAY_ACTIVE_SIZE (build_vuses);
/* We could have reduced the size to zero now, however. */
if (num == 0)
{
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return NULL;
}
@@ -618,7 +669,7 @@ finalize_ssa_vuses (vuse_optype *old_ops_p)
/* The v_may_def build vector wasn't freed because we needed it here.
Free it now with the vuses build vector. */
VARRAY_POP_ALL (build_vuses);
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return vuse_ops;
}
@@ -751,12 +802,12 @@ append_use (tree *use_p)
static inline void
append_v_may_def (tree var)
{
- unsigned i;
+ var_ann_t ann = get_var_ann (var);
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
- if (var == VARRAY_TREE (build_v_may_defs, i))
- return;
+ if (ann->in_v_may_def_list)
+ return;
+ ann->in_v_may_def_list = 1;
VARRAY_PUSH_TREE (build_v_may_defs, var);
}
@@ -767,12 +818,16 @@ append_v_may_def (tree var)
static inline void
append_vuse (tree var)
{
- size_t i;
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- if (var == VARRAY_TREE (build_vuses, i))
- return;
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ var_ann_t ann = get_var_ann (var);
+
+ if (ann->in_vuse_list || ann->in_v_may_def_list)
+ return;
+ ann->in_vuse_list = 1;
+ }
VARRAY_PUSH_TREE (build_vuses, var);
}
@@ -972,6 +1027,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
enum tree_code code;
enum tree_code_class class;
tree expr = *expr_p;
+ stmt_ann_t s_ann = stmt_ann (stmt);
if (expr == NULL || expr == error_mark_node)
return;
@@ -987,7 +1043,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
/* Taking the address of a variable does not represent a
reference to it, but the fact that the stmt takes its address will be
of interest to some passes (e.g. alias resolution). */
- add_stmt_operand (expr_p, stmt, 0);
+ add_stmt_operand (expr_p, s_ann, 0);
/* If the address is invariant, there may be no interesting variable
references inside. */
@@ -1010,7 +1066,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case CONST_DECL:
/* If we found a variable, add it to DEFS or USES depending
on the operand flags. */
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
return;
case MISALIGNED_INDIRECT_REF:
@@ -1032,7 +1088,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
according to the value of IS_DEF. Recurse if the LHS of the
ARRAY_REF node is not a regular variable. */
if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
else
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
@@ -1060,7 +1116,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
/* If the LHS of the compound reference is not a regular variable,
recurse to keep looking for more operands in the subexpression. */
if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
else
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
@@ -1273,19 +1329,19 @@ get_asm_expr_operands (tree stmt)
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
decided to group them). */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_is_def);
+ add_stmt_operand (&global_var, s_ann, opf_is_def);
else
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_is_def);
+ add_stmt_operand (&var, s_ann, opf_is_def);
}
/* Now clobber all addressables. */
EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_is_def);
+ add_stmt_operand (&var, s_ann, opf_is_def);
}
break;
@@ -1300,7 +1356,7 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
tree ptr = *pptr;
- stmt_ann_t ann = stmt_ann (stmt);
+ stmt_ann_t s_ann = stmt_ann (stmt);
/* Stores into INDIRECT_REF operands are never killing definitions. */
flags &= ~opf_kill_def;
@@ -1327,13 +1383,13 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags)
&& pi->name_mem_tag)
{
/* PTR has its own memory tag. Use it. */
- add_stmt_operand (&pi->name_mem_tag, stmt, flags);
+ add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
}
else
{
/* If PTR is not an SSA_NAME or it doesn't have a name
tag, use its type memory tag. */
- var_ann_t ann;
+ var_ann_t v_ann;
/* If we are emitting debugging dumps, display a warning if
PTR is an SSA_NAME with no flow-sensitive alias
@@ -1352,9 +1408,9 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags)
if (TREE_CODE (ptr) == SSA_NAME)
ptr = SSA_NAME_VAR (ptr);
- ann = var_ann (ptr);
- if (ann->type_mem_tag)
- add_stmt_operand (&ann->type_mem_tag, stmt, flags);
+ v_ann = var_ann (ptr);
+ if (v_ann->type_mem_tag)
+ add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
}
}
@@ -1363,8 +1419,8 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags)
optimizations from messing things up. */
else if (TREE_CODE (ptr) == INTEGER_CST)
{
- if (ann)
- ann->has_volatile_ops = true;
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
return;
}
@@ -1379,7 +1435,7 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
/* Make sure we know the object is addressable. */
pptr = &TREE_OPERAND (ptr, 0);
- add_stmt_operand (pptr, stmt, 0);
+ add_stmt_operand (pptr, s_ann, 0);
/* Mark the object itself with a VUSE. */
pptr = &TREE_OPERAND (*pptr, 0);
@@ -1403,14 +1459,6 @@ get_call_expr_operands (tree stmt, tree expr)
tree op;
int call_flags = call_expr_flags (expr);
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
-
if (!bitmap_empty_p (call_clobbered_vars))
{
/* A 'pure' or a 'const' functions never call clobber anything.
@@ -1422,6 +1470,15 @@ get_call_expr_operands (tree stmt, tree expr)
else if (!(call_flags & ECF_CONST))
add_call_read_ops (stmt);
}
+
+ /* Find uses in the called function. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+
+ for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
+ get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+
}
@@ -1431,11 +1488,10 @@ get_call_expr_operands (tree stmt, tree expr)
operands. */
static void
-add_stmt_operand (tree *var_p, tree stmt, int flags)
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
bool is_real_op;
tree var, sym;
- stmt_ann_t s_ann = stmt_ann (stmt);
var_ann_t v_ann;
var = *var_p;
@@ -1586,32 +1642,92 @@ note_addressable (tree var, stmt_ann_t s_ann)
static void
add_call_clobber_ops (tree stmt)
{
+ unsigned i;
+ tree t;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ struct stmt_ann_d empty_ann;
+
/* Functions that are not const, pure or never return may clobber
call-clobbered variables. */
- if (stmt_ann (stmt))
- stmt_ann (stmt)->makes_clobbering_call = true;
+ if (s_ann)
+ s_ann->makes_clobbering_call = true;
- /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
- a V_MAY_DEF operand for every call clobbered variable. See
- compute_may_aliases for the heuristic used to decide whether
- to create .GLOBAL_VAR or not. */
+ /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
+ for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_is_def);
- else
{
- unsigned i;
- bitmap_iterator bi;
+ add_stmt_operand (&global_var, s_ann, opf_is_def);
+ return;
+ }
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ /* If cache is valid, copy the elements into the build vectors. */
+ if (ssa_call_clobbered_cache_valid)
+ {
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
{
- tree var = referenced_var (i);
- if (TREE_READONLY (var)
- && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
- add_stmt_operand (&var, stmt, opf_none);
- else
- add_stmt_operand (&var, stmt, opf_is_def);
+ t = VARRAY_TREE (clobbered_vuses, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_vuse_list = 1;
+ VARRAY_PUSH_TREE (build_vuses, t);
+ }
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
+ {
+ t = VARRAY_TREE (clobbered_v_may_defs, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_v_may_def_list = 1;
+ VARRAY_PUSH_TREE (build_v_may_defs, t);
}
+ if (s_ann)
+ {
+ s_ann->makes_aliased_loads = clobbered_aliased_loads;
+ s_ann->makes_aliased_stores = clobbered_aliased_stores;
+ }
+ return;
+ }
+
+ memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+
+ /* Add a V_MAY_DEF operand for every call clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ if (TREE_READONLY (var)
+ && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+ add_stmt_operand (&var, &empty_ann, opf_none);
+ else
+ add_stmt_operand (&var, &empty_ann, opf_is_def);
+ }
+
+ clobbered_aliased_loads = empty_ann.makes_aliased_loads;
+ clobbered_aliased_stores = empty_ann.makes_aliased_stores;
+
+ /* Set the flags for a stmt's annotation. */
+ if (s_ann)
+ {
+ s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+ s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+ }
+
+ /* Prepare empty cache vectors. */
+ if (clobbered_v_may_defs)
+ {
+ VARRAY_POP_ALL (clobbered_vuses);
+ VARRAY_POP_ALL (clobbered_v_may_defs);
}
+ else
+ {
+ VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
+ VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
+ }
+
+ /* Now fill the clobbered cache with the values that have been found. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+ VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
+ VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));
+
+ ssa_call_clobbered_cache_valid = true;
}
@@ -1621,24 +1737,60 @@ add_call_clobber_ops (tree stmt)
static void
add_call_read_ops (tree stmt)
{
+ unsigned i;
+ tree t;
bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ struct stmt_ann_d empty_ann;
- /* Otherwise, if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
- for each call-clobbered variable. See add_referenced_var for the
- heuristic used to decide whether to create .GLOBAL_VAR. */
+ /* If the function is not pure, it may reference memory. Add
+ a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
+ for the heuristic used to decide whether to create .GLOBAL_VAR. */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_none);
- else
{
- unsigned i;
-
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ add_stmt_operand (&global_var, s_ann, opf_none);
+ return;
+ }
+
+ /* If cache is valid, copy the elements into the build vector. */
+ if (ssa_ro_call_cache_valid)
+ {
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
{
- tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_none);
+ t = VARRAY_TREE (ro_call_vuses, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_vuse_list = 1;
+ VARRAY_PUSH_TREE (build_vuses, t);
}
+ if (s_ann)
+ s_ann->makes_aliased_loads = ro_call_aliased_loads;
+ return;
+ }
+
+ memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+
+ /* Add a VUSE for each call-clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, &empty_ann, opf_none);
}
+
+ ro_call_aliased_loads = empty_ann.makes_aliased_loads;
+ if (s_ann)
+ s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+
+ /* Prepare empty cache vectors. */
+ if (ro_call_vuses)
+ VARRAY_POP_ALL (ro_call_vuses);
+ else
+ VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
+
+ /* Now fill the read-only call cache with the values that have been found. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+ VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));
+
+ ssa_ro_call_cache_valid = true;
}
/* Copies virtual operands from SRC to DST. */