path: root/gcc/tree-ssa-operands.c
author     Lorry Tar Creator <lorry-tar-importer@baserock.org>   2014-10-30 09:35:42 +0000
committer  <>                                                    2015-01-09 11:51:27 +0000
commit     c27a97d04853380f1e80525391b3f0d156ed4c84 (patch)
tree       68ffaade7c605bc80cffa18360799c98a810976f /gcc/tree-ssa-operands.c
parent     6af3fdec2262dd94954acc5e426ef71cbd4521d3 (diff)
download   gcc-tarball-c27a97d04853380f1e80525391b3f0d156ed4c84.tar.gz
Imported from /home/lorry/working-area/delta_gcc-tarball/gcc-4.9.2.tar.bz2.gcc-4.9.2
Diffstat (limited to 'gcc/tree-ssa-operands.c')
-rw-r--r--   gcc/tree-ssa-operands.c   630
1 file changed, 249 insertions, 381 deletions
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index ed0d34de2d..76d046309b 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -1,6 +1,5 @@
/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright (C) 2003-2014 Free Software Foundation, Inc.
This file is part of GCC.
@@ -23,14 +22,26 @@ along with GCC; see the file COPYING3. If not see
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "stmt.h"
+#include "print-tree.h"
#include "flags.h"
#include "function.h"
-#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
-#include "tree-flow.h"
+#include "bitmap.h"
+#include "basic-block.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-expr.h"
+#include "is-a.h"
+#include "gimple.h"
+#include "gimple-ssa.h"
+#include "tree-phinodes.h"
+#include "ssa-iterators.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
#include "tree-inline.h"
-#include "tree-pass.h"
-#include "ggc.h"
+#include "timevar.h"
+#include "dumpfile.h"
#include "timevar.h"
#include "langhooks.h"
#include "diagnostic-core.h"
@@ -76,34 +87,6 @@ along with GCC; see the file COPYING3. If not see
operand vector for VUSE, then the new vector will also be modified
such that it contains 'a_5' rather than 'a'. */
-/* Structure storing statistics on how many call clobbers we have, and
- how many where avoided. */
-
-static struct
-{
- /* Number of call-clobbered ops we attempt to add to calls in
- add_call_clobbered_mem_symbols. */
- unsigned int clobbered_vars;
-
- /* Number of write-clobbers (VDEFs) avoided by using
- not_written information. */
- unsigned int static_write_clobbers_avoided;
-
- /* Number of reads (VUSEs) avoided by using not_read information. */
- unsigned int static_read_clobbers_avoided;
-
- /* Number of write-clobbers avoided because the variable can't escape to
- this call. */
- unsigned int unescapable_clobbers_avoided;
-
- /* Number of read-only uses we attempt to add to calls in
- add_call_read_mem_symbols. */
- unsigned int readonly_clobbers;
-
- /* Number of read-only uses we avoid using not_read information. */
- unsigned int static_readonly_clobbers_avoided;
-} clobber_stats;
-
/* Flags to describe operand properties in helpers. */
@@ -122,22 +105,17 @@ static struct
VUSE for 'b'. */
#define opf_no_vops (1 << 1)
-/* Operand is an implicit reference. This is used to distinguish
- explicit assignments in the form of MODIFY_EXPR from
- clobbering sites like function calls or ASM_EXPRs. */
-#define opf_implicit (1 << 2)
-
/* Operand is in a place where address-taken does not imply addressable. */
#define opf_non_addressable (1 << 3)
/* Operand is in a place where opf_non_addressable does not apply. */
#define opf_not_non_addressable (1 << 4)
-/* Array for building all the def operands. */
-static VEC(tree,heap) *build_defs;
+/* Operand is having its address taken. */
+#define opf_address_taken (1 << 5)
/* Array for building all the use operands. */
-static VEC(tree,heap) *build_uses;
+static vec<tree> build_uses;
/* The built VDEF operand. */
static tree build_vdef;
@@ -149,36 +127,28 @@ static tree build_vuse;
compilations of multiple functions. */
static bitmap_obstack operands_bitmap_obstack;
-static void get_expr_operands (gimple, tree *, int);
+static void get_expr_operands (struct function *, gimple, tree *, int);
/* Number of functions with initialized ssa_operands. */
static int n_initialized = 0;
-/* Return the DECL_UID of the base variable of T. */
-
-static inline unsigned
-get_name_decl (const_tree t)
+/* Accessor to tree-ssa-operands.c caches. */
+static inline struct ssa_operands *
+gimple_ssa_operands (const struct function *fun)
{
- if (TREE_CODE (t) != SSA_NAME)
- return DECL_UID (t);
- else
- return DECL_UID (SSA_NAME_VAR (t));
+ return &fun->gimple_df->ssa_operands;
}
/* Return true if the SSA operands cache is active. */
bool
-ssa_operands_active (void)
+ssa_operands_active (struct function *fun)
{
- /* This function may be invoked from contexts where CFUN is NULL
- (IPA passes), return false for now. FIXME: operands may be
- active in each individual function, maybe this function should
- take CFUN as a parameter. */
- if (cfun == NULL)
+ if (fun == NULL)
return false;
- return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
+ return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}
@@ -186,11 +156,11 @@ ssa_operands_active (void)
representative of all of the virtual operands FUD chain. */
static void
-create_vop_var (void)
+create_vop_var (struct function *fn)
{
tree global_var;
- gcc_assert (cfun->gimple_df->vop == NULL_TREE);
+ gcc_assert (fn->gimple_df->vop == NULL_TREE);
global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
get_identifier (".MEM"),
@@ -203,10 +173,9 @@ create_vop_var (void)
DECL_CONTEXT (global_var) = NULL_TREE;
TREE_THIS_VOLATILE (global_var) = 0;
TREE_ADDRESSABLE (global_var) = 0;
+ VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;
- create_var_ann (global_var);
- add_referenced_var (global_var);
- cfun->gimple_df->vop = global_var;
+ fn->gimple_df->vop = global_var;
}
/* These are the sizes of the operand memory buffer in bytes which gets
@@ -224,103 +193,82 @@ create_vop_var (void)
/* Initialize the operand cache routines. */
void
-init_ssa_operands (void)
+init_ssa_operands (struct function *fn)
{
if (!n_initialized++)
{
- build_defs = VEC_alloc (tree, heap, 5);
- build_uses = VEC_alloc (tree, heap, 10);
+ build_uses.create (10);
build_vuse = NULL_TREE;
build_vdef = NULL_TREE;
bitmap_obstack_initialize (&operands_bitmap_obstack);
}
- gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
- gimple_ssa_operands (cfun)->operand_memory_index
- = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
- gimple_ssa_operands (cfun)->ops_active = true;
- memset (&clobber_stats, 0, sizeof (clobber_stats));
- gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
- create_vop_var ();
+ gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
+ gimple_ssa_operands (fn)->operand_memory_index
+ = gimple_ssa_operands (fn)->ssa_operand_mem_size;
+ gimple_ssa_operands (fn)->ops_active = true;
+ gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
+ create_vop_var (fn);
}
/* Dispose of anything required by the operand routines. */
void
-fini_ssa_operands (void)
+fini_ssa_operands (struct function *fn)
{
struct ssa_operand_memory_d *ptr;
if (!--n_initialized)
{
- VEC_free (tree, heap, build_defs);
- VEC_free (tree, heap, build_uses);
+ build_uses.release ();
build_vdef = NULL_TREE;
build_vuse = NULL_TREE;
}
- gimple_ssa_operands (cfun)->free_defs = NULL;
- gimple_ssa_operands (cfun)->free_uses = NULL;
+ gimple_ssa_operands (fn)->free_uses = NULL;
- while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
+ while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
{
- gimple_ssa_operands (cfun)->operand_memory
- = gimple_ssa_operands (cfun)->operand_memory->next;
+ gimple_ssa_operands (fn)->operand_memory
+ = gimple_ssa_operands (fn)->operand_memory->next;
ggc_free (ptr);
}
- gimple_ssa_operands (cfun)->ops_active = false;
+ gimple_ssa_operands (fn)->ops_active = false;
if (!n_initialized)
bitmap_obstack_release (&operands_bitmap_obstack);
- cfun->gimple_df->vop = NULL_TREE;
-
- if (dump_file && (dump_flags & TDF_STATS))
- {
- fprintf (dump_file, "Original clobbered vars: %d\n",
- clobber_stats.clobbered_vars);
- fprintf (dump_file, "Static write clobbers avoided: %d\n",
- clobber_stats.static_write_clobbers_avoided);
- fprintf (dump_file, "Static read clobbers avoided: %d\n",
- clobber_stats.static_read_clobbers_avoided);
- fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
- clobber_stats.unescapable_clobbers_avoided);
- fprintf (dump_file, "Original read-only clobbers: %d\n",
- clobber_stats.readonly_clobbers);
- fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
- clobber_stats.static_readonly_clobbers_avoided);
- }
+ fn->gimple_df->vop = NULL_TREE;
}
/* Return memory for an operand of size SIZE. */
static inline void *
-ssa_operand_alloc (unsigned size)
+ssa_operand_alloc (struct function *fn, unsigned size)
{
char *ptr;
- gcc_assert (size == sizeof (struct use_optype_d)
- || size == sizeof (struct def_optype_d));
+ gcc_assert (size == sizeof (struct use_optype_d));
- if (gimple_ssa_operands (cfun)->operand_memory_index + size
- >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ if (gimple_ssa_operands (fn)->operand_memory_index + size
+ >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
{
struct ssa_operand_memory_d *ptr;
- switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
{
case OP_SIZE_INIT:
- gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
+ gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
break;
case OP_SIZE_1:
- gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
+ gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
break;
case OP_SIZE_2:
case OP_SIZE_3:
- gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
+ gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
break;
default:
gcc_unreachable ();
@@ -328,81 +276,47 @@ ssa_operand_alloc (unsigned size)
ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
- + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+ + gimple_ssa_operands (fn)->ssa_operand_mem_size);
- ptr->next = gimple_ssa_operands (cfun)->operand_memory;
- gimple_ssa_operands (cfun)->operand_memory = ptr;
- gimple_ssa_operands (cfun)->operand_memory_index = 0;
+ ptr->next = gimple_ssa_operands (fn)->operand_memory;
+ gimple_ssa_operands (fn)->operand_memory = ptr;
+ gimple_ssa_operands (fn)->operand_memory_index = 0;
}
- ptr = &(gimple_ssa_operands (cfun)->operand_memory
- ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
- gimple_ssa_operands (cfun)->operand_memory_index += size;
+ ptr = &(gimple_ssa_operands (fn)->operand_memory
+ ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
+ gimple_ssa_operands (fn)->operand_memory_index += size;
return ptr;
}
-/* Allocate a DEF operand. */
-
-static inline struct def_optype_d *
-alloc_def (void)
-{
- struct def_optype_d *ret;
- if (gimple_ssa_operands (cfun)->free_defs)
- {
- ret = gimple_ssa_operands (cfun)->free_defs;
- gimple_ssa_operands (cfun)->free_defs
- = gimple_ssa_operands (cfun)->free_defs->next;
- }
- else
- ret = (struct def_optype_d *)
- ssa_operand_alloc (sizeof (struct def_optype_d));
- return ret;
-}
-
-
/* Allocate a USE operand. */
static inline struct use_optype_d *
-alloc_use (void)
+alloc_use (struct function *fn)
{
struct use_optype_d *ret;
- if (gimple_ssa_operands (cfun)->free_uses)
+ if (gimple_ssa_operands (fn)->free_uses)
{
- ret = gimple_ssa_operands (cfun)->free_uses;
- gimple_ssa_operands (cfun)->free_uses
- = gimple_ssa_operands (cfun)->free_uses->next;
+ ret = gimple_ssa_operands (fn)->free_uses;
+ gimple_ssa_operands (fn)->free_uses
+ = gimple_ssa_operands (fn)->free_uses->next;
}
else
ret = (struct use_optype_d *)
- ssa_operand_alloc (sizeof (struct use_optype_d));
+ ssa_operand_alloc (fn, sizeof (struct use_optype_d));
return ret;
}
-/* Adds OP to the list of defs after LAST. */
-
-static inline def_optype_p
-add_def_op (tree *op, def_optype_p last)
-{
- def_optype_p new_def;
-
- new_def = alloc_def ();
- DEF_OP_PTR (new_def) = op;
- last->next = new_def;
- new_def->next = NULL;
- return new_def;
-}
-
-
/* Adds OP to the list of uses of statement STMT after LAST. */
static inline use_optype_p
-add_use_op (gimple stmt, tree *op, use_optype_p last)
+add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
{
use_optype_p new_use;
- new_use = alloc_use ();
+ new_use = alloc_use (fn);
USE_OP_PTR (new_use)->use = op;
link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
last->next = new_use;
@@ -413,19 +327,11 @@ add_use_op (gimple stmt, tree *op, use_optype_p last)
/* Takes elements from build_defs and turns them into def operands of STMT.
- TODO -- Make build_defs VEC of tree *. */
+ TODO -- Make build_defs vec of tree *. */
static inline void
-finalize_ssa_defs (gimple stmt)
+finalize_ssa_defs (struct function *fn, gimple stmt)
{
- unsigned new_i;
- struct def_optype_d new_list;
- def_optype_p old_ops, last;
- unsigned int num = VEC_length (tree, build_defs);
-
- /* There should only be a single real definition per assignment. */
- gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);
-
/* Pre-pend the vdef we may have built. */
if (build_vdef != NULL_TREE)
{
@@ -435,17 +341,8 @@ finalize_ssa_defs (gimple stmt)
oldvdef = SSA_NAME_VAR (oldvdef);
if (oldvdef != build_vdef)
gimple_set_vdef (stmt, build_vdef);
- VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
- ++num;
}
- new_list.next = NULL;
- last = &new_list;
-
- old_ops = gimple_def_ops (stmt);
-
- new_i = 0;
-
/* Clear and unlink a no longer necessary VDEF. */
if (build_vdef == NULL_TREE
&& gimple_vdef (stmt) != NULL_TREE)
@@ -453,7 +350,7 @@ finalize_ssa_defs (gimple stmt)
if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
{
unlink_stmt_vdef (stmt);
- release_ssa_name (gimple_vdef (stmt));
+ release_ssa_name_fn (fn, gimple_vdef (stmt));
}
gimple_set_vdef (stmt, NULL_TREE);
}
@@ -461,34 +358,18 @@ finalize_ssa_defs (gimple stmt)
/* If we have a non-SSA_NAME VDEF, mark it for renaming. */
if (gimple_vdef (stmt)
&& TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
- mark_sym_for_renaming (gimple_vdef (stmt));
-
- /* Check for the common case of 1 def that hasn't changed. */
- if (old_ops && old_ops->next == NULL && num == 1
- && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
- return;
-
- /* If there is anything in the old list, free it. */
- if (old_ops)
{
- old_ops->next = gimple_ssa_operands (cfun)->free_defs;
- gimple_ssa_operands (cfun)->free_defs = old_ops;
+ fn->gimple_df->rename_vops = 1;
+ fn->gimple_df->ssa_renaming_needed = 1;
}
-
- /* If there is anything remaining in the build_defs list, simply emit it. */
- for ( ; new_i < num; new_i++)
- last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
-
- /* Now set the stmt's operands. */
- gimple_set_def_ops (stmt, new_list.next);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
- TODO -- Make build_uses VEC of tree *. */
+ TODO -- Make build_uses vec of tree *. */
static inline void
-finalize_ssa_uses (gimple stmt)
+finalize_ssa_uses (struct function *fn, gimple stmt)
{
unsigned new_i;
struct use_optype_d new_list;
@@ -504,7 +385,7 @@ finalize_ssa_uses (gimple stmt)
if (oldvuse != (build_vuse != NULL_TREE
? build_vuse : build_vdef))
gimple_set_vuse (stmt, NULL_TREE);
- VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
+ build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
}
new_list.next = NULL;
@@ -522,8 +403,8 @@ finalize_ssa_uses (gimple stmt)
{
for (ptr = old_ops; ptr; ptr = ptr->next)
delink_imm_use (USE_OP_PTR (ptr));
- old_ops->next = gimple_ssa_operands (cfun)->free_uses;
- gimple_ssa_operands (cfun)->free_uses = old_ops;
+ old_ops->next = gimple_ssa_operands (fn)->free_uses;
+ gimple_ssa_operands (fn)->free_uses = old_ops;
}
/* If we added a VUSE, make sure to set the operand if it is not already
@@ -531,15 +412,17 @@ finalize_ssa_uses (gimple stmt)
if (build_vuse != NULL_TREE
&& gimple_vuse (stmt) == NULL_TREE)
{
- gimple_set_vuse (stmt, gimple_vop (cfun));
- mark_sym_for_renaming (gimple_vop (cfun));
+ gimple_set_vuse (stmt, gimple_vop (fn));
+ fn->gimple_df->rename_vops = 1;
+ fn->gimple_df->ssa_renaming_needed = 1;
}
/* Now create nodes for all the new nodes. */
- for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
- last = add_use_op (stmt,
- (tree *) VEC_index (tree, build_uses, new_i),
- last);
+ for (new_i = 0; new_i < build_uses.length (); new_i++)
+ {
+ tree *op = (tree *) build_uses[new_i];
+ last = add_use_op (fn, stmt, op, last);
+ }
/* Now set the stmt's operands. */
gimple_set_use_ops (stmt, new_list.next);
@@ -554,18 +437,17 @@ cleanup_build_arrays (void)
{
build_vdef = NULL_TREE;
build_vuse = NULL_TREE;
- VEC_truncate (tree, build_defs, 0);
- VEC_truncate (tree, build_uses, 0);
+ build_uses.truncate (0);
}
/* Finalize all the build vectors, fill the new ones into INFO. */
static inline void
-finalize_ssa_stmt_operands (gimple stmt)
+finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
{
- finalize_ssa_defs (stmt);
- finalize_ssa_uses (stmt);
+ finalize_ssa_defs (fn, stmt);
+ finalize_ssa_uses (fn, stmt);
cleanup_build_arrays ();
}
@@ -575,28 +457,18 @@ finalize_ssa_stmt_operands (gimple stmt)
static inline void
start_ssa_stmt_operands (void)
{
- gcc_assert (VEC_length (tree, build_defs) == 0);
- gcc_assert (VEC_length (tree, build_uses) == 0);
+ gcc_assert (build_uses.length () == 0);
gcc_assert (build_vuse == NULL_TREE);
gcc_assert (build_vdef == NULL_TREE);
}
-/* Add DEF_P to the list of pointers to operands. */
-
-static inline void
-append_def (tree *def_p)
-{
- VEC_safe_push (tree, heap, build_defs, (tree) def_p);
-}
-
-
/* Add USE_P to the list of pointers to operands. */
static inline void
append_use (tree *use_p)
{
- VEC_safe_push (tree, heap, build_uses, (tree) use_p);
+ build_uses.safe_push ((tree) use_p);
}
@@ -635,7 +507,8 @@ append_vuse (tree var)
/* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
static void
-add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
+add_virtual_operand (struct function *fn,
+ gimple stmt ATTRIBUTE_UNUSED, int flags)
{
/* Add virtual operands to the stmt, unless the caller has specifically
requested not to do that (used when adding operands inside an
@@ -646,9 +519,9 @@ add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
gcc_assert (!is_gimple_debug (stmt));
if (flags & opf_def)
- append_vdef (gimple_vop (cfun));
+ append_vdef (gimple_vop (fn));
else
- append_vuse (gimple_vop (cfun));
+ append_vuse (gimple_vop (fn));
}
@@ -658,30 +531,32 @@ add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
added to virtual operands. */
static void
-add_stmt_operand (tree *var_p, gimple stmt, int flags)
+add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
{
- tree var, sym;
+ tree var = *var_p;
gcc_assert (SSA_VAR_P (*var_p));
- var = *var_p;
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
-
- /* Mark statements with volatile operands. */
- if (!(flags & opf_no_vops)
- && TREE_THIS_VOLATILE (sym))
- gimple_set_has_volatile_ops (stmt, true);
-
- if (is_gimple_reg (sym))
+ if (is_gimple_reg (var))
{
/* The variable is a GIMPLE register. Add it to real operands. */
if (flags & opf_def)
- append_def (var_p);
+ ;
else
append_use (var_p);
+ if (DECL_P (*var_p))
+ fn->gimple_df->ssa_renaming_needed = 1;
}
else
- add_virtual_operand (stmt, flags);
+ {
+ /* Mark statements with volatile operands. */
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (var))
+ gimple_set_has_volatile_ops (stmt, true);
+
+ /* The variable is a memory access. Add virtual operands. */
+ add_virtual_operand (fn, stmt, flags);
+ }
}
/* Mark the base address of REF as having its address taken.
@@ -717,15 +592,11 @@ mark_address_taken (tree ref)
STMT is the statement being processed, EXPR is the MEM_REF
that got us here.
- FLAGS is as in get_expr_operands.
-
- RECURSE_ON_BASE should be set to true if we want to continue
- calling get_expr_operands on the base pointer, and false if
- something else will do it for us. */
+ FLAGS is as in get_expr_operands. */
static void
-get_indirect_ref_operands (gimple stmt, tree expr, int flags,
- bool recurse_on_base)
+get_mem_ref_operands (struct function *fn,
+ gimple stmt, tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
@@ -734,31 +605,33 @@ get_indirect_ref_operands (gimple stmt, tree expr, int flags,
gimple_set_has_volatile_ops (stmt, true);
/* Add the VOP. */
- add_virtual_operand (stmt, flags);
+ add_virtual_operand (fn, stmt, flags);
/* If requested, add a USE operand for the base pointer. */
- if (recurse_on_base)
- get_expr_operands (stmt, pptr,
- opf_non_addressable | opf_use
- | (flags & (opf_no_vops|opf_not_non_addressable)));
+ get_expr_operands (fn, stmt, pptr,
+ opf_non_addressable | opf_use
+ | (flags & (opf_no_vops|opf_not_non_addressable)));
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
static void
-get_tmr_operands (gimple stmt, tree expr, int flags)
+get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
{
if (!(flags & opf_no_vops)
&& TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* First record the real operands. */
- get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
- get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
- get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
-
- add_virtual_operand (stmt, flags);
+ get_expr_operands (fn, stmt,
+ &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
+ get_expr_operands (fn, stmt,
+ &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
+ get_expr_operands (fn, stmt,
+ &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
+
+ add_virtual_operand (fn, stmt, flags);
}
@@ -766,7 +639,7 @@ get_tmr_operands (gimple stmt, tree expr, int flags)
escape, add them to the VDEF/VUSE lists for it. */
static void
-maybe_add_call_vops (gimple stmt)
+maybe_add_call_vops (struct function *fn, gimple stmt)
{
int call_flags = gimple_call_flags (stmt);
@@ -775,13 +648,11 @@ maybe_add_call_vops (gimple stmt)
call-clobbered. */
if (!(call_flags & ECF_NOVOPS))
{
- /* A 'pure' or a 'const' function never call-clobbers anything.
- A 'noreturn' function might, but since we don't return anyway
- there is no point in recording that. */
- if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_virtual_operand (stmt, opf_def);
+ /* A 'pure' or a 'const' function never call-clobbers anything. */
+ if (!(call_flags & (ECF_PURE | ECF_CONST)))
+ add_virtual_operand (fn, stmt, opf_def);
else if (!(call_flags & ECF_CONST))
- add_virtual_operand (stmt, opf_use);
+ add_virtual_operand (fn, stmt, opf_use);
}
}
@@ -789,7 +660,7 @@ maybe_add_call_vops (gimple stmt)
/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
static void
-get_asm_expr_operands (gimple stmt)
+get_asm_stmt_operands (struct function *fn, gimple stmt)
{
size_t i, noutputs;
const char **oconstraints;
@@ -816,7 +687,8 @@ get_asm_expr_operands (gimple stmt)
if (!allows_reg && allows_mem)
mark_address_taken (TREE_VALUE (link));
- get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
+ get_expr_operands (fn, stmt,
+ &TREE_VALUE (link), opf_def | opf_not_non_addressable);
}
/* Gather all input operands. */
@@ -832,12 +704,12 @@ get_asm_expr_operands (gimple stmt)
if (!allows_reg && allows_mem)
mark_address_taken (TREE_VALUE (link));
- get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
+ get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
}
/* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
if (gimple_asm_clobbers_memory_p (stmt))
- add_virtual_operand (stmt, opf_def);
+ add_virtual_operand (fn, stmt, opf_def);
}
@@ -846,7 +718,7 @@ get_asm_expr_operands (gimple stmt)
interpret the operands found. */
static void
-get_expr_operands (gimple stmt, tree *expr_p, int flags)
+get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
{
enum tree_code code;
enum tree_code_class codeclass;
@@ -874,29 +746,22 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
&& !is_gimple_debug (stmt))
mark_address_taken (TREE_OPERAND (expr, 0));
- /* If the address is invariant, there may be no interesting
- variable references inside. */
- if (is_gimple_min_invariant (expr))
- return;
-
/* Otherwise, there may be variables referenced inside but there
should be no VUSEs created, since the referenced objects are
not really accessed. The only operands that we should find
here are ARRAY_REF indices which will always be real operands
(GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
- flags | opf_not_non_addressable);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
+ flags | opf_not_non_addressable | opf_address_taken);
return;
case SSA_NAME:
- add_stmt_operand (expr_p, stmt, flags);
- return;
-
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
- add_stmt_operand (expr_p, stmt, flags);
+ if (!(flags & opf_address_taken))
+ add_stmt_operand (fn, expr_p, stmt, flags);
return;
case DEBUG_EXPR_DECL:
@@ -904,11 +769,11 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
return;
case MEM_REF:
- get_indirect_ref_operands (stmt, expr, flags, true);
+ get_mem_ref_operands (fn, stmt, expr, flags);
return;
case TARGET_MEM_REF:
- get_tmr_operands (stmt, expr, flags);
+ get_tmr_operands (fn, stmt, expr, flags);
return;
case ARRAY_REF:
@@ -921,20 +786,20 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
&& TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
if (!(flags & opf_no_vops)
&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
gimple_set_has_volatile_ops (stmt, true);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
}
else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
}
return;
@@ -943,16 +808,16 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
return;
case COND_EXPR:
case VEC_COND_EXPR:
case VEC_PERM_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
return;
case CONSTRUCTOR:
@@ -970,9 +835,9 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
gimple_set_has_volatile_ops (stmt, true);
for (idx = 0;
- VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
+ vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
idx++)
- get_expr_operands (stmt, &ce->value, uflags);
+ get_expr_operands (fn, stmt, &ce->value, uflags);
return;
}
@@ -985,7 +850,7 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
case VIEW_CONVERT_EXPR:
do_unary:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
return;
case COMPOUND_EXPR:
@@ -993,8 +858,8 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
case ASSERT_EXPR:
do_binary:
{
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
return;
}
@@ -1004,9 +869,9 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
case WIDEN_MULT_MINUS_EXPR:
case FMA_EXPR:
{
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
return;
}
@@ -1040,7 +905,7 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
build_* operand vectors will have potential operands in them. */
static void
-parse_ssa_operands (gimple stmt)
+parse_ssa_operands (struct function *fn, gimple stmt)
{
enum gimple_code code = gimple_code (stmt);
size_t i, n, start = 0;
@@ -1048,32 +913,32 @@ parse_ssa_operands (gimple stmt)
switch (code)
{
case GIMPLE_ASM:
- get_asm_expr_operands (stmt);
+ get_asm_stmt_operands (fn, stmt);
break;
case GIMPLE_TRANSACTION:
/* The start of a transaction is a memory barrier. */
- add_virtual_operand (stmt, opf_def | opf_use);
+ add_virtual_operand (fn, stmt, opf_def | opf_use);
break;
case GIMPLE_DEBUG:
if (gimple_debug_bind_p (stmt)
&& gimple_debug_bind_has_value_p (stmt))
- get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
+ get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
opf_use | opf_no_vops);
break;
case GIMPLE_RETURN:
- append_vuse (gimple_vop (cfun));
+ append_vuse (gimple_vop (fn));
goto do_default;
case GIMPLE_CALL:
/* Add call-clobbered operands, if needed. */
- maybe_add_call_vops (stmt);
+ maybe_add_call_vops (fn, stmt);
/* FALLTHRU */
case GIMPLE_ASSIGN:
- get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
+ get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
start = 1;
/* FALLTHRU */
@@ -1081,7 +946,7 @@ parse_ssa_operands (gimple stmt)
do_default:
n = gimple_num_ops (stmt);
for (i = start; i < n; i++)
- get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
+ get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
break;
}
}
@@ -1090,20 +955,20 @@ parse_ssa_operands (gimple stmt)
/* Create an operands cache for STMT. */
static void
-build_ssa_operands (gimple stmt)
+build_ssa_operands (struct function *fn, gimple stmt)
{
/* Initially assume that the statement has no volatile operands. */
gimple_set_has_volatile_ops (stmt, false);
start_ssa_stmt_operands ();
- parse_ssa_operands (stmt);
- finalize_ssa_stmt_operands (stmt);
+ parse_ssa_operands (fn, stmt);
+ finalize_ssa_stmt_operands (fn, stmt);
}
/* Verifies SSA statement operands. */
DEBUG_FUNCTION bool
-verify_ssa_operands (gimple stmt)
+verify_ssa_operands (struct function *fn, gimple stmt)
{
use_operand_p use_p;
def_operand_p def_p;
@@ -1115,7 +980,7 @@ verify_ssa_operands (gimple stmt)
/* build_ssa_operands w/o finalizing them. */
gimple_set_has_volatile_ops (stmt, false);
start_ssa_stmt_operands ();
- parse_ssa_operands (stmt);
+ parse_ssa_operands (fn, stmt);
/* Now verify the built operands are the same as present in STMT. */
def = gimple_vdef (stmt);
@@ -1154,22 +1019,22 @@ verify_ssa_operands (gimple stmt)
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
- FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ FOR_EACH_VEC_ELT (build_uses, i, use)
{
if (use_p->use == (tree *)use)
{
- VEC_replace (tree, build_uses, i, NULL_TREE);
+ build_uses[i] = NULL_TREE;
break;
}
}
- if (i == VEC_length (tree, build_uses))
+ if (i == build_uses.length ())
{
error ("excess use operand for stmt");
debug_generic_expr (USE_FROM_PTR (use_p));
return true;
}
}
- FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ FOR_EACH_VEC_ELT (build_uses, i, use)
if (use != NULL_TREE)
{
error ("use operand missing for stmt");
@@ -1177,31 +1042,6 @@ verify_ssa_operands (gimple stmt)
return true;
}
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
- {
- FOR_EACH_VEC_ELT (tree, build_defs, i, def)
- {
- if (def_p == (tree *)def)
- {
- VEC_replace (tree, build_defs, i, NULL_TREE);
- break;
- }
- }
- if (i == VEC_length (tree, build_defs))
- {
- error ("excess def operand for stmt");
- debug_generic_expr (DEF_FROM_PTR (def_p));
- return true;
- }
- }
- FOR_EACH_VEC_ELT (tree, build_defs, i, def)
- if (def != NULL_TREE)
- {
- error ("def operand missing for stmt");
- debug_generic_expr (*(tree *)def);
- return true;
- }
-
if (gimple_has_volatile_ops (stmt) != volatile_p)
{
error ("stmt volatile flag not up-to-date");
@@ -1217,27 +1057,17 @@ verify_ssa_operands (gimple stmt)
the stmt operand lists. */
void
-free_stmt_operands (gimple stmt)
+free_stmt_operands (struct function *fn, gimple stmt)
{
- def_optype_p defs = gimple_def_ops (stmt), last_def;
use_optype_p uses = gimple_use_ops (stmt), last_use;
- if (defs)
- {
- for (last_def = defs; last_def->next; last_def = last_def->next)
- continue;
- last_def->next = gimple_ssa_operands (cfun)->free_defs;
- gimple_ssa_operands (cfun)->free_defs = defs;
- gimple_set_def_ops (stmt, NULL);
- }
-
if (uses)
{
for (last_use = uses; last_use->next; last_use = last_use->next)
delink_imm_use (USE_OP_PTR (last_use));
delink_imm_use (USE_OP_PTR (last_use));
- last_use->next = gimple_ssa_operands (cfun)->free_uses;
- gimple_ssa_operands (cfun)->free_uses = uses;
+ last_use->next = gimple_ssa_operands (fn)->free_uses;
+ gimple_ssa_operands (fn)->free_uses = uses;
gimple_set_use_ops (stmt, NULL);
}
@@ -1252,23 +1082,17 @@ free_stmt_operands (gimple stmt)
/* Get the operands of statement STMT. */
void
-update_stmt_operands (gimple stmt)
+update_stmt_operands (struct function *fn, gimple stmt)
{
/* If update_stmt_operands is called before SSA is initialized, do
nothing. */
- if (!ssa_operands_active ())
+ if (!ssa_operands_active (fn))
return;
timevar_push (TV_TREE_OPS);
- /* If the stmt is a noreturn call queue it to be processed by
- split_bbs_on_noreturn_calls during cfg cleanup. */
- if (is_gimple_call (stmt)
- && gimple_call_noreturn_p (stmt))
- VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);
-
gcc_assert (gimple_modified_p (stmt));
- build_ssa_operands (stmt);
+ build_ssa_operands (fn, stmt);
gimple_set_modified (stmt, false);
timevar_pop (TV_TREE_OPS);
@@ -1279,18 +1103,17 @@ update_stmt_operands (gimple stmt)
to test the validity of the swap operation. */
void
-swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
+swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
tree op0, op1;
op0 = *exp0;
op1 = *exp1;
- /* If the operand cache is active, attempt to preserve the relative
- positions of these two operands in their respective immediate use
- lists by adjusting their use pointer to point to the new
- operand position. */
- if (ssa_operands_active () && op0 != op1)
+ if (op0 != op1)
{
+ /* Attempt to preserve the relative positions of these two operands in
+ their * respective immediate use lists by adjusting their use pointer
+ to point to the new operand position. */
use_optype_p use0, use1, ptr;
use0 = use1 = NULL;
@@ -1315,11 +1138,11 @@ swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
USE_OP_PTR (use0)->use = exp1;
if (use1)
USE_OP_PTR (use1)->use = exp0;
- }
- /* Now swap the data. */
- *exp0 = op1;
- *exp1 = op0;
+ /* Now swap the data. */
+ *exp0 = op1;
+ *exp1 = op0;
+ }
}
@@ -1390,7 +1213,7 @@ verify_imm_links (FILE *f, tree var)
fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
(void *)ptr->use);
print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
- fprintf(f, "\n");
+ fprintf (f, "\n");
return true;
}
@@ -1425,7 +1248,7 @@ dump_immediate_uses_for (FILE *file, tree var)
else
print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
}
- fprintf(file, "\n");
+ fprintf (file, "\n");
}
@@ -1440,7 +1263,7 @@ dump_immediate_uses (FILE *file)
fprintf (file, "Immediate_uses: \n\n");
for (x = 1; x < num_ssa_names; x++)
{
- var = ssa_name(x);
+ var = ssa_name (x);
if (!var)
continue;
dump_immediate_uses_for (file, var);
@@ -1475,18 +1298,63 @@ unlink_stmt_vdef (gimple stmt)
imm_use_iterator iter;
gimple use_stmt;
tree vdef = gimple_vdef (stmt);
+ tree vuse = gimple_vuse (stmt);
if (!vdef
|| TREE_CODE (vdef) != SSA_NAME)
return;
- FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
{
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
- SET_USE (use_p, gimple_vuse (stmt));
+ SET_USE (use_p, vuse);
}
- if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
+}
+
+
+/* Return true if the var whose chain of uses starts at PTR has no
+ nondebug uses. */
+bool
+has_zero_uses_1 (const ssa_use_operand_t *head)
+{
+ const ssa_use_operand_t *ptr;
+
+ for (ptr = head->next; ptr != head; ptr = ptr->next)
+ if (!is_gimple_debug (USE_STMT (ptr)))
+ return false;
+
+ return true;
}
+
+/* Return true if the var whose chain of uses starts at PTR has a
+ single nondebug use. Set USE_P and STMT to that single nondebug
+ use, if so, or to NULL otherwise. */
+bool
+single_imm_use_1 (const ssa_use_operand_t *head,
+ use_operand_p *use_p, gimple *stmt)
+{
+ ssa_use_operand_t *ptr, *single_use = 0;
+
+ for (ptr = head->next; ptr != head; ptr = ptr->next)
+ if (!is_gimple_debug (USE_STMT (ptr)))
+ {
+ if (single_use)
+ {
+ single_use = NULL;
+ break;
+ }
+ single_use = ptr;
+ }
+
+ if (use_p)
+ *use_p = single_use;
+
+ if (stmt)
+ *stmt = single_use ? single_use->loc.stmt : NULL;
+
+ return single_use;
+}
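
For context, the imported 4.9.2 version of tree-ssa-operands.c makes the operand-cache entry points take an explicit struct function * instead of reading the global cfun, renames swap_tree_operands to swap_ssa_operands, and drops real DEF operands (build_defs, alloc_def, add_def_op) together with the old clobber statistics. Below is a minimal caller-side sketch of the new signatures, based only on the prototypes visible in the hunks above; the surrounding pass fragment and the use of gimple_assign_rhs1_ptr / gimple_assign_rhs2_ptr are illustrative assumptions, not part of this patch:

/* Hypothetical fragment of a GIMPLE pass working on a GIMPLE_ASSIGN stmt.  */

if (ssa_operands_active (cfun))        /* Now takes the function explicitly.  */
  {
    gimple_set_modified (stmt, true);  /* update_stmt_operands asserts this.  */
    update_stmt_operands (cfun, stmt); /* Was update_stmt_operands (stmt).    */
  }

/* swap_tree_operands was renamed to swap_ssa_operands; it no longer checks
   ssa_operands_active itself, so only call it on statements whose operand
   cache has been built.  */
swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
		   gimple_assign_rhs2_ptr (stmt));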