path: root/gcc/tree-ssa-address.c
author     Lorry Tar Creator <lorry-tar-importer@baserock.org>   2014-10-30 09:35:42 +0000
committer  <>                                                    2015-01-09 11:51:27 +0000
commit     c27a97d04853380f1e80525391b3f0d156ed4c84 (patch)
tree       68ffaade7c605bc80cffa18360799c98a810976f /gcc/tree-ssa-address.c
parent     6af3fdec2262dd94954acc5e426ef71cbd4521d3 (diff)
download   gcc-tarball-c27a97d04853380f1e80525391b3f0d156ed4c84.tar.gz
Imported from /home/lorry/working-area/delta_gcc-tarball/gcc-4.9.2.tar.bz2.gcc-4.9.2
Diffstat (limited to 'gcc/tree-ssa-address.c')
-rw-r--r--   gcc/tree-ssa-address.c   122
1 file changed, 68 insertions(+), 54 deletions(-)
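
Most of the churn in the hunks below is mechanical: the old C-style double_int helpers (double_int_one_p, double_int_sext, double_int_equal_p, ...) become member functions and operators on double_int, and the VEC macros (VEC_index, VEC_safe_grow_cleared, ...) become the vec<> template with vec_safe_* helpers. The following is a minimal, self-contained sketch of that call-style migration only; wide_val and std::vector are illustrative stand-ins, not GCC's actual double_int and vec<> (those live in GCC's own double-int.h and vec.h).

```cpp
// Stand-in value class: shows member-function style calls that replace the
// old free-function helpers named in the "was:" comments (seen in the diff).
#include <cstdint>
#include <vector>

struct wide_val
{
  int64_t lo;

  bool is_one () const { return lo == 1; }    // was: double_int_one_p (x)
  bool is_zero () const { return lo == 0; }   // was: double_int_zero_p (x)

  // was: double_int_sext (x, prec) -- sign-extend the low PREC bits.
  wide_val sext (unsigned prec) const
  {
    int64_t sign = int64_t (1) << (prec - 1);
    return { ((lo & ((sign << 1) - 1)) ^ sign) - sign };
  }

  bool operator== (const wide_val &o) const { return lo == o.lo; }  // was: double_int_equal_p
  wide_val operator- () const { return { -lo }; }                   // was: double_int_neg
};

int main ()
{
  std::vector<wide_val> list;    // was: VEC (mem_addr_template, gc) *list
  list.resize (8);               // was: VEC_safe_grow_cleared (..., 8)
  wide_val *elt = &list[3];      // was: VEC_index (...); now &(*vec)[i]
  elt->lo = 1;
  return elt->is_one () ? 0 : 1; // was: double_int_one_p (*elt)
}
```

The same pattern appears throughout the diff, e.g. double_int_one_p (addr->elts[i].coef) becoming addr->elts[i].coef.is_one ().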
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index cf131578d2..741478c4de 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -1,6 +1,5 @@
/* Memory address lowering and addressing mode selection.
- Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2004-2014 Free Software Foundation, Inc.
This file is part of GCC.
@@ -26,14 +25,23 @@ along with GCC; see the file COPYING3. If not see
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "stor-layout.h"
#include "tm_p.h"
#include "basic-block.h"
-#include "output.h"
#include "tree-pretty-print.h"
-#include "tree-flow.h"
-#include "tree-dump.h"
-#include "tree-pass.h"
-#include "timevar.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-expr.h"
+#include "is-a.h"
+#include "gimple.h"
+#include "gimple-iterator.h"
+#include "gimplify-me.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
+#include "tree-ssa-loop-ivopts.h"
+#include "expr.h"
+#include "tree-dfa.h"
+#include "dumpfile.h"
#include "flags.h"
#include "tree-inline.h"
#include "tree-affine.h"
@@ -43,8 +51,9 @@ along with GCC; see the file COPYING3. If not see
#include "rtl.h"
#include "recog.h"
#include "expr.h"
-#include "ggc.h"
#include "target.h"
+#include "expmed.h"
+#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Hendersons
comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
@@ -81,14 +90,12 @@ typedef struct GTY (()) mem_addr_template {
be filled in. */
} mem_addr_template;
-DEF_VEC_O (mem_addr_template);
-DEF_VEC_ALLOC_O (mem_addr_template, gc);
/* The templates. Each of the low five bits of the index corresponds to one
component of TARGET_MEM_REF being present, while the high bits identify
the address space. See TEMPL_IDX. */
-static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;
+static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
(((int) (AS) << 5) \
@@ -178,6 +185,13 @@ gen_addr_rtx (enum machine_mode address_mode,
*addr = const0_rtx;
}
+/* Description of a memory address. */
+
+struct mem_address
+{
+ tree symbol, base, index, step, offset;
+};
+
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
in address space AS.
If REALLY_EXPAND is false, just make fake registers instead
@@ -200,8 +214,8 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
if (addr->offset && !integer_zerop (addr->offset))
off = immed_double_int_const
- (double_int_sext (tree_to_double_int (addr->offset),
- TYPE_PRECISION (TREE_TYPE (addr->offset))),
+ (tree_to_double_int (addr->offset)
+ .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
pointer_mode);
else
off = NULL_RTX;
@@ -211,13 +225,11 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
unsigned int templ_index
= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
- if (templ_index
- >= VEC_length (mem_addr_template, mem_addr_template_list))
- VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
- templ_index + 1);
+ if (templ_index >= vec_safe_length (mem_addr_template_list))
+ vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
/* Reuse the templates for addresses, so that we do not waste memory. */
- templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
+ templ = &(*mem_addr_template_list)[templ_index];
if (!templ->ref)
{
sym = (addr->symbol ?
@@ -263,6 +275,17 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
return address;
}
+/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
+ the mem_address structure. */
+
+rtx
+addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
+{
+ struct mem_address addr;
+ get_address_description (exp, &addr);
+ return addr_for_mem_ref (&addr, as, really_expand);
+}
+
/* Returns address of MEM_REF in TYPE. */
tree
@@ -401,7 +424,7 @@ move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
for (i = 0; i < addr->n; i++)
{
- if (!double_int_one_p (addr->elts[i].coef))
+ if (!addr->elts[i].coef.is_one ())
continue;
val = addr->elts[i].val;
@@ -429,7 +452,7 @@ move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
for (i = 0; i < addr->n; i++)
{
- if (!double_int_one_p (addr->elts[i].coef))
+ if (!addr->elts[i].coef.is_one ())
continue;
val = addr->elts[i].val;
@@ -461,7 +484,7 @@ move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
for (i = 0; i < addr->n; i++)
{
- if (!double_int_one_p (addr->elts[i].coef))
+ if (!addr->elts[i].coef.is_one ())
continue;
val = addr->elts[i].val;
@@ -549,15 +572,15 @@ most_expensive_mult_to_index (tree type, struct mem_address *parts,
best_mult = double_int_zero;
for (i = 0; i < addr->n; i++)
{
- if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
+ if (!addr->elts[i].coef.fits_shwi ())
continue;
- coef = double_int_to_shwi (addr->elts[i].coef);
+ coef = addr->elts[i].coef.to_shwi ();
if (coef == 1
|| !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
continue;
- acost = multiply_by_cost (coef, address_mode, speed);
+ acost = mult_by_coeff_cost (coef, address_mode, speed);
if (acost > best_mult_cost)
{
@@ -573,11 +596,11 @@ most_expensive_mult_to_index (tree type, struct mem_address *parts,
for (i = j = 0; i < addr->n; i++)
{
amult = addr->elts[i].coef;
- amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);
+ amult_neg = double_int_ext_for_comb (-amult, addr);
- if (double_int_equal_p (amult, best_mult))
+ if (amult == best_mult)
op_code = PLUS_EXPR;
- else if (double_int_equal_p (amult_neg, best_mult))
+ else if (amult_neg == best_mult)
op_code = MINUS_EXPR;
else
{
@@ -625,7 +648,7 @@ addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
parts->index = NULL_TREE;
parts->step = NULL_TREE;
- if (!double_int_zero_p (addr->offset))
+ if (!addr->offset.is_zero ())
parts->offset = double_int_to_tree (sizetype, addr->offset);
else
parts->offset = NULL_TREE;
@@ -657,7 +680,7 @@ addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
for (i = 0; i < addr->n; i++)
{
part = fold_convert (sizetype, addr->elts[i].val);
- if (!double_int_one_p (addr->elts[i].coef))
+ if (!addr->elts[i].coef.is_one ())
part = fold_build2 (MULT_EXPR, sizetype, part,
double_int_to_tree (sizetype, addr->elts[i].coef));
add_to_parts (parts, part);
@@ -822,16 +845,6 @@ get_address_description (tree op, struct mem_address *addr)
addr->offset = TMR_OFFSET (op);
}
-/* Copies the additional information attached to target_mem_ref FROM to TO. */
-
-void
-copy_mem_ref_info (tree to, tree from)
-{
- /* And the info about the original reference. */
- TREE_SIDE_EFFECTS (to) = TREE_SIDE_EFFECTS (from);
- TREE_THIS_VOLATILE (to) = TREE_THIS_VOLATILE (from);
-}
-
/* Copies the reference information from OLD_REF to NEW_REF, where
NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
@@ -863,26 +876,26 @@ copy_ref_info (tree new_ref, tree old_ref)
&& SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
{
struct ptr_info_def *new_pi;
+ unsigned int align, misalign;
+
duplicate_ssa_name_ptr_info
(new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
- /* We have to be careful about transfering alignment information. */
- if (TREE_CODE (old_ref) == MEM_REF
+ /* We have to be careful about transferring alignment information. */
+ if (get_ptr_info_alignment (new_pi, &align, &misalign)
+ && TREE_CODE (old_ref) == MEM_REF
&& !(TREE_CODE (new_ref) == TARGET_MEM_REF
&& (TMR_INDEX2 (new_ref)
|| (TMR_STEP (new_ref)
&& (TREE_INT_CST_LOW (TMR_STEP (new_ref))
- < new_pi->align)))))
+ < align)))))
{
- new_pi->misalign += double_int_sub (mem_ref_offset (old_ref),
- mem_ref_offset (new_ref)).low;
- new_pi->misalign &= (new_pi->align - 1);
+ unsigned int inc = (mem_ref_offset (old_ref)
+ - mem_ref_offset (new_ref)).low;
+ adjust_ptr_info_misalignment (new_pi, inc);
}
else
- {
- new_pi->align = 1;
- new_pi->misalign = 0;
- }
+ mark_ptr_info_alignment_unknown (new_pi);
}
else if (TREE_CODE (base) == VAR_DECL
|| TREE_CODE (base) == PARM_DECL
@@ -902,7 +915,7 @@ maybe_fold_tmr (tree ref)
{
struct mem_address addr;
bool changed = false;
- tree ret, off;
+ tree new_ref, off;
get_address_description (ref, &addr);
@@ -963,10 +976,11 @@ maybe_fold_tmr (tree ref)
ended up folding it, always create a new TARGET_MEM_REF regardless
if it is valid in this for on the target - the propagation result
wouldn't be anyway. */
- ret = create_mem_ref_raw (TREE_TYPE (ref),
- TREE_TYPE (addr.offset), &addr, false);
- copy_mem_ref_info (ret, ref);
- return ret;
+ new_ref = create_mem_ref_raw (TREE_TYPE (ref),
+ TREE_TYPE (addr.offset), &addr, false);
+ TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
+ TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
+ return new_ref;
}
/* Dump PARTS to FILE. */