author     ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>  2012-05-14 08:46:33 +0000
committer  ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>  2012-05-14 08:46:33 +0000
commit     d990677336b2fdbfeeb9d9dcdf65e2a73ba1b337 (patch)
tree       24a9b1d109a3426404acbce3f2ac87147420a614
parent     929a56fc2d27fbb4c07e7e13486f4da01849e8ee (diff)
download   gcc-d990677336b2fdbfeeb9d9dcdf65e2a73ba1b337.tar.gz
* stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype
into a PLUS_EXPR byte offset.

* tree-ssa-pre.c (can_value_number_call): Delete.
(compute_avail): Skip all statements with side effects.
<GIMPLE_CALL>: Skip calls to internal functions.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@187450 138bc75d-0d04-0410-961f-82ee72b054a4
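In brief (a reader's sketch, not part of the patch): bit_from_pos combines a byte offset and a bit position into one overall bit position, bitpos + offset * BITS_PER_UNIT. Below is a minimal numeric model of that arithmetic, assuming 8-bit bytes; the real function operates on tree expressions and, after this change, converts each operand of a PLUS_EXPR byte offset to bitsizetype separately so constant parts keep folding.

#include <stdio.h>

#define BITS_PER_UNIT 8   /* assumption: 8-bit bytes, as on most targets */

/* Model of the arithmetic only; GCC's bit_from_pos builds this as a
   folded tree expression via size_binop.  */
static unsigned long long
bit_from_pos_model (unsigned long long byte_offset,
		    unsigned long long bitpos)
{
  return bitpos + byte_offset * BITS_PER_UNIT;
}

int
main (void)
{
  /* A field at byte offset 5, bit 3 within that byte sits at bit 43.  */
  printf ("%llu\n", bit_from_pos_model (5, 3));
  return 0;
}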
-rw-r--r--  gcc/ChangeLog       9
-rw-r--r--  gcc/stor-layout.c  26
-rw-r--r--  gcc/tree-ssa-pre.c 22
3 files changed, 28 insertions, 29 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index be431892f24..752edb86ed1 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,12 @@
+2012-05-14 Eric Botcazou <ebotcazou@adacore.com>
+
+ * stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype
+ into a PLUS_EXPR byte offset.
+
+ * tree-ssa-pre.c (can_value_number_call): Delete.
+ (compute_avail): Skip all statements with side effects.
+ <GIMPLE_CALL>: Skip calls to internal functions.
+
2012-05-13 Steven Bosscher <steven@gcc.gnu.org>
* config/pa/pa.md: Use define_c_enum for "unspec" and "unspecv".
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index cb47a52b715..a592bda5c23 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -786,25 +786,29 @@ start_record_layout (tree t)
}
/* Return the combined bit position for the byte offset OFFSET and the
- bit position BITPOS. */
+ bit position BITPOS.
+
+ These functions operate on byte and bit positions present in FIELD_DECLs
+ and assume that these expressions result in no (intermediate) overflow.
+ This assumption is necessary to fold the expressions as much as possible,
+ so as to avoid creating artificially variable-sized types in languages
+ supporting variable-sized types like Ada. */
tree
bit_from_pos (tree offset, tree bitpos)
{
+ if (TREE_CODE (offset) == PLUS_EXPR)
+ offset = size_binop (PLUS_EXPR,
+ fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
+ fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
+ else
+ offset = fold_convert (bitsizetype, offset);
return size_binop (PLUS_EXPR, bitpos,
- size_binop (MULT_EXPR,
- fold_convert (bitsizetype, offset),
- bitsize_unit_node));
+ size_binop (MULT_EXPR, offset, bitsize_unit_node));
}
/* Return the combined truncated byte position for the byte offset OFFSET and
- the bit position BITPOS.
-
- These functions operate on byte and bit positions as present in FIELD_DECLs
- and assume that these expressions result in no (intermediate) overflow.
- This assumption is necessary to fold the expressions as much as possible,
- so as to avoid creating artificially variable-sized types in languages
- supporting variable-sized types like Ada. */
+ the bit position BITPOS. */
tree
byte_from_pos (tree offset, tree bitpos)
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index fcd7feeea1e..0550879faa8 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -2586,19 +2586,6 @@ compute_antic (void)
sbitmap_free (changed_blocks);
}
-/* Return true if we can value number the call in STMT. This is true
- if we have a pure or constant call to a real function. */
-
-static bool
-can_value_number_call (gimple stmt)
-{
- if (gimple_call_internal_p (stmt))
- return false;
- if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
- return true;
- return false;
-}
-
/* Return true if OP is a tree which we can perform PRE on.
This may not match the operations we can value number, but in
a perfect world would. */
@@ -3975,8 +3962,7 @@ compute_avail (void)
or control flow.
If this isn't a call or it is the last stmt in the
basic-block then the CFG represents things correctly. */
- if (is_gimple_call (stmt)
- && !stmt_ends_bb_p (stmt))
+ if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
{
/* Non-looping const functions always return normally.
Otherwise the call might not return or have side-effects
@@ -3998,8 +3984,7 @@ compute_avail (void)
bitmap_value_insert_into_set (AVAIL_OUT (block), e);
}
- if (gimple_has_volatile_ops (stmt)
- || stmt_could_throw_p (stmt))
+ if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
continue;
switch (gimple_code (stmt))
@@ -4017,7 +4002,8 @@ compute_avail (void)
pre_expr result = NULL;
VEC(vn_reference_op_s, heap) *ops = NULL;
- if (!can_value_number_call (stmt))
+ /* We can value number only calls to real functions. */
+ if (gimple_call_internal_p (stmt))
continue;
copy_reference_ops_from_call (stmt, &ops);
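For the tree-ssa-pre.c side, here is a sketch (not GCC code) of the new filtering order in compute_avail, using a hypothetical call_info struct in place of a GIMPLE statement; its bool fields stand in for gimple_has_side_effects, stmt_could_throw_p and gimple_call_internal_p.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in for a GIMPLE call statement.  */
struct call_info
{
  bool has_side_effects;	/* gimple_has_side_effects () */
  bool could_throw;		/* stmt_could_throw_p () */
  bool internal;		/* gimple_call_internal_p () */
};

/* After the patch, compute_avail skips any statement with side
   effects or that could throw, then skips internal calls in the
   GIMPLE_CALL case; together this subsumes the deleted
   can_value_number_call predicate.  */
static bool
call_is_value_numberable (const struct call_info *c)
{
  if (c->has_side_effects || c->could_throw)
    return false;		/* skipped for every statement kind */
  if (c->internal)
    return false;		/* skipped in the GIMPLE_CALL case */
  return true;
}

int
main (void)
{
  struct call_info pure_call = { false, false, false };
  struct call_info internal_call = { false, false, true };
  printf ("%d %d\n", call_is_value_numberable (&pure_call),
	  call_is_value_numberable (&internal_call));	/* prints: 1 0 */
  return 0;
}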