Diffstat (limited to 'gcc/cse.c')
-rw-r--r--  gcc/cse.c  71
1 file changed, 63 insertions, 8 deletions
diff --git a/gcc/cse.c b/gcc/cse.c
index fc15511dbc8..0304d6f763f 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -528,6 +528,10 @@ struct table_elt
static struct table_elt *table[HASH_SIZE];
+/* Number of elements in the hash table. */
+
+static unsigned int table_size;
+
/* Chain of `struct table_elt's made so far for this function
but currently removed from the table. */
@@ -961,6 +965,8 @@ new_basic_block (void)
}
}
+ table_size = 0;
+
#ifdef HAVE_cc0
prev_insn = 0;
prev_insn_cc0 = 0;
@@ -1371,6 +1377,8 @@ remove_from_table (struct table_elt *elt, unsigned int hash)
/* Now add it to the free element chain. */
elt->next_same_hash = free_element_chain;
free_element_chain = elt;
+
+ table_size--;
}
/* Look up X in the hash table and return its table element,
@@ -1648,6 +1656,8 @@ insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mo
}
}
+ table_size++;
+
return elt;
}
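
Taken together, the hunks above maintain a straightforward occupancy counter: table_size is cleared with the table in new_basic_block, dropped in remove_from_table, and bumped in insert. A minimal standalone sketch of the same invariant, using hypothetical names (insert_elt, remove_elt), not the GCC code:

    /* Sketch of the table_size invariant; hypothetical names, not
       the GCC code.  */

    #define HASH_SIZE 32

    struct elt { struct elt *next_same_hash; };

    static struct elt *table[HASH_SIZE];
    static struct elt *free_element_chain;
    static unsigned int table_size;  /* live elements, all buckets */

    static void
    insert_elt (struct elt *e, unsigned int hash)
    {
      e->next_same_hash = table[hash];
      table[hash] = e;
      table_size++;                  /* mirror every insertion */
    }

    static void
    remove_elt (struct elt *e, unsigned int hash)
    {
      struct elt **p;

      for (p = &table[hash]; *p; p = &(*p)->next_same_hash)
        if (*p == e)
          {
            *p = e->next_same_hash;
            e->next_same_hash = free_element_chain;
            free_element_chain = e;
            table_size--;            /* mirror every removal */
            return;
          }
    }

    int
    main (void)
    {
      struct elt e;
      insert_elt (&e, 7);
      remove_elt (&e, 7);
      return (int) table_size;       /* back to 0: counter in sync */
    }

The counter is never used for resizing; its only consumer is the recursion bound introduced further down, where the number of recorded expressions caps the useful folding depth.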
@@ -3432,10 +3442,10 @@ fold_rtx_subreg (rtx x, rtx insn)
return x;
}
-/* Fold MEM. */
+/* Fold MEM.  Not to be called directly; see fold_rtx_mem instead.  */
static rtx
-fold_rtx_mem (rtx x, rtx insn)
+fold_rtx_mem_1 (rtx x, rtx insn)
{
enum machine_mode mode = GET_MODE (x);
rtx new;
@@ -3598,6 +3608,51 @@ fold_rtx_mem (rtx x, rtx insn)
}
}
+/* Fold MEM. */
+
+static rtx
+fold_rtx_mem (rtx x, rtx insn)
+{
+  /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
+     refuse to allow recursion of the latter past n levels.  This can
+     happen because fold_rtx_mem will try to fold the address of the
+     memory reference it is passed, i.e. conceptually throwing away
+     the MEM and reinjecting the bare address into fold_rtx.  As a
+     result, patterns like
+
+       set (reg1)
+           (plus (reg)
+                 (mem (plus (reg2) (const_int))))
+
+       set (reg2)
+           (plus (reg)
+                 (mem (plus (reg1) (const_int))))
+
+     will defeat any "first-order" short-circuit put in either
+     function to prevent these infinite oscillations.
+
+     The heuristic for determining n is as follows: since each time
+     it is invoked fold_rtx_mem throws away a MEM, and since MEMs
+     are generically not nested, we assume that each invocation of
+     fold_rtx_mem corresponds to a new "top-level" operand, i.e.
+     the source or the destination of a SET.  So fold_rtx_mem is
+     bound to stop or cycle before n recursions, n being the number
+     of expressions recorded in the hash table.  We also leave some
+     play to account for the initial steps.  */
+
+  static unsigned int depth;
+  rtx ret;
+
+  if (depth > 3 + table_size)
+    return x;
+
+  depth++;
+  ret = fold_rtx_mem_1 (x, insn);
+  depth--;
+
+  return ret;
+}
+
/* If X is a nontrivial arithmetic operation on an argument
for which a constant value can be determined, return
the result of operating on that value, as a constant.
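
The wrapper above is an instance of a general C idiom: a function-local static depth counter bounds mutual recursion without threading an extra parameter through every call site, provided all re-entry funnels through the wrapper. A self-contained sketch of the idiom, with hypothetical names (work, work_1) and a fixed cutoff standing in for 3 + table_size:

    #include <stdio.h>

    static int work_1 (int x);

    /* Wrapper: all recursion funnels through here, so one static
       counter is enough to bound the depth of the whole cycle.  */
    static int
    work (int x)
    {
      static unsigned int depth;  /* zero-initialized, per C rules */
      int ret;

      if (depth > 16)             /* fixed stand-in for 3 + table_size */
        return x;                 /* give up, return the input unchanged */

      depth++;
      ret = work_1 (x);
      depth--;

      return ret;
    }

    /* Worker: re-enters through the wrapper, the way fold_rtx_mem_1
       feeds the folded address back into fold_rtx and from there
       into fold_rtx_mem again.  */
    static int
    work_1 (int x)
    {
      return x <= 0 ? 0 : 1 + work (x - 1);
    }

    int
    main (void)
    {
      /* Without the guard this would recurse 1000 frames deep; with
         it, the mutual recursion is cut off after 17 levels.  */
      printf ("%d\n", work (1000));
      return 0;
    }

Keeping depth static inside the wrapper rather than at file scope narrows its visibility to the one function that must maintain it; the early return sits before the increment, so the counter stays balanced on every path.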
@@ -4262,10 +4317,8 @@ fold_rtx (rtx x, rtx insn)
{
int is_shift
= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
- rtx y = lookup_as_function (folded_arg0, code);
- rtx inner_const;
+ rtx y, inner_const, new_const;
enum rtx_code associate_code;
- rtx new_const;
if (is_shift
&& (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
@@ -4278,11 +4331,9 @@ fold_rtx (rtx x, rtx insn)
break;
}
+ y = lookup_as_function (folded_arg0, code);
if (y == 0)
break;
- inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
- if (!inner_const || GET_CODE (inner_const) != CONST_INT)
- break;
/* If we have compiled a statement like
"if (x == (x & mask1))", and now are looking at
@@ -4292,6 +4343,10 @@ fold_rtx (rtx x, rtx insn)
if (XEXP (y, 0) == folded_arg0)
break;
+ inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
+ if (!inner_const || GET_CODE (inner_const) != CONST_INT)
+ break;
+
/* Don't associate these operations if they are a PLUS with the
same constant and it is a power of two. These might be doable
with a pre- or post-increment. Similarly for two subtracts of
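
The last two hunks reorder early exits: the cheap pointer test XEXP (y, 0) == folded_arg0 now runs before the recursive equiv_constant (fold_rtx (XEXP (y, 1), 0)) call, so the self-reference bail-out no longer pays for a fold whose result it would discard. A schematic of the reordered logic, with hypothetical names (try_associate, expensive_fold), not the GCC code:

    #include <stddef.h>

    struct expr { struct expr *op0, *op1; int is_const; };

    /* Stand-in for equiv_constant (fold_rtx (...)), which can recurse
       deeply in the real compiler.  Hypothetical, for illustration.  */
    static struct expr *
    expensive_fold (struct expr *e)
    {
      return (e && e->is_const) ? e : NULL;
    }

    static struct expr *
    try_associate (struct expr *y, struct expr *folded_arg0)
    {
      struct expr *inner_const;

      /* Cheap pointer comparison first (the test the patch hoists):
         reject self-reference before any recursive folding.  */
      if (y->op0 == folded_arg0)
        return NULL;

      /* Only now pay for the potentially recursive fold.  */
      inner_const = expensive_fold (y->op1);
      if (inner_const == NULL)
        return NULL;

      return inner_const;
    }

    int
    main (void)
    {
      struct expr c = { NULL, NULL, 1 };  /* a known constant */
      struct expr a = { NULL, NULL, 0 };
      struct expr y = { &a, &c, 0 };

      /* op0 differs from folded_arg0, so the fold goes ahead.  */
      return try_associate (&y, &c) != NULL ? 0 : 1;
    }

On inputs that pass both tests the outcome is unchanged; the reordering only skips the recursive fold on rejected candidates, which also spares the recursion budget enforced by the depth guard above.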