author    steven <steven@138bc75d-0d04-0410-961f-82ee72b054a4>    2004-08-18 20:53:59 +0000
committer steven <steven@138bc75d-0d04-0410-961f-82ee72b054a4>    2004-08-18 20:53:59 +0000
commit    78d140c990f84aa03c72b5f074683bf3f7b6307f (patch)
tree      93b7857ed347181c4c208adbe8b5fdd8d2f48e15 /gcc/gcse.c
parent    1c46afb0bec4da2209f3dcbb5e8c64172b9e0570 (diff)
download  gcc-78d140c990f84aa03c72b5f074683bf3f7b6307f.tar.gz
	* Makefile.in (OBJS-common): Add postreload-gcse.c.
	Add new postreload-gcse.o.
	* cse.c (SAFE_HASH): Define as wrapper around safe_hash.
	(lookup_as_function, insert, rehash_using_reg, use_related_value,
	equiv_constant): Use SAFE_HASH instead of safe_hash.
	(exp_equiv_p): Export.  Add for_gcse argument when comparing
	for GCSE.
	(lookup, lookup_for_remove, merge_equiv_classes, find_best_addr,
	find_comparison_args, fold_rtx, cse_insn): Update callers.
	(hash_rtx): New function derived from old canon_hash and bits
	from gcse.c hash_expr_1.
	(canon_hash_string): Rename to hash_rtx_string.
	(canon_hash, safe_hash): Make static inline.  Call hash_rtx.
	* cselib.c (hash_rtx): Rename to cselib_hash_rtx.
	(cselib_lookup): Update this caller.
	* gcse.c (modify_mem_list_set, canon_modify_mem_list_set): Make
	static.
	(hash_expr): Call hash_rtx.
	(ldst_entry): Likewise.
	(expr_equiv_p): Call exp_equiv_p.
	(struct unoccr, hash_expr_1, hash_string_1, lookup_expr,
	reg_used_on_edge, reg_set_between_after_reload_p,
	reg_used_between_after_reload_p, get_avail_load_store_reg,
	is_jump_table_basic_block, bb_has_well_behaved_predecessors,
	get_bb_avail_insn, hash_scan_set_after_reload,
	compute_hash_table_after_reload,
	eliminate_partially_redundant_loads, gcse_after_reload,
	get_bb_avail_insn, gcse_after_reload_main): Remove.
	* postreload-gcse.c: New file, reincarnating most of the above.
	* rtl.h (exp_equiv_p, hash_rtx): New prototypes.
	(gcse_after_reload_main): Update prototype.
	* timevar.def (TV_GCSE_AFTER_RELOAD): New timevar.
	* passes.c (rest_of_handle_gcse2): Use it.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@86206 138bc75d-0d04-0410-961f-82ee72b054a4
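
For readers outside the GCC tree: the hashing being consolidated here is
deliberately content-based, never address-based.  The hash_string_1 helper
removed below simply sums the bytes of a string (its cse.c counterpart
survives as hash_rtx_string per the ChangeLog), so identical strings hash
identically no matter where they live in memory.  A minimal standalone
sketch of that helper, renamed here for illustration:

    #include <stdio.h>

    /* Standalone copy of gcse.c's hash_string_1 (made redundant by this
       patch): hash a string by summing its bytes.  */
    static unsigned
    hash_string (const char *ps)
    {
      unsigned hash = 0;
      const unsigned char *p = (const unsigned char *) ps;

      if (p)
        while (*p)
          hash += *p++;

      return hash;
    }

    int
    main (void)
    {
      /* Equal strings get equal hashes regardless of their addresses,
         which keeps hash-table layout, and hence register choices and
         dump files, stable across bootstrap stages.  */
      printf ("%u %u\n", hash_string ("const char"), hash_string ("const char"));
      return 0;
    }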
Diffstat (limited to 'gcc/gcse.c')
-rw-r--r--    gcc/gcse.c    1028
1 files changed, 10 insertions, 1018 deletions
diff --git a/gcc/gcse.c b/gcc/gcse.c
index b9a7874c348..16d76fe4d6c 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -495,11 +495,12 @@ static sbitmap *reg_set_in_block;
/* Array, indexed by basic block number for a list of insns which modify
memory within that block. */
static rtx * modify_mem_list;
-bitmap modify_mem_list_set;
+static bitmap modify_mem_list_set;
/* This array parallels modify_mem_list, but is kept canonicalized. */
static rtx * canon_modify_mem_list;
-bitmap canon_modify_mem_list_set;
+static bitmap canon_modify_mem_list_set;
+
/* Various variables for statistics gathering. */
/* Memory used in a pass.
@@ -564,8 +565,6 @@ static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
-static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
-static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
@@ -576,7 +575,6 @@ static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
-static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
@@ -1462,9 +1460,7 @@ oprs_available_p (rtx x, rtx insn)
MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
indicating if a volatile operand is found or if the expression contains
something we don't want to insert in the table. HASH_TABLE_SIZE is
- the current size of the hash table to be probed.
-
- ??? One might want to merge this with canon_hash. Later. */
+ the current size of the hash table to be probed. */
static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
@@ -1474,208 +1470,11 @@ hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
*do_not_record_p = 0;
- hash = hash_expr_1 (x, mode, do_not_record_p);
+ hash = hash_rtx (x, mode, do_not_record_p,
+ NULL, /*have_reg_qty=*/false);
return hash % hash_table_size;
}
-/* Hash a string. Just add its bytes up. */
-
-static inline unsigned
-hash_string_1 (const char *ps)
-{
- unsigned hash = 0;
- const unsigned char *p = (const unsigned char *) ps;
-
- if (p)
- while (*p)
- hash += *p++;
-
- return hash;
-}
-
-/* Subroutine of hash_expr to do the actual work. */
-
-static unsigned int
-hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
-{
- int i, j;
- unsigned hash = 0;
- enum rtx_code code;
- const char *fmt;
-
- if (x == 0)
- return hash;
-
- /* Used to turn recursion into iteration. We can't rely on GCC's
- tail-recursion elimination since we need to keep accumulating values
- in HASH. */
- repeat:
-
- code = GET_CODE (x);
- switch (code)
- {
- case REG:
- hash += ((unsigned int) REG << 7) + REGNO (x);
- return hash;
-
- case CONST_INT:
- hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
- + (unsigned int) INTVAL (x));
- return hash;
-
- case CONST_DOUBLE:
- /* This is like the general case, except that it only counts
- the integers representing the constant. */
- hash += (unsigned int) code + (unsigned int) GET_MODE (x);
- if (GET_MODE (x) != VOIDmode)
- for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
- hash += (unsigned int) XWINT (x, i);
- else
- hash += ((unsigned int) CONST_DOUBLE_LOW (x)
- + (unsigned int) CONST_DOUBLE_HIGH (x));
- return hash;
-
- case CONST_VECTOR:
- {
- int units;
- rtx elt;
-
- units = CONST_VECTOR_NUNITS (x);
-
- for (i = 0; i < units; ++i)
- {
- elt = CONST_VECTOR_ELT (x, i);
- hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
- }
-
- return hash;
- }
-
- /* Assume there is only one rtx object for any given label. */
- case LABEL_REF:
- /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
- differences and differences between each stage's debugging dumps. */
- hash += (((unsigned int) LABEL_REF << 7)
- + CODE_LABEL_NUMBER (XEXP (x, 0)));
- return hash;
-
- case SYMBOL_REF:
- {
- /* Don't hash on the symbol's address to avoid bootstrap differences.
- Different hash values may cause expressions to be recorded in
- different orders and thus different registers to be used in the
- final assembler. This also avoids differences in the dump files
- between various stages. */
- unsigned int h = 0;
- const unsigned char *p = (const unsigned char *) XSTR (x, 0);
-
- while (*p)
- h += (h << 7) + *p++; /* ??? revisit */
-
- hash += ((unsigned int) SYMBOL_REF << 7) + h;
- return hash;
- }
-
- case MEM:
- if (MEM_VOLATILE_P (x))
- {
- *do_not_record_p = 1;
- return 0;
- }
-
- hash += (unsigned int) MEM;
- /* We used alias set for hashing, but this is not good, since the alias
- set may differ in -fprofile-arcs and -fbranch-probabilities compilation
- causing the profiles to fail to match. */
- x = XEXP (x, 0);
- goto repeat;
-
- case PRE_DEC:
- case PRE_INC:
- case POST_DEC:
- case POST_INC:
- case PC:
- case CC0:
- case CALL:
- case UNSPEC_VOLATILE:
- *do_not_record_p = 1;
- return 0;
-
- case ASM_OPERANDS:
- if (MEM_VOLATILE_P (x))
- {
- *do_not_record_p = 1;
- return 0;
- }
- else
- {
- /* We don't want to take the filename and line into account. */
- hash += (unsigned) code + (unsigned) GET_MODE (x)
- + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
- + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
- + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
-
- if (ASM_OPERANDS_INPUT_LENGTH (x))
- {
- for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
- {
- hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
- GET_MODE (ASM_OPERANDS_INPUT (x, i)),
- do_not_record_p)
- + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
- (x, i)));
- }
-
- hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
- x = ASM_OPERANDS_INPUT (x, 0);
- mode = GET_MODE (x);
- goto repeat;
- }
- return hash;
- }
-
- default:
- break;
- }
-
- hash += (unsigned) code + (unsigned) GET_MODE (x);
- for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
- {
- if (fmt[i] == 'e')
- {
- /* If we are about to do the last recursive call
- needed at this level, change it into iteration.
- This function is called enough to be worth it. */
- if (i == 0)
- {
- x = XEXP (x, i);
- goto repeat;
- }
-
- hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
- if (*do_not_record_p)
- return 0;
- }
-
- else if (fmt[i] == 'E')
- for (j = 0; j < XVECLEN (x, i); j++)
- {
- hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
- if (*do_not_record_p)
- return 0;
- }
-
- else if (fmt[i] == 's')
- hash += hash_string_1 (XSTR (x, i));
- else if (fmt[i] == 'i')
- hash += (unsigned int) XINT (x, i);
- else
- abort ();
- }
-
- return hash;
-}
-
/* Hash a set of register REGNO.
Sets are hashed on the register that is set. This simplifies the PRE copy
@@ -1692,148 +1491,12 @@ hash_set (int regno, int hash_table_size)
return hash % hash_table_size;
}
-/* Return nonzero if exp1 is equivalent to exp2.
- ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
+/* Return nonzero if exp1 is equivalent to exp2. */
static int
expr_equiv_p (rtx x, rtx y)
{
- int i, j;
- enum rtx_code code;
- const char *fmt;
-
- if (x == y)
- return 1;
-
- if (x == 0 || y == 0)
- return 0;
-
- code = GET_CODE (x);
- if (code != GET_CODE (y))
- return 0;
-
- /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
- if (GET_MODE (x) != GET_MODE (y))
- return 0;
-
- switch (code)
- {
- case PC:
- case CC0:
- case CONST_INT:
- return 0;
-
- case LABEL_REF:
- return XEXP (x, 0) == XEXP (y, 0);
-
- case SYMBOL_REF:
- return XSTR (x, 0) == XSTR (y, 0);
-
- case REG:
- return REGNO (x) == REGNO (y);
-
- case MEM:
- /* Can't merge two expressions in different alias sets, since we can
- decide that the expression is transparent in a block when it isn't,
- due to it being set with the different alias set. */
- if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
- return 0;
-
- /* A volatile mem should not be considered equivalent to any other. */
- if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
- return 0;
- break;
-
- /* For commutative operations, check both orders. */
- case PLUS:
- case MULT:
- case AND:
- case IOR:
- case XOR:
- case NE:
- case EQ:
- return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
- && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
- || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
- && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
-
- case ASM_OPERANDS:
- /* We don't use the generic code below because we want to
- disregard filename and line numbers. */
-
- /* A volatile asm isn't equivalent to any other. */
- if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
- return 0;
-
- if (GET_MODE (x) != GET_MODE (y)
- || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
- || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
- ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
- || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
- || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
- return 0;
-
- if (ASM_OPERANDS_INPUT_LENGTH (x))
- {
- for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
- if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
- ASM_OPERANDS_INPUT (y, i))
- || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
- ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
- return 0;
- }
-
- return 1;
-
- default:
- break;
- }
-
- /* Compare the elements. If any pair of corresponding elements
- fail to match, return 0 for the whole thing. */
-
- fmt = GET_RTX_FORMAT (code);
- for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
- {
- switch (fmt[i])
- {
- case 'e':
- if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
- return 0;
- break;
-
- case 'E':
- if (XVECLEN (x, i) != XVECLEN (y, i))
- return 0;
- for (j = 0; j < XVECLEN (x, i); j++)
- if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
- return 0;
- break;
-
- case 's':
- if (strcmp (XSTR (x, i), XSTR (y, i)))
- return 0;
- break;
-
- case 'i':
- if (XINT (x, i) != XINT (y, i))
- return 0;
- break;
-
- case 'w':
- if (XWINT (x, i) != XWINT (y, i))
- return 0;
- break;
-
- case '0':
- break;
-
- default:
- abort ();
- }
- }
-
- return 1;
+ return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
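
The one-line body above delegates to cse.c's exp_equiv_p with for_gcse set
to true.  As the removed code shows, the GCSE flavor of equivalence accepts
either operand order for commutative codes such as PLUS and MULT.  A
self-contained toy model of that commutative check follows; the struct and
names are illustrative inventions, not GCC's RTL types:

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    /* Tiny stand-in for an RTL expression: an operation code plus up to
       two operands.  Leaves have null operands.  */
    struct expr
    {
      const char *code;
      const struct expr *op[2];
    };

    static bool
    is_commutative (const char *code)
    {
      return !strcmp (code, "plus") || !strcmp (code, "mult");
    }

    /* Structural equivalence; for commutative codes, accept both operand
       orders, as expr_equiv_p did for PLUS, MULT, AND, IOR, XOR, NE, EQ.  */
    static bool
    equiv_p (const struct expr *x, const struct expr *y)
    {
      if (x == y)
        return true;
      if (!x || !y || strcmp (x->code, y->code))
        return false;
      if (equiv_p (x->op[0], y->op[0]) && equiv_p (x->op[1], y->op[1]))
        return true;
      if (is_commutative (x->code))
        return equiv_p (x->op[0], y->op[1]) && equiv_p (x->op[1], y->op[0]);
      return false;
    }

    int
    main (void)
    {
      struct expr r1 = { "reg1", { 0, 0 } };
      struct expr r2 = { "reg2", { 0, 0 } };
      struct expr a = { "plus", { &r1, &r2 } };
      struct expr b = { "plus", { &r2, &r1 } };
      printf ("%d\n", equiv_p (&a, &b));   /* Prints 1.  */
      return 0;
    }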
@@ -2556,28 +2219,6 @@ compute_hash_table (struct hash_table *table)
/* Expression tracking support. */
-/* Lookup pattern PAT in the expression TABLE.
- The result is a pointer to the table entry, or NULL if not found. */
-
-static struct expr *
-lookup_expr (rtx pat, struct hash_table *table)
-{
- int do_not_record_p;
- unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
- table->size);
- struct expr *expr;
-
- if (do_not_record_p)
- return NULL;
-
- expr = table->table[hash];
-
- while (expr && ! expr_equiv_p (expr->expr, pat))
- expr = expr->next_same_hash;
-
- return expr;
-}
-
/* Lookup REGNO in the set TABLE. The result is a pointer to the
table entry, or NULL if not found. */
@@ -5426,7 +5067,8 @@ ldst_entry (rtx x)
struct ls_expr * ptr;
unsigned int hash;
- hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
+ hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
+ NULL, /*have_reg_qty=*/false);
for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
@@ -6945,654 +6587,4 @@ is_too_expensive (const char *pass)
return false;
}
-/* The following code implements gcse after reload, the purpose of this
- pass is to cleanup redundant loads generated by reload and other
- optimizations that come after gcse. It searches for simple inter-block
- redundancies and tries to eliminate them by adding moves and loads
- in cold places. */
-
-/* The following structure holds the information about the occurrences of
- the redundant instructions. */
-struct unoccr
-{
- struct unoccr *next;
- edge pred;
- rtx insn;
-};
-
-static bool reg_used_on_edge (rtx, edge);
-static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
-static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
-static rtx get_avail_load_store_reg (rtx);
-static bool is_jump_table_basic_block (basic_block);
-static bool bb_has_well_behaved_predecessors (basic_block);
-static struct occr* get_bb_avail_insn (basic_block, struct occr *);
-static void hash_scan_set_after_reload (rtx, rtx, struct hash_table *);
-static void compute_hash_table_after_reload (struct hash_table *);
-static void eliminate_partially_redundant_loads (basic_block,
- rtx,
- struct expr *);
-static void gcse_after_reload (void);
-static struct occr* get_bb_avail_insn (basic_block, struct occr *);
-void gcse_after_reload_main (rtx, FILE *);
-
-
-/* Check if register REG is used in any insn waiting to be inserted on E.
- Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
- with PREV(insn),NEXT(insn) instead of calling
- reg_overlap_mentioned_p. */
-
-static bool
-reg_used_on_edge (rtx reg, edge e)
-{
- rtx insn;
-
- for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
- if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
- return true;
-
- return false;
-}
-
-/* Return the insn that sets register REG or clobbers it in between
- FROM_INSN and TO_INSN (exclusive of those two).
- Just like reg_set_between but for hard registers and not pseudos. */
-
-static rtx
-reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
-{
- rtx insn;
- int regno;
-
- if (! REG_P (reg))
- abort ();
- regno = REGNO (reg);
-
- /* We are called after register allocation. */
- if (regno >= FIRST_PSEUDO_REGISTER)
- abort ();
-
- if (from_insn == to_insn)
- return NULL_RTX;
-
- for (insn = NEXT_INSN (from_insn);
- insn != to_insn;
- insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- {
- if (FIND_REG_INC_NOTE (insn, reg)
- || (CALL_P (insn)
- && call_used_regs[regno])
- || find_reg_fusage (insn, CLOBBER, reg))
- return insn;
- }
- if (set_of (reg, insn) != NULL_RTX)
- return insn;
- }
- return NULL_RTX;
-}
-
-/* Return the insn that uses register REG in between FROM_INSN and TO_INSN
- (exclusive of those two). Similar to reg_used_between but for hard
- registers and not pseudos. */
-
-static rtx
-reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
-{
- rtx insn;
- int regno;
-
- if (! REG_P (reg))
- return to_insn;
- regno = REGNO (reg);
-
- /* We are called after register allocation. */
- if (regno >= FIRST_PSEUDO_REGISTER)
- abort ();
- if (from_insn == to_insn)
- return NULL_RTX;
-
- for (insn = NEXT_INSN (from_insn);
- insn != to_insn;
- insn = NEXT_INSN (insn))
- if (INSN_P (insn)
- && (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (CALL_P (insn)
- && call_used_regs[regno])
- || find_reg_fusage (insn, USE, reg)
- || find_reg_fusage (insn, CLOBBER, reg)))
- return insn;
- return NULL_RTX;
-}
-
-/* Return the loaded/stored register of a load/store instruction. */
-
-static rtx
-get_avail_load_store_reg (rtx insn)
-{
- if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */
- return SET_DEST(PATTERN(insn));
- if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
- return SET_SRC (PATTERN (insn));
- abort ();
-}
-
-/* Don't handle ABNORMAL edges or jump tables. */
-
-static bool
-is_jump_table_basic_block (basic_block bb)
-{
- rtx insn = BB_END (bb);
-
- if (JUMP_TABLE_DATA_P (insn))
- return true;
- return false;
-}
-
-/* Return nonzero if the predecessors of BB are "well behaved". */
-
-static bool
-bb_has_well_behaved_predecessors (basic_block bb)
-{
- edge pred;
-
- if (! bb->pred)
- return false;
- for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
- if (((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
- || is_jump_table_basic_block (pred->src))
- return false;
- return true;
-}
-
-
-/* Search for the occurrences of expression in BB. */
-
-static struct occr*
-get_bb_avail_insn (basic_block bb, struct occr *occr)
-{
- for (; occr != NULL; occr = occr->next)
- if (BLOCK_FOR_INSN (occr->insn)->index == bb->index)
- return occr;
- return NULL;
-}
-
-/* Perform partial GCSE pass after reload, try to eliminate redundant loads
- created by the reload pass. We try to look for a full or partial
- redundant loads fed by one or more loads/stores in predecessor BBs,
- and try adding loads to make them fully redundant. We also check if
- it's worth adding loads to be able to delete the redundant load.
-
- Algorithm:
- 1. Build available expressions hash table:
- For each load/store instruction, if the loaded/stored memory didn't
- change until the end of the basic block add this memory expression to
- the hash table.
- 2. Perform Redundancy elimination:
- For each load instruction do the following:
- perform partial redundancy elimination, check if it's worth adding
- loads to make the load fully redundant. If so add loads and
- register copies and delete the load.
-
- Future enhancement:
- if loaded register is used/defined between load and some store,
- look for some other free register between load and all its stores,
- and replace load with a copy from this register to the loaded
- register. */
-
-
-/* This handles the case where several stores feed a partially redundant
- load. It checks if the redundancy elimination is possible and if it's
- worth it. */
-
-static void
-eliminate_partially_redundant_loads (basic_block bb, rtx insn,
- struct expr *expr)
-{
- edge pred;
- rtx avail_insn = NULL_RTX;
- rtx avail_reg;
- rtx dest, pat;
- struct occr *a_occr;
- struct unoccr *occr, *avail_occrs = NULL;
- struct unoccr *unoccr, *unavail_occrs = NULL;
- int npred_ok = 0;
- gcov_type ok_count = 0; /* Redundant load execution count. */
- gcov_type critical_count = 0; /* Execution count of critical edges. */
-
- /* The execution count of the loads to be added to make the
- load fully redundant. */
- gcov_type not_ok_count = 0;
- basic_block pred_bb;
-
- pat = PATTERN (insn);
- dest = SET_DEST (pat);
- /* Check that the loaded register is not used, set, or killed from the
- beginning of the block. */
- if (reg_used_between_after_reload_p (dest,
- PREV_INSN (BB_HEAD (bb)), insn)
- || reg_set_between_after_reload_p (dest,
- PREV_INSN (BB_HEAD (bb)), insn))
- return;
-
- /* Check potential for replacing load with copy for predecessors. */
- for (pred = bb->pred; pred; pred = pred->pred_next)
- {
- rtx next_pred_bb_end;
-
- avail_insn = NULL_RTX;
- pred_bb = pred->src;
- next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
- for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
- a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
- {
- /* Check if the loaded register is not used. */
- avail_insn = a_occr->insn;
- if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
- abort ();
- /* Make sure we can generate a move from register avail_reg to
- dest. */
- extract_insn (gen_move_insn (copy_rtx (dest),
- copy_rtx (avail_reg)));
- if (! constrain_operands (1)
- || reg_killed_on_edge (avail_reg, pred)
- || reg_used_on_edge (dest, pred))
- {
- avail_insn = NULL;
- continue;
- }
- if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
- next_pred_bb_end))
- /* AVAIL_INSN remains non-null. */
- break;
- else
- avail_insn = NULL;
- }
- if (avail_insn != NULL_RTX)
- {
- npred_ok++;
- ok_count += pred->count;
- if (EDGE_CRITICAL_P (pred))
- critical_count += pred->count;
- occr = gmalloc (sizeof (struct unoccr));
- occr->insn = avail_insn;
- occr->pred = pred;
- occr->next = avail_occrs;
- avail_occrs = occr;
- }
- else
- {
- not_ok_count += pred->count;
- if (EDGE_CRITICAL_P (pred))
- critical_count += pred->count;
- unoccr = gmalloc (sizeof (struct unoccr));
- unoccr->insn = NULL_RTX;
- unoccr->pred = pred;
- unoccr->next = unavail_occrs;
- unavail_occrs = unoccr;
- }
- }
-
- if (npred_ok == 0 /* No load can be replaced by copy. */
- || (optimize_size && npred_ok > 1)) /* Prevent exploding the code. */
- goto cleanup;
-
- /* Check if it's worth applying the partial redundancy elimination. */
- if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
- goto cleanup;
-
- if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
- goto cleanup;
-
- /* Generate moves to the loaded register from where
- the memory is available. */
- for (occr = avail_occrs; occr; occr = occr->next)
- {
- avail_insn = occr->insn;
- pred = occr->pred;
- /* Set avail_reg to be the register having the value of the
- memory. */
- avail_reg = get_avail_load_store_reg (avail_insn);
- if (! avail_reg)
- abort ();
-
- insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
- copy_rtx (avail_reg)),
- pred);
-
- if (gcse_file)
- fprintf (gcse_file,
- "GCSE AFTER reload generating move from %d to %d on \
- edge from %d to %d\n",
- REGNO (avail_reg),
- REGNO (dest),
- pred->src->index,
- pred->dest->index);
- }
-
- /* Regenerate loads where the memory is unavailable. */
- for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
- {
- pred = unoccr->pred;
- insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
-
- if (gcse_file)
- fprintf (gcse_file,
- "GCSE AFTER reload: generating on edge from %d to %d\
- a copy of load:\n",
- pred->src->index,
- pred->dest->index);
- }
-
- /* Delete the insn if it is not available in this block and mark it
- for deletion if it is available. If insn is available it may help
- discover additional redundancies, so mark it for later deletion.*/
- for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
- a_occr && (a_occr->insn != insn);
- a_occr = get_bb_avail_insn (bb, a_occr->next));
-
- if (!a_occr)
- delete_insn (insn);
- else
- a_occr->deleted_p = 1;
-
-cleanup:
-
- while (unavail_occrs)
- {
- struct unoccr *temp = unavail_occrs->next;
- free (unavail_occrs);
- unavail_occrs = temp;
- }
-
- while (avail_occrs)
- {
- struct unoccr *temp = avail_occrs->next;
- free (avail_occrs);
- avail_occrs = temp;
- }
-}
-
-/* Performing the redundancy elimination as described before. */
-
-static void
-gcse_after_reload (void)
-{
- unsigned int i;
- rtx insn;
- basic_block bb;
- struct expr *expr;
- struct occr *occr;
-
- /* Note we start at block 1. */
-
- if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
- return;
-
- FOR_BB_BETWEEN (bb,
- ENTRY_BLOCK_PTR->next_bb->next_bb,
- EXIT_BLOCK_PTR,
- next_bb)
- {
- if (! bb_has_well_behaved_predecessors (bb))
- continue;
-
- /* Do not try this optimization on cold basic blocks. */
- if (probably_cold_bb_p (bb))
- continue;
-
- reset_opr_set_tables ();
-
- for (insn = BB_HEAD (bb);
- insn != NULL
- && insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
- {
- /* Is it a load - of the form (set (reg) (mem))? */
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SET
- && REG_P (SET_DEST (PATTERN (insn)))
- && MEM_P (SET_SRC (PATTERN (insn))))
- {
- rtx pat = PATTERN (insn);
- rtx src = SET_SRC (pat);
- struct expr *expr;
-
- if (general_operand (src, GET_MODE (src))
- /* Is the expression recorded? */
- && (expr = lookup_expr (src, &expr_hash_table)) != NULL
- /* Are the operands unchanged since the start of the
- block? */
- && oprs_not_set_p (src, insn)
- && ! MEM_VOLATILE_P (src)
- && GET_MODE (src) != BLKmode
- && !(flag_non_call_exceptions && may_trap_p (src))
- && !side_effects_p (src))
- {
- /* We now have a load (insn) and an available memory at
- its BB start (expr). Try to remove the loads if it is
- redundant. */
- eliminate_partially_redundant_loads (bb, insn, expr);
- }
- }
-
- /* Keep track of everything modified by this insn. */
- if (INSN_P (insn))
- mark_oprs_set (insn);
- }
- }
-
- commit_edge_insertions ();
-
- /* Go over the expression hash table and delete insns that were
- marked for later deletion. */
- for (i = 0; i < expr_hash_table.size; i++)
- {
- for (expr = expr_hash_table.table[i];
- expr != NULL;
- expr = expr->next_same_hash)
- for (occr = expr->avail_occr; occr; occr = occr->next)
- if (occr->deleted_p)
- delete_insn (occr->insn);
- }
-}
-
-/* Scan pattern PAT of INSN and add an entry to the hash TABLE.
- After reload we are interested in loads/stores only. */
-
-static void
-hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
-{
- rtx src = SET_SRC (pat);
- rtx dest = SET_DEST (pat);
-
- if (! MEM_P (src) && ! MEM_P (dest))
- return;
-
- if (REG_P (dest))
- {
- if (/* Don't GCSE something if we can't do a reg/reg copy. */
- can_copy_p (GET_MODE (dest))
- /* GCSE commonly inserts instruction after the insn. We can't
- do that easily for EH_REGION notes so disable GCSE on these
- for now. */
- && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
- /* Is SET_SRC something we want to gcse? */
- && general_operand (src, GET_MODE (src))
- /* Don't CSE a nop. */
- && ! set_noop_p (pat)
- && ! JUMP_P (insn))
- {
- /* An expression is not available if its operands are
- subsequently modified, including this insn. */
- if (oprs_available_p (src, insn))
- insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table);
- }
- }
- else if (REG_P (src))
- {
- /* Only record sets of pseudo-regs in the hash table. */
- if (/* Don't GCSE something if we can't do a reg/reg copy. */
- can_copy_p (GET_MODE (src))
- /* GCSE commonly inserts instruction after the insn. We can't
- do that easily for EH_REGION notes so disable GCSE on these
- for now. */
- && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
- /* Is SET_DEST something we want to gcse? */
- && general_operand (dest, GET_MODE (dest))
- /* Don't CSE a nop. */
- && ! set_noop_p (pat)
- &&! JUMP_P (insn)
- && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
- /* Check if the memory expression is killed after insn. */
- && ! load_killed_in_block_p (BLOCK_FOR_INSN (insn),
- INSN_CUID (insn) + 1,
- dest,
- 1)
- && oprs_unchanged_p (XEXP (dest, 0), insn, 1))
- {
- insert_expr_in_table (dest, GET_MODE (dest), insn, 0, 1, table);
- }
- }
-}
-
-
-/* Create hash table of memory expressions available at end of basic
- blocks. */
-
-static void
-compute_hash_table_after_reload (struct hash_table *table)
-{
- unsigned int i;
-
- table->set_p = 0;
-
- /* Initialize count of number of entries in hash table. */
- table->n_elems = 0;
- memset ((char *) table->table, 0,
- table->size * sizeof (struct expr *));
-
- /* While we compute the hash table we also compute a bit array of which
- registers are set in which blocks. */
- sbitmap_vector_zero (reg_set_in_block, last_basic_block);
-
- /* Re-cache any INSN_LIST nodes we have allocated. */
- clear_modify_mem_tables ();
-
- /* Some working arrays used to track first and last set in each block. */
- reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
-
- for (i = 0; i < max_gcse_regno; ++i)
- reg_avail_info[i].last_bb = NULL;
-
- FOR_EACH_BB (current_bb)
- {
- rtx insn;
- unsigned int regno;
-
- /* First pass over the instructions records information used to
- determine when registers and memory are first and last set. */
- for (insn = BB_HEAD (current_bb);
- insn && insn != NEXT_INSN (BB_END (current_bb));
- insn = NEXT_INSN (insn))
- {
- if (! INSN_P (insn))
- continue;
-
- if (CALL_P (insn))
- {
- bool clobbers_all = false;
-
-#ifdef NON_SAVING_SETJMP
- if (NON_SAVING_SETJMP
- && find_reg_note (insn, REG_SETJMP, NULL_RTX))
- clobbers_all = true;
-#endif
-
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (clobbers_all
- || TEST_HARD_REG_BIT (regs_invalidated_by_call,
- regno))
- record_last_reg_set_info (insn, regno);
-
- mark_call (insn);
- }
-
- note_stores (PATTERN (insn), record_last_set_info, insn);
-
- if (GET_CODE (PATTERN (insn)) == SET)
- {
- rtx src, dest;
-
- src = SET_SRC (PATTERN (insn));
- dest = SET_DEST (PATTERN (insn));
- if (MEM_P (src) && auto_inc_p (XEXP (src, 0)))
- {
- regno = REGNO (XEXP (XEXP (src, 0), 0));
- record_last_reg_set_info (insn, regno);
- }
- if (MEM_P (dest) && auto_inc_p (XEXP (dest, 0)))
- {
- regno = REGNO (XEXP (XEXP (dest, 0), 0));
- record_last_reg_set_info (insn, regno);
- }
- }
- }
-
- /* The next pass builds the hash table. */
- for (insn = BB_HEAD (current_bb);
- insn && insn != NEXT_INSN (BB_END (current_bb));
- insn = NEXT_INSN (insn))
- if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
- if (! find_reg_note (insn, REG_LIBCALL, NULL_RTX))
- hash_scan_set_after_reload (PATTERN (insn), insn, table);
- }
-
- free (reg_avail_info);
- reg_avail_info = NULL;
-}
-
-
-/* Main entry point of the GCSE after reload - clean some redundant loads
- due to spilling. */
-
-void
-gcse_after_reload_main (rtx f, FILE* file)
-{
- gcse_subst_count = 0;
- gcse_create_count = 0;
-
- gcse_file = file;
-
- gcc_obstack_init (&gcse_obstack);
- bytes_used = 0;
-
- /* We need alias. */
- init_alias_analysis ();
-
- max_gcse_regno = max_reg_num ();
-
- alloc_reg_set_mem (max_gcse_regno);
- alloc_gcse_mem (f);
- alloc_hash_table (max_cuid, &expr_hash_table, 0);
- compute_hash_table_after_reload (&expr_hash_table);
-
- if (gcse_file)
- dump_hash_table (gcse_file, "Expression", &expr_hash_table);
-
- if (expr_hash_table.n_elems > 0)
- gcse_after_reload ();
-
- free_hash_table (&expr_hash_table);
-
- free_gcse_mem ();
- free_reg_set_mem ();
-
- /* We are finished with alias. */
- end_alias_analysis ();
-
- obstack_free (&gcse_obstack, NULL);
-}
-
#include "gt-gcse.h"
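
The post-reload pass removed above (reborn as postreload-gcse.c) gates its
transformation on the profitability test spelled out in
eliminate_partially_redundant_loads: the profile count of predecessor edges
where the loaded value is already available must outweigh both the count of
edges that would need a freshly inserted load and the count crossing
critical edges.  A standalone sketch of that test; the fraction constants
stand in for the GCSE_AFTER_RELOAD_*_FRACTION parameters and their values
here are illustrative assumptions, not the tree's defaults:

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the GCSE_AFTER_RELOAD_PARTIAL_FRACTION
       and GCSE_AFTER_RELOAD_CRITICAL_FRACTION parameters.  */
    #define PARTIAL_FRACTION  3
    #define CRITICAL_FRACTION 10

    /* Decide whether making a load fully redundant pays off, mirroring
       the checks in eliminate_partially_redundant_loads above.  */
    static bool
    worth_eliminating (long ok_count, long not_ok_count,
                       long critical_count, int npred_ok, bool optimize_size)
    {
      if (npred_ok == 0)                  /* No load replaceable by a copy.  */
        return false;
      if (optimize_size && npred_ok > 1)  /* Prevent exploding the code.  */
        return false;
      if (ok_count < PARTIAL_FRACTION * not_ok_count)
        return false;
      if (ok_count < CRITICAL_FRACTION * critical_count)
        return false;
      return true;
    }

    int
    main (void)
    {
      /* A hot predecessor (count 900) already holds the value; a cold one
         (count 30) would need an extra load: judged profitable.  */
      printf ("%d\n", worth_eliminating (900, 30, 0, 1, false));
      return 0;
    }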