author     aj <aj@138bc75d-0d04-0410-961f-82ee72b054a4>   2004-06-12 13:17:39 +0000
committer  aj <aj@138bc75d-0d04-0410-961f-82ee72b054a4>   2004-06-12 13:17:39 +0000
commit     b9f02dbb0382be4a33401a285a767517968eaadd (patch)
tree       a4c4741e36a0cc8d87150d30f2d4237dd22150b9 /gcc/gcse.c
parent     1e157ad48f19395b0f8cf25b2ef4ea710aa2ccfd (diff)
* gcse.c (record_set_info): Use predicates like REG_P.
(mems_conflict_for_gcse_p): Likewise.
(load_killed_in_block_p): Likewise.
(hash_expr_1): Likewise.
(insert_set_in_table): Likewise.
(gcse_constant_p): Likewise.
(hash_scan_set): Likewise.
(hash_scan_insn): Likewise.
(canon_list_insert): Likewise.
(record_last_mem_set_info): Likewise.
(record_last_set_info): Likewise.
(compute_hash_table_work): Likewise.
(mark_set): Likewise.
(mark_clobber): Likewise.
(mark_oprs_set): Likewise.
(compute_transp): Likewise.
(find_avail_set): Likewise.
(cprop_insn): Likewise.
(do_local_cprop): Likewise.
(cprop): Likewise.
(find_implicit_sets): Likewise.
(find_bypass_set): Likewise.
(bypass_conditional_jumps): Likewise.
(insert_insn_end_bb): Likewise.
(pre_insert_copy_insn): Likewise.
(compute_transpout): Likewise.
(next_ls_expr): Likewise.
(invalidate_any_buried_refs): Likewise.
(compute_ld_motion_mems): Likewise.
(reg_set_info): Likewise.
(reg_clear_last_set): Likewise.
(find_moveable_store): Likewise.
(compute_store_table): Likewise.
(find_loads): Likewise.
(store_killed_in_insn): Likewise.
(insert_insn_start_bb): Likewise.
(reg_set_between_after_reload_p): Likewise.
(reg_used_between_after_reload_p): Likewise.
(is_jump_table_basic_block): Likewise.
(gcse_after_reload): Likewise.
(hash_scan_set_after_reload): Likewise.
(compute_hash_table_after_reload): Likewise.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@83026 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/gcse.c')
-rw-r--r--  gcc/gcse.c  185
1 file changed, 91 insertions(+), 94 deletions(-)
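The predicates this patch switches to are thin macros over GET_CODE, defined in gcc/rtl.h. A minimal sketch of the ones used here, approximately as they read in the 2004 tree (exact definitions may differ slightly):

/* Each predicate just tests the rtx code of its operand, so most of
   the conversion below is mechanical but considerably more readable.  */
#define REG_P(X)    (GET_CODE (X) == REG)
#define MEM_P(X)    (GET_CODE (X) == MEM)
#define LABEL_P(X)  (GET_CODE (X) == CODE_LABEL)
#define NOTE_P(X)   (GET_CODE (X) == NOTE)
#define JUMP_P(X)   (GET_CODE (X) == JUMP_INSN)
#define CALL_P(X)   (GET_CODE (X) == CALL_INSN)

/* JUMP_TABLE_DATA_P also inspects the jump's pattern (sketch).  */
#define JUMP_TABLE_DATA_P(INSN)                        \
  (JUMP_P (INSN)                                       \
   && (GET_CODE (PATTERN (INSN)) == ADDR_VEC           \
       || GET_CODE (PATTERN (INSN)) == ADDR_DIFF_VEC))

One caveat worth noting: CALL_P tests for a CALL_INSN, i.e. a whole instruction. A few hunks below apply it to a SET_SRC or to an element of a PARALLEL, where the old test was GET_CODE (...) == CALL, a different rtx code; in those spots the patch is a semantic change rather than a pure respelling.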
diff --git a/gcc/gcse.c b/gcc/gcse.c
index b6d0a6b4d4a..20baed8a9ef 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -232,8 +232,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
substitutions.
PRE is quite expensive in complicated functions because the DFA can take
- awhile to converge. Hence we only perform one pass. The parameter max-gcse-passes can
- be modified if one wants to experiment.
+ a while to converge. Hence we only perform one pass. The parameter
+ max-gcse-passes can be modified if one wants to experiment.
**********************
@@ -288,7 +288,6 @@ static FILE *gcse_file;
* If we changed any jumps via cprop.
* If we added any labels via edge splitting. */
-
static int run_jump_opt_after_gcse;
/* Bitmaps are normally not included in debugging dumps.
@@ -707,7 +706,7 @@ gcse_main (rtx f, FILE *file)
/* Return if there's nothing to do, or it is too expensive. */
if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
return 0;
-
+
gcc_obstack_init (&gcse_obstack);
bytes_used = 0;
@@ -822,6 +821,7 @@ gcse_main (rtx f, FILE *file)
obstack_free (&gcse_obstack, NULL);
free_reg_set_mem ();
+
/* We are finished with alias. */
end_alias_analysis ();
allocate_reg_info (max_reg_num (), FALSE, FALSE);
@@ -1013,7 +1013,8 @@ free_gcse_mem (void)
ABSALTERED. */
static void
-compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc, struct hash_table *table)
+compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
+ struct hash_table *table)
{
unsigned int i;
@@ -1154,7 +1155,7 @@ record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
rtx record_set_insn = (rtx) data;
- if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
+ if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
record_one_set (REGNO (dest), record_set_insn);
}
@@ -1355,7 +1356,7 @@ mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
/* If DEST is not a MEM, then it will not conflict with the load. Note
that function calls are assumed to clobber memory, but are handled
elsewhere. */
- if (GET_CODE (dest) != MEM)
+ if (! MEM_P (dest))
return;
/* If we are setting a MEM in our list of specially recognized MEMs,
@@ -1403,7 +1404,7 @@ load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
/* If SETTER is a call everything is clobbered. Note that calls
to pure functions are never put on the list, so we need not
worry about them. */
- if (GET_CODE (setter) == CALL_INSN)
+ if (CALL_P (setter))
return 1;
/* SETTER must be an INSN of some kind that sets memory. Call
@@ -1485,14 +1486,14 @@ hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
enum rtx_code code;
const char *fmt;
- /* Used to turn recursion into iteration. We can't rely on GCC's
- tail-recursion elimination since we need to keep accumulating values
- in HASH. */
-
if (x == 0)
return hash;
+ /* Used to turn recursion into iteration. We can't rely on GCC's
+ tail-recursion elimination since we need to keep accumulating values
+ in HASH. */
repeat:
+
code = GET_CODE (x);
switch (code)
{
@@ -1964,7 +1965,7 @@ insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
struct occr *cur_occr, *last_occr = NULL;
if (GET_CODE (x) != SET
- || GET_CODE (SET_DEST (x)) != REG)
+ || ! REG_P (SET_DEST (x)))
abort ();
hash = hash_set (REGNO (SET_DEST (x)), table->size);
@@ -2048,12 +2049,10 @@ gcse_constant_p (rtx x)
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
return true;
-
/* Consider a COMPARE of the same registers is a constant
- if they are not floating point registers. */
+ if they are not floating point registers. */
if (GET_CODE(x) == COMPARE
- && GET_CODE (XEXP (x, 0)) == REG
- && GET_CODE (XEXP (x, 1)) == REG
+ && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
&& REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
&& ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
&& ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
@@ -2072,10 +2071,10 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
rtx dest = SET_DEST (pat);
rtx note;
- if (GET_CODE (src) == CALL)
+ if (CALL_P (src))
hash_scan_call (src, insn, table);
- else if (GET_CODE (dest) == REG)
+ else if (REG_P (dest))
{
unsigned int regno = REGNO (dest);
rtx tmp;
@@ -2105,7 +2104,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */
&& ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
- || GET_CODE (XEXP (note, 0)) != MEM))
+ || ! MEM_P (XEXP (note, 0))))
{
/* An expression is not anticipatable if its operands are
modified before this insn or if this is not the only SET in
@@ -2124,7 +2123,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
/* Record sets for constant/copy propagation. */
else if (table->set_p
&& regno >= FIRST_PSEUDO_REGISTER
- && ((GET_CODE (src) == REG
+ && ((REG_P (src)
&& REGNO (src) >= FIRST_PSEUDO_REGISTER
&& can_copy_p (GET_MODE (dest))
&& REGNO (src) != regno)
@@ -2140,7 +2139,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
/* In case of store we want to consider the memory value as available in
the REG stored in that memory. This makes it possible to remove
redundant loads from due to stores to the same location. */
- else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
+ else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
{
unsigned int regno = REGNO (src);
@@ -2164,7 +2163,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */
&& ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
- || GET_CODE (XEXP (note, 0)) != MEM))
+ || ! MEM_P (XEXP (note, 0))))
{
/* Stores are never anticipatable. */
int antic_p = 0;
@@ -2232,13 +2231,13 @@ hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
hash_scan_set (x, insn, table);
else if (GET_CODE (x) == CLOBBER)
hash_scan_clobber (x, insn, table);
- else if (GET_CODE (x) == CALL)
+ else if (CALL_P (x))
hash_scan_call (x, insn, table);
}
else if (GET_CODE (pat) == CLOBBER)
hash_scan_clobber (pat, insn, table);
- else if (GET_CODE (pat) == CALL)
+ else if (CALL_P (pat))
hash_scan_call (pat, insn, table);
}
@@ -2331,7 +2330,7 @@ canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
that function calls are assumed to clobber memory, but are handled
elsewhere. */
- if (GET_CODE (dest) != MEM)
+ if (! MEM_P (dest))
return;
dest_addr = get_addr (XEXP (dest, 0));
@@ -2360,7 +2359,7 @@ record_last_mem_set_info (rtx insn)
modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
bitmap_set_bit (modify_mem_list_set, bb);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
/* Note that traversals of this loop (other than for free-ing)
will break after encountering a CALL_INSN. So, there's no
@@ -2385,9 +2384,9 @@ record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
- if (GET_CODE (dest) == REG)
+ if (REG_P (dest))
record_last_reg_set_info (last_set_insn, REGNO (dest));
- else if (GET_CODE (dest) == MEM
+ else if (MEM_P (dest)
/* Ignore pushes, they clobber nothing. */
&& ! push_operand (dest, GET_MODE (dest)))
record_last_mem_set_info (last_set_insn);
@@ -2446,7 +2445,7 @@ compute_hash_table_work (struct hash_table *table)
if (! INSN_P (insn))
continue;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
@@ -2741,12 +2740,12 @@ mark_set (rtx pat, rtx insn)
|| GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0);
- if (GET_CODE (dest) == REG)
+ if (REG_P (dest))
SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
- else if (GET_CODE (dest) == MEM)
+ else if (MEM_P (dest))
record_last_mem_set_info (insn);
- if (GET_CODE (SET_SRC (pat)) == CALL)
+ if (CALL_P (SET_SRC (pat)))
mark_call (insn);
}
@@ -2760,7 +2759,7 @@ mark_clobber (rtx pat, rtx insn)
while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
clob = XEXP (clob, 0);
- if (GET_CODE (clob) == REG)
+ if (REG_P (clob))
SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
else
record_last_mem_set_info (insn);
@@ -2786,13 +2785,13 @@ mark_oprs_set (rtx insn)
mark_set (x, insn);
else if (GET_CODE (x) == CLOBBER)
mark_clobber (x, insn);
- else if (GET_CODE (x) == CALL)
+ else if (CALL_P (x))
mark_call (insn);
}
else if (GET_CODE (pat) == CLOBBER)
mark_clobber (pat, insn);
- else if (GET_CODE (pat) == CALL)
+ else if (CALL_P (pat))
mark_call (insn);
}
@@ -2897,7 +2896,7 @@ compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
{
rtx dest, dest_addr;
- if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
+ if (CALL_P (XEXP (list_entry, 0)))
{
if (set_p)
SET_BIT (bmap[bb->index], indx);
@@ -3156,7 +3155,7 @@ find_avail_set (int regno, rtx insn)
/* If the source of the set is anything except a register, then
we have reached the end of the copy chain. */
- if (GET_CODE (src) != REG)
+ if (! REG_P (src))
break;
/* Follow the copy chain, ie start another iteration of the loop
@@ -3379,7 +3378,7 @@ cprop_insn (rtx insn, int alter_jumps)
return 1;
}
}
- else if (GET_CODE (src) == REG
+ else if (REG_P (src)
&& REGNO (src) >= FIRST_PSEUDO_REGISTER
&& REGNO (src) != regno)
{
@@ -3461,7 +3460,7 @@ do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
/* Rule out USE instructions and ASM statements as we don't want to
change the hard registers mentioned. */
- if (GET_CODE (x) == REG
+ if (REG_P (x)
&& (REGNO (x) >= FIRST_PSEUDO_REGISTER
|| (GET_CODE (PATTERN (insn)) != USE
&& asm_noperands (PATTERN (insn)) < 0)))
@@ -3488,7 +3487,7 @@ do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */
&& (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
- || GET_CODE (XEXP (note, 0)) != MEM))
+ || ! MEM_P (XEXP (note, 0))))
newreg = this_rtx;
}
if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
@@ -3664,7 +3663,7 @@ cprop (int alter_jumps)
/* Keep track of everything modified by this insn. */
/* ??? Need to be careful w.r.t. mods done to INSN. Don't
call mark_oprs_set if we turned the insn into a NOTE. */
- if (GET_CODE (insn) != NOTE)
+ if (! NOTE_P (insn))
mark_oprs_set (insn);
}
}
@@ -3791,7 +3790,7 @@ find_implicit_sets (void)
if (cond
&& (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
- && GET_CODE (XEXP (cond, 0)) == REG
+ && REG_P (XEXP (cond, 0))
&& REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
&& implicit_set_cond_p (cond))
{
@@ -3913,7 +3912,7 @@ find_bypass_set (int regno, int bb)
if (gcse_constant_p (src))
result = set;
- if (GET_CODE (src) != REG)
+ if (! REG_P (src))
break;
regno = REGNO (src);
@@ -4050,7 +4049,7 @@ bypass_block (basic_block bb, rtx setcc, rtx jump)
/* Avoid unification of the edge with other edges from original
branch. We would end up emitting the instruction on "both"
edges. */
-
+
if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
{
edge e2;
@@ -4141,7 +4140,7 @@ bypass_conditional_jumps (void)
else
break;
}
- else if (GET_CODE (insn) == JUMP_INSN)
+ else if (JUMP_P (insn))
{
if ((any_condjump_p (insn) || computed_jump_p (insn))
&& onlyjump_p (insn))
@@ -4427,7 +4426,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
handle cc0, etc. properly]. Similarly we need to care trapping
instructions in presence of non-call exceptions. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
|| (GET_CODE (insn) == INSN
&& (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
{
@@ -4470,7 +4469,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
/* Likewise if the last insn is a call, as will happen in the presence
of exception handling. */
- else if (GET_CODE (insn) == CALL_INSN
+ else if (CALL_P (insn)
&& (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
{
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
@@ -4500,7 +4499,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
If we inserted before the CODE_LABEL, then we would be putting
the insn in the wrong basic block. In that case, put the insn
after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
- while (GET_CODE (insn) == CODE_LABEL
+ while (LABEL_P (insn)
|| NOTE_INSN_BASIC_BLOCK_P (insn))
insn = NEXT_INSN (insn);
@@ -4663,7 +4662,7 @@ pre_insert_copy_insn (struct expr *expr, rtx insn)
else
abort ();
- if (GET_CODE (SET_DEST (set)) == REG)
+ if (REG_P (SET_DEST (set)))
{
old_reg = SET_DEST (set);
/* Check if we can modify the set destination in the original insn. */
@@ -4736,9 +4735,9 @@ pre_insert_copies (void)
expression wasn't deleted anywhere. */
if (expr->reaching_reg == NULL)
continue;
-
+
/* Set when we add a copy for that expression. */
- added_copy = 0;
+ added_copy = 0;
for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
{
@@ -4771,7 +4770,7 @@ pre_insert_copies (void)
}
}
- if (added_copy)
+ if (added_copy)
update_ld_motion_stores (expr);
}
}
@@ -5050,12 +5049,12 @@ compute_transpout (void)
/* Note that flow inserted a nop a the end of basic blocks that
end in call instructions for reasons other than abnormal
control flow. */
- if (GET_CODE (BB_END (bb)) != CALL_INSN)
+ if (! CALL_P (BB_END (bb)))
continue;
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
- if (GET_CODE (expr->expr) == MEM)
+ if (MEM_P (expr->expr))
{
if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
@@ -5591,7 +5590,7 @@ next_ls_expr (struct ls_expr * ptr)
static int
simple_mem (rtx x)
{
- if (GET_CODE (x) != MEM)
+ if (! MEM_P (x))
return 0;
if (MEM_VOLATILE_P (x))
@@ -5635,7 +5634,7 @@ invalidate_any_buried_refs (rtx x)
struct ls_expr * ptr;
/* Invalidate it in the list. */
- if (GET_CODE (x) == MEM && simple_mem (x))
+ if (MEM_P (x) && simple_mem (x))
{
ptr = ldst_entry (x);
ptr->invalid = 1;
@@ -5685,10 +5684,10 @@ compute_ld_motion_mems (void)
rtx dest = SET_DEST (PATTERN (insn));
/* Check for a simple LOAD... */
- if (GET_CODE (src) == MEM && simple_mem (src))
+ if (MEM_P (src) && simple_mem (src))
{
ptr = ldst_entry (src);
- if (GET_CODE (dest) == REG)
+ if (REG_P (dest))
ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
else
ptr->invalid = 1;
@@ -5703,11 +5702,11 @@ compute_ld_motion_mems (void)
will block any movement we might do later. We only care
about this exact pattern since those are the only
circumstance that we will ignore the aliasing info. */
- if (GET_CODE (dest) == MEM && simple_mem (dest))
+ if (MEM_P (dest) && simple_mem (dest))
{
ptr = ldst_entry (dest);
- if (GET_CODE (src) != MEM
+ if (! MEM_P (src)
&& GET_CODE (src) != ASM_OPERANDS
/* Check for REG manually since want_to_gcse_p
returns 0 for all REGs. */
@@ -5860,7 +5859,7 @@ reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
- if (GET_CODE (dest) == REG)
+ if (REG_P (dest))
{
regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
if (bb_reg)
@@ -5880,7 +5879,7 @@ reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
- if (GET_CODE (dest) == REG &&
+ if (REG_P (dest) &&
dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
dead_vec[REGNO (dest)] = 0;
}
@@ -6025,7 +6024,7 @@ find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
dest = SET_DEST (set);
- if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
+ if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
|| GET_MODE (dest) == BLKmode)
return;
@@ -6137,7 +6136,7 @@ compute_store_table (void)
if (! INSN_P (insn))
continue;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
@@ -6170,7 +6169,7 @@ compute_store_table (void)
if (! INSN_P (insn))
continue;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
@@ -6194,7 +6193,7 @@ compute_store_table (void)
/* Unmark regs that are no longer set. */
compute_store_table_current_insn = insn;
note_stores (pat, reg_clear_last_set, last_set_in);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
@@ -6288,7 +6287,7 @@ find_loads (rtx x, rtx store_pattern, int after)
if (GET_CODE (x) == SET)
x = SET_SRC (x);
- if (GET_CODE (x) == MEM)
+ if (MEM_P (x))
{
if (load_kills_store (x, store_pattern, after))
return true;
@@ -6320,7 +6319,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
if (!INSN_P (insn))
return false;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
/* A normal or pure call might read from pattern,
but a const call will not. */
@@ -6352,7 +6351,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
dest = XEXP (dest, 0);
/* Check for memory stores to aliased objects. */
- if (GET_CODE (dest) == MEM
+ if (MEM_P (dest)
&& !expr_equiv_p (dest, x))
{
if (after)
@@ -6537,8 +6536,8 @@ insert_insn_start_bb (rtx insn, basic_block bb)
rtx before = BB_HEAD (bb);
while (before != 0)
{
- if (GET_CODE (before) != CODE_LABEL
- && (GET_CODE (before) != NOTE
+ if (! LABEL_P (before)
+ && (! NOTE_P (before)
|| NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
break;
prev = before;
@@ -6658,7 +6657,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
act = stack[--stack_top];
}
bb = act->dest;
-
+
if (bb == EXIT_BLOCK_PTR
|| TEST_BIT (visited, bb->index))
{
@@ -6677,7 +6676,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
}
else
last = NEXT_INSN (BB_END (bb));
-
+
for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
@@ -6940,7 +6939,7 @@ is_too_expensive (const char *pass)
/* Trying to perform global optimizations on flow graphs which have
a high connectivity will take a long time and is unlikely to be
particularly useful.
-
+
In normal circumstances a cfg should have about twice as many
edges as blocks. But we do not want to punish small functions
which have a couple switch statements. Rather than simply
@@ -6951,7 +6950,7 @@ is_too_expensive (const char *pass)
if (warn_disabled_optimization)
warning ("%s: %d basic blocks and %d edges/basic block",
pass, n_basic_blocks, n_edges / n_basic_blocks);
-
+
return true;
}
@@ -7030,7 +7029,7 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
rtx insn;
int regno;
- if (GET_CODE (reg) != REG)
+ if (! REG_P (reg))
abort ();
regno = REGNO (reg);
@@ -7048,7 +7047,7 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
if (INSN_P (insn))
{
if (FIND_REG_INC_NOTE (insn, reg)
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& call_used_regs[regno])
|| find_reg_fusage (insn, CLOBBER, reg))
return insn;
@@ -7069,7 +7068,7 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
rtx insn;
int regno;
- if (GET_CODE (reg) != REG)
+ if (! REG_P (reg))
return to_insn;
regno = REGNO (reg);
@@ -7084,7 +7083,7 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& call_used_regs[regno])
|| find_reg_fusage (insn, USE, reg)
|| find_reg_fusage (insn, CLOBBER, reg)))
@@ -7097,9 +7096,9 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
static rtx
get_avail_load_store_reg (rtx insn)
{
- if (GET_CODE (SET_DEST (PATTERN (insn))) == REG) /* A load. */
+ if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */
return SET_DEST(PATTERN(insn));
- if (GET_CODE (SET_SRC (PATTERN (insn))) == REG) /* A store. */
+ if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
return SET_SRC (PATTERN (insn));
abort ();
}
@@ -7111,9 +7110,7 @@ is_jump_table_basic_block (basic_block bb)
{
rtx insn = BB_END (bb);
- if (GET_CODE (insn) == JUMP_INSN &&
- (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+ if (JUMP_TABLE_DATA_P (insn))
return true;
return false;
}
@@ -7324,7 +7321,7 @@ eliminate_partially_redundant_loads (basic_block bb, rtx insn,
delete_insn (insn);
else
a_occr->deleted_p = 1;
-
+
cleanup:
while (unavail_occrs)
@@ -7380,8 +7377,8 @@ gcse_after_reload (void)
/* Is it a load - of the form (set (reg) (mem))? */
if (GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SET
- && GET_CODE (SET_DEST (PATTERN (insn))) == REG
- && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
+ && REG_P (SET_DEST (PATTERN (insn)))
+ && MEM_P (SET_SRC (PATTERN (insn))))
{
rtx pat = PATTERN (insn);
rtx src = SET_SRC (pat);
@@ -7435,10 +7432,10 @@ hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
rtx src = SET_SRC (pat);
rtx dest = SET_DEST (pat);
- if (GET_CODE (src) != MEM && GET_CODE (dest) != MEM)
+ if (! MEM_P (src) && ! MEM_P (dest))
return;
- if (GET_CODE (dest) == REG)
+ if (REG_P (dest))
{
if (/* Don't GCSE something if we can't do a reg/reg copy. */
can_copy_p (GET_MODE (dest))
@@ -7458,7 +7455,7 @@ hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table);
}
}
- else if ((GET_CODE (src) == REG))
+ else if (REG_P (src))
{
/* Only record sets of pseudo-regs in the hash table. */
if (/* Don't GCSE something if we can't do a reg/reg copy. */
@@ -7528,7 +7525,7 @@ compute_hash_table_after_reload (struct hash_table *table)
if (! INSN_P (insn))
continue;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
bool clobbers_all = false;
@@ -7555,12 +7552,12 @@ compute_hash_table_after_reload (struct hash_table *table)
src = SET_SRC (PATTERN (insn));
dest = SET_DEST (PATTERN (insn));
- if (GET_CODE (src) == MEM && auto_inc_p (XEXP (src, 0)))
+ if (MEM_P (src) && auto_inc_p (XEXP (src, 0)))
{
regno = REGNO (XEXP (XEXP (src, 0), 0));
record_last_reg_set_info (insn, regno);
}
- if (GET_CODE (dest) == MEM && auto_inc_p (XEXP (dest, 0)))
+ if (MEM_P (dest) && auto_inc_p (XEXP (dest, 0)))
{
regno = REGNO (XEXP (XEXP (dest, 0), 0));
record_last_reg_set_info (insn, regno);