author     ghazi <ghazi@138bc75d-0d04-0410-961f-82ee72b054a4>  2008-07-16 17:52:19 +0000
committer  ghazi <ghazi@138bc75d-0d04-0410-961f-82ee72b054a4>  2008-07-16 17:52:19 +0000
commit     47cfb7f48eb384c13d88a83c80fe460c216f4ab3 (patch)
tree       6ea7145a64bd6e7855ef17deb8174471e33ab630 /gcc
parent     6659485c82112bcc7e2f644780c8b69b9d479e92 (diff)
download   gcc-47cfb7f48eb384c13d88a83c80fe460c216f4ab3.tar.gz
* recog.c (validate_change_1, validate_change,
validate_unshare_change, validate_replace_rtx_1, struct funny_match,
constrain_operands, peephole2_optimize): Avoid C++ keywords.
* reload.c (push_secondary_reload, secondary_reload_class,
scratch_reload_class, find_valid_class, find_reusable_reload,
push_reload, find_dummy_reload, find_reloads_address_1,
find_reloads_address_part, find_equiv_reg): Likewise.
* reload1.c (spill_failure, eliminate_regs_1, allocate_reload_reg,
choose_reload_regs): Likewise.
* rtlanal.c (replace_rtx, nonzero_bits1, num_sign_bit_copies1):
Likewise.
* rtlhooks.c (gen_lowpart_if_possible): Likewise.
* sched-ebb.c (add_deps_for_risky_insns): Likewise.
* sched-rgn.c (concat_INSN_LIST): Likewise.
* stor-layout.c (mode_for_size, mode_for_size_tree,
smallest_mode_for_size): Likewise.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@137894 138bc75d-0d04-0410-961f-82ee72b054a4
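
The change is purely mechanical: locals, parameters and a struct field that happen to be
spelled like C++ keywords (new, class, this, try) are renamed (new_rtx, rclass, this_op,
attempt) so the same C sources also parse when built with a C++ compiler. A minimal,
hypothetical sketch of the pattern follows; the function and variable names below are
invented for illustration and are not taken from the patch.

/* Hypothetical illustration only: "class" and "new" are ordinary
   identifiers in C but reserved keywords in C++, so the pre-patch
   spellings fail under a C++ compiler.  The renamed versions are
   accepted by both languages.  */

enum reg_class { NO_REGS, GENERAL_REGS, ALL_REGS };

/* Before (valid C only):
     enum reg_class class = NO_REGS;
     int new = 0;                       -- syntax errors in C++  */

/* After (valid C and C++): */
static int
needs_general_reg (int want_general)
{
  enum reg_class rclass = NO_REGS;      /* was: class */
  int new_count = 0;                    /* was: new   */

  if (want_general)
    {
      rclass = GENERAL_REGS;
      new_count = 1;
    }
  return rclass == GENERAL_REGS && new_count > 0;
}
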
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog       20
-rw-r--r--  gcc/recog.c         60
-rw-r--r--  gcc/reload.c       176
-rw-r--r--  gcc/reload1.c       90
-rw-r--r--  gcc/rtlanal.c       28
-rw-r--r--  gcc/rtlhooks.c       8
-rw-r--r--  gcc/sched-ebb.c      6
-rw-r--r--  gcc/sched-rgn.c      6
-rw-r--r--  gcc/stor-layout.c   14
9 files changed, 214 insertions(+), 194 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 12cc4e43d77..43e62ff7057 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,25 @@
2008-07-16 Kaveh R. Ghazi <ghazi@caip.rutgers.edu>
+ * recog.c (validate_change_1, validate_change,
+ validate_unshare_change, validate_replace_rtx_1, struct
+ funny_match, constrain_operands, peephole2_optimize): Avoid C++
+ keywords.
+ * reload.c (push_secondary_reload, secondary_reload_class,
+ scratch_reload_class, find_valid_class, find_reusable_reload,
+ push_reload, find_dummy_reload, find_reloads_address_1,
+ find_reloads_address_part, find_equiv_reg): Likewise.
+ * reload1.c (spill_failure, eliminate_regs_1, allocate_reload_reg,
+ choose_reload_regs): Likewise.
+ * rtlanal.c (replace_rtx, nonzero_bits1, num_sign_bit_copies1):
+ Likewise.
+ * rtlhooks.c (gen_lowpart_if_possible): Likewise.
+ * sched-ebb.c (add_deps_for_risky_insns): Likewise.
+ * sched-rgn.c (concat_INSN_LIST): Likewise.
+ * stor-layout.c (mode_for_size, mode_for_size_tree,
+ smallest_mode_for_size): Likewise.
+
+2008-07-16 Kaveh R. Ghazi <ghazi@caip.rutgers.edu>
+
* cfg.c (dump_reg_info): Avoid C++ keywords.
* dwarf2asm.c (dw2_force_const_mem,
dw2_asm_output_encoded_addr_rtx): Likewise.
diff --git a/gcc/recog.c b/gcc/recog.c
index 73d57647877..599d529231a 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -183,7 +183,7 @@ static int changes_allocated;
static int num_changes = 0;
/* Validate a proposed change to OBJECT. LOC is the location in the rtl
- at which NEW will be placed. If OBJECT is zero, no validation is done,
+ at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
the change is simply made.
Two types of objects are supported: If OBJECT is a MEM, memory_address_p
@@ -201,16 +201,16 @@ static int num_changes = 0;
Otherwise, perform the change and return 1. */
static bool
-validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
+validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
rtx old = *loc;
- if (old == new || rtx_equal_p (old, new))
+ if (old == new_rtx || rtx_equal_p (old, new_rtx))
return 1;
gcc_assert (in_group != 0 || num_changes == 0);
- *loc = new;
+ *loc = new_rtx;
/* Save the information describing this change. */
if (num_changes >= changes_allocated)
@@ -253,18 +253,18 @@ validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
UNSHARE to false. */
bool
-validate_change (rtx object, rtx *loc, rtx new, bool in_group)
+validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
- return validate_change_1 (object, loc, new, in_group, false);
+ return validate_change_1 (object, loc, new_rtx, in_group, false);
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
UNSHARE to true. */
bool
-validate_unshare_change (rtx object, rtx *loc, rtx new, bool in_group)
+validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
- return validate_change_1 (object, loc, new, in_group, true);
+ return validate_change_1 (object, loc, new_rtx, in_group, true);
}
@@ -525,7 +525,7 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
enum rtx_code code;
enum machine_mode op0_mode = VOIDmode;
int prev_changes = num_changes;
- rtx new;
+ rtx new_rtx;
if (!x)
return;
@@ -633,25 +633,25 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
case SIGN_EXTEND:
if (GET_MODE (XEXP (x, 0)) == VOIDmode)
{
- new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
+ new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
op0_mode);
/* If any of the above failed, substitute in something that
we know won't be recognized. */
- if (!new)
- new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
- validate_change (object, loc, new, 1);
+ if (!new_rtx)
+ new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+ validate_change (object, loc, new_rtx, 1);
}
break;
case SUBREG:
/* All subregs possible to simplify should be simplified. */
- new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
+ new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
SUBREG_BYTE (x));
/* Subregs of VOIDmode operands are incorrect. */
- if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
- new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
- if (new)
- validate_change (object, loc, new, 1);
+ if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
+ new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+ if (new_rtx)
+ validate_change (object, loc, new_rtx, 1);
break;
case ZERO_EXTRACT:
case SIGN_EXTRACT:
@@ -2200,7 +2200,7 @@ preprocess_constraints (void)
struct funny_match
{
- int this, other;
+ int this_op, other;
};
int
@@ -2350,7 +2350,7 @@ constrain_operands (int strict)
output op is the one that will be printed. */
if (val == 2 && strict > 0)
{
- funny_match[funny_match_index].this = opno;
+ funny_match[funny_match_index].this_op = opno;
funny_match[funny_match_index++].other = match;
}
}
@@ -2583,7 +2583,7 @@ constrain_operands (int strict)
while (--funny_match_index >= 0)
{
recog_data.operand[funny_match[funny_match_index].other]
- = recog_data.operand[funny_match[funny_match_index].this];
+ = recog_data.operand[funny_match[funny_match_index].this_op];
}
return 1;
@@ -2987,7 +2987,7 @@ peephole2_optimize (void)
prev = PREV_INSN (insn);
if (INSN_P (insn))
{
- rtx try, before_try, x;
+ rtx attempt, before_try, x;
int match_len;
rtx note;
bool was_call = false;
@@ -3008,13 +3008,13 @@ peephole2_optimize (void)
substitution would lose the
REG_FRAME_RELATED_EXPR that is attached. */
peep2_current_count = 0;
- try = NULL;
+ attempt = NULL;
}
else
/* Match the peephole. */
- try = peephole2_insns (PATTERN (insn), insn, &match_len);
+ attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
- if (try != NULL)
+ if (attempt != NULL)
{
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
@@ -3032,7 +3032,7 @@ peephole2_optimize (void)
continue;
was_call = true;
- new_insn = try;
+ new_insn = attempt;
while (new_insn != NULL_RTX)
{
if (CALL_P (new_insn))
@@ -3080,7 +3080,7 @@ peephole2_optimize (void)
REG_EH_REGION, NULL_RTX);
/* Replace the old sequence with the new. */
- try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
+ attempt = emit_insn_after_setloc (attempt, peep2_insn_data[i].insn,
INSN_LOCATOR (peep2_insn_data[i].insn));
before_try = PREV_INSN (insn);
delete_insn_chain (insn, peep2_insn_data[i].insn, false);
@@ -3095,7 +3095,7 @@ peephole2_optimize (void)
if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
break;
- for (x = try ; x != before_try ; x = PREV_INSN (x))
+ for (x = attempt ; x != before_try ; x = PREV_INSN (x))
if (CALL_P (x)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (x))
@@ -3145,7 +3145,7 @@ peephole2_optimize (void)
bitmap_copy (live, peep2_insn_data[i].live_before);
/* Update life information for the new sequence. */
- x = try;
+ x = attempt;
do
{
if (INSN_P (x))
@@ -3169,7 +3169,7 @@ peephole2_optimize (void)
/* If we generated a jump instruction, it won't have
JUMP_LABEL set. Recompute after we're done. */
- for (x = try; x != before_try; x = PREV_INSN (x))
+ for (x = attempt; x != before_try; x = PREV_INSN (x))
if (JUMP_P (x))
{
do_rebuild_jump_labels = true;
diff --git a/gcc/reload.c b/gcc/reload.c
index 5a128f9fb81..7c7d736f14c 100644
--- a/gcc/reload.c
+++ b/gcc/reload.c
@@ -319,7 +319,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
enum machine_mode reload_mode, enum reload_type type,
enum insn_code *picode, secondary_reload_info *prev_sri)
{
- enum reg_class class = NO_REGS;
+ enum reg_class rclass = NO_REGS;
enum reg_class scratch_class;
enum machine_mode mode = reload_mode;
enum insn_code icode = CODE_FOR_nothing;
@@ -362,15 +362,15 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
sri.icode = CODE_FOR_nothing;
sri.prev_sri = prev_sri;
- class = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
+ rclass = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
icode = sri.icode;
/* If we don't need any secondary registers, done. */
- if (class == NO_REGS && icode == CODE_FOR_nothing)
+ if (rclass == NO_REGS && icode == CODE_FOR_nothing)
return -1;
- if (class != NO_REGS)
- t_reload = push_secondary_reload (in_p, x, opnum, optional, class,
+ if (rclass != NO_REGS)
+ t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
reload_mode, type, &t_icode, &sri);
/* If we will be using an insn, the secondary reload is for a
@@ -392,7 +392,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
an icode to reload from an intermediate tertiary reload register.
We should probably have a new field in struct reload to tag a
chain of scratch operand reloads onto. */
- gcc_assert (class == NO_REGS);
+ gcc_assert (rclass == NO_REGS);
scratch_constraint = insn_data[(int) icode].operand[2].constraint;
gcc_assert (*scratch_constraint == '=');
@@ -404,7 +404,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
: REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
scratch_constraint));
- class = scratch_class;
+ rclass = scratch_class;
mode = insn_data[(int) icode].operand[2].mode;
}
@@ -422,21 +422,21 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
Allow this when a reload_in/out pattern is being used. I.e. assume
that the generated code handles this case. */
- gcc_assert (!in_p || class != reload_class || icode != CODE_FOR_nothing
+ gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
|| t_icode != CODE_FOR_nothing);
/* See if we can reuse an existing secondary reload. */
for (s_reload = 0; s_reload < n_reloads; s_reload++)
if (rld[s_reload].secondary_p
- && (reg_class_subset_p (class, rld[s_reload].class)
- || reg_class_subset_p (rld[s_reload].class, class))
+ && (reg_class_subset_p (rclass, rld[s_reload].class)
+ || reg_class_subset_p (rld[s_reload].class, rclass))
&& ((in_p && rld[s_reload].inmode == mode)
|| (! in_p && rld[s_reload].outmode == mode))
&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
|| (! in_p && rld[s_reload].secondary_out_reload == t_reload))
&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
|| (! in_p && rld[s_reload].secondary_out_icode == t_icode))
- && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
+ && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
opnum, rld[s_reload].opnum))
{
@@ -445,8 +445,8 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
if (! in_p)
rld[s_reload].outmode = mode;
- if (reg_class_subset_p (class, rld[s_reload].class))
- rld[s_reload].class = class;
+ if (reg_class_subset_p (rclass, rld[s_reload].class))
+ rld[s_reload].class = rclass;
rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
rld[s_reload].optional &= optional;
@@ -467,7 +467,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
way reloads are output. */
if (in_p && icode == CODE_FOR_nothing
- && SECONDARY_MEMORY_NEEDED (class, reload_class, mode))
+ && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
{
get_secondary_mem (x, reload_mode, opnum, type);
@@ -479,7 +479,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
/* We need to make a new secondary reload for this register class. */
rld[s_reload].in = rld[s_reload].out = 0;
- rld[s_reload].class = class;
+ rld[s_reload].class = rclass;
rld[s_reload].inmode = in_p ? mode : VOIDmode;
rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
@@ -503,7 +503,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
#ifdef SECONDARY_MEMORY_NEEDED
if (! in_p && icode == CODE_FOR_nothing
- && SECONDARY_MEMORY_NEEDED (reload_class, class, mode))
+ && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
get_secondary_mem (x, mode, opnum, type);
#endif
}
@@ -516,7 +516,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
register and a scratch register is needed, we return the class of the
intermediate register. */
enum reg_class
-secondary_reload_class (bool in_p, enum reg_class class,
+secondary_reload_class (bool in_p, enum reg_class rclass,
enum machine_mode mode, rtx x)
{
enum insn_code icode;
@@ -524,13 +524,13 @@ secondary_reload_class (bool in_p, enum reg_class class,
sri.icode = CODE_FOR_nothing;
sri.prev_sri = NULL;
- class = targetm.secondary_reload (in_p, x, class, mode, &sri);
+ rclass = targetm.secondary_reload (in_p, x, rclass, mode, &sri);
icode = sri.icode;
/* If there are no secondary reloads at all, we return NO_REGS.
If an intermediate register is needed, we return its class. */
- if (icode == CODE_FOR_nothing || class != NO_REGS)
- return class;
+ if (icode == CODE_FOR_nothing || rclass != NO_REGS)
+ return rclass;
/* No intermediate register is needed, but we have a special reload
pattern, which we assume for now needs a scratch register. */
@@ -547,7 +547,7 @@ scratch_reload_class (enum insn_code icode)
{
const char *scratch_constraint;
char scratch_letter;
- enum reg_class class;
+ enum reg_class rclass;
gcc_assert (insn_data[(int) icode].n_operands == 3);
scratch_constraint = insn_data[(int) icode].operand[2].constraint;
@@ -558,10 +558,10 @@ scratch_reload_class (enum insn_code icode)
scratch_letter = *scratch_constraint;
if (scratch_letter == 'r')
return GENERAL_REGS;
- class = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
+ rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
scratch_constraint);
- gcc_assert (class != NO_REGS);
- return class;
+ gcc_assert (rclass != NO_REGS);
+ return rclass;
}
#ifdef SECONDARY_MEMORY_NEEDED
@@ -660,24 +660,24 @@ find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
unsigned int dest_regno ATTRIBUTE_UNUSED)
{
int best_cost = -1;
- int class;
+ int rclass;
int regno;
enum reg_class best_class = NO_REGS;
enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
unsigned int best_size = 0;
int cost;
- for (class = 1; class < N_REG_CLASSES; class++)
+ for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
{
int bad = 0;
int good = 0;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
- if (TEST_HARD_REG_BIT (reg_class_contents[class], regno))
+ if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
{
if (HARD_REGNO_MODE_OK (regno, inner))
{
good = 1;
- if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno + n)
+ if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
|| ! HARD_REGNO_MODE_OK (regno + n, outer))
bad = 1;
}
@@ -685,15 +685,15 @@ find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
if (bad || !good)
continue;
- cost = REGISTER_MOVE_COST (outer, class, dest_class);
+ cost = REGISTER_MOVE_COST (outer, rclass, dest_class);
- if ((reg_class_size[class] > best_size
+ if ((reg_class_size[rclass] > best_size
&& (best_cost < 0 || best_cost >= cost))
|| best_cost > cost)
{
- best_class = class;
- best_size = reg_class_size[class];
- best_cost = REGISTER_MOVE_COST (outer, class, dest_class);
+ best_class = rclass;
+ best_size = reg_class_size[rclass];
+ best_cost = REGISTER_MOVE_COST (outer, rclass, dest_class);
}
}
@@ -704,14 +704,14 @@ find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
/* Return the number of a previously made reload that can be combined with
a new one, or n_reloads if none of the existing reloads can be used.
- OUT, CLASS, TYPE and OPNUM are the same arguments as passed to
+ OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
push_reload, they determine the kind of the new reload that we try to
combine. P_IN points to the corresponding value of IN, which can be
modified by this function.
DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
static int
-find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
+find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
enum reload_type type, int opnum, int dont_share)
{
rtx in = *p_in;
@@ -732,18 +732,18 @@ find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
than we otherwise would. */
for (i = 0; i < n_reloads; i++)
- if ((reg_class_subset_p (class, rld[i].class)
- || reg_class_subset_p (rld[i].class, class))
+ if ((reg_class_subset_p (rclass, rld[i].class)
+ || reg_class_subset_p (rld[i].class, rclass))
/* If the existing reload has a register, it must fit our class. */
&& (rld[i].reg_rtx == 0
- || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+ || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
true_regnum (rld[i].reg_rtx)))
&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
&& (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
|| (out != 0 && MATCHES (rld[i].out, out)
&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
- && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
+ && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
return i;
@@ -753,12 +753,12 @@ find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
the preincrementation as happening before any ref in this insn
to that register. */
for (i = 0; i < n_reloads; i++)
- if ((reg_class_subset_p (class, rld[i].class)
- || reg_class_subset_p (rld[i].class, class))
+ if ((reg_class_subset_p (rclass, rld[i].class)
+ || reg_class_subset_p (rld[i].class, rclass))
/* If the existing reload has a register, it must fit our
class. */
&& (rld[i].reg_rtx == 0
- || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+ || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
true_regnum (rld[i].reg_rtx)))
&& out == 0 && rld[i].out == 0 && rld[i].in != 0
&& ((REG_P (in)
@@ -768,7 +768,7 @@ find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
&& MATCHES (XEXP (in, 0), rld[i].in)))
&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
- && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
+ && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
&& MERGABLE_RELOADS (type, rld[i].when_needed,
opnum, rld[i].opnum))
{
@@ -878,7 +878,7 @@ can_reload_into (rtx in, int regno, enum machine_mode mode)
If IN and OUT are both nonzero, it means the same register must be used
to reload both IN and OUT.
- CLASS is a register class required for the reloaded data.
+ RCLASS is a register class required for the reloaded data.
INMODE is the machine mode that the instruction requires
for the reg that replaces IN and OUTMODE is likewise for OUT.
@@ -904,7 +904,7 @@ can_reload_into (rtx in, int regno, enum machine_mode mode)
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
- enum reg_class class, enum machine_mode inmode,
+ enum reg_class rclass, enum machine_mode inmode,
enum machine_mode outmode, int strict_low, int optional,
int opnum, enum reload_type type)
{
@@ -1003,7 +1003,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
if (in != 0 && GET_CODE (in) == SUBREG
&& (subreg_lowpart_p (in) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
- && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, class)
+ && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
#endif
&& (CONSTANT_P (SUBREG_REG (in))
|| GET_CODE (SUBREG_REG (in)) == PLUS
@@ -1043,8 +1043,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
!= (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
[GET_MODE (SUBREG_REG (in))]))
|| ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
- || (secondary_reload_class (1, class, inmode, in) != NO_REGS
- && (secondary_reload_class (1, class, GET_MODE (SUBREG_REG (in)),
+ || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
+ && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
SUBREG_REG (in))
== NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
@@ -1079,7 +1079,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
{
- enum reg_class in_class = class;
+ enum reg_class in_class = rclass;
if (REG_P (SUBREG_REG (in)))
in_class
@@ -1109,7 +1109,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
if (out != 0 && GET_CODE (out) == SUBREG
&& (subreg_lowpart_p (out) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
- && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, class)
+ && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
#endif
&& (CONSTANT_P (SUBREG_REG (out))
|| strict_low
@@ -1136,8 +1136,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
!= (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
[GET_MODE (SUBREG_REG (out))]))
|| ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
- || (secondary_reload_class (0, class, outmode, out) != NO_REGS
- && (secondary_reload_class (0, class, GET_MODE (SUBREG_REG (out)),
+ || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
+ && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
SUBREG_REG (out))
== NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
@@ -1211,10 +1211,10 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
/* Narrow down the class of register wanted if that is
desirable on this machine for efficiency. */
{
- enum reg_class preferred_class = class;
+ enum reg_class preferred_class = rclass;
if (in != 0)
- preferred_class = PREFERRED_RELOAD_CLASS (in, class);
+ preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
/* Output reloads may need analogous treatment, different in detail. */
#ifdef PREFERRED_OUTPUT_RELOAD_CLASS
@@ -1225,7 +1225,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
/* Discard what the target said if we cannot do it. */
if (preferred_class != NO_REGS
|| (optional && type == RELOAD_FOR_OUTPUT))
- class = preferred_class;
+ rclass = preferred_class;
}
/* Make sure we use a class that can handle the actual pseudo
@@ -1234,14 +1234,14 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
can handle SImode, QImode needs a smaller class. */
#ifdef LIMIT_RELOAD_CLASS
if (in_subreg_loc)
- class = LIMIT_RELOAD_CLASS (inmode, class);
+ rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
else if (in != 0 && GET_CODE (in) == SUBREG)
- class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), class);
+ rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
if (out_subreg_loc)
- class = LIMIT_RELOAD_CLASS (outmode, class);
+ rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
if (out != 0 && GET_CODE (out) == SUBREG)
- class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), class);
+ rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif
/* Verify that this class is at least possible for the mode that
@@ -1265,7 +1265,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
}
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (HARD_REGNO_MODE_OK (i, mode)
- && in_hard_reg_set_p (reg_class_contents[(int) class], mode, i))
+ && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
break;
if (i == FIRST_PSEUDO_REGISTER)
{
@@ -1290,10 +1290,10 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
/* Optional output reloads are always OK even if we have no register class,
since the function of these reloads is only to have spill_reg_store etc.
set, so that the storing insn can be deleted later. */
- gcc_assert (class != NO_REGS
+ gcc_assert (rclass != NO_REGS
|| (optional != 0 && type == RELOAD_FOR_OUTPUT));
- i = find_reusable_reload (&in, out, class, type, opnum, dont_share);
+ i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
if (i == n_reloads)
{
@@ -1303,11 +1303,11 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
if (in != 0)
secondary_in_reload
- = push_secondary_reload (1, in, opnum, optional, class, inmode, type,
+ = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
&secondary_in_icode, NULL);
if (out != 0 && GET_CODE (out) != SCRATCH)
secondary_out_reload
- = push_secondary_reload (0, out, opnum, optional, class, outmode,
+ = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
type, &secondary_out_icode, NULL);
/* We found no existing reload suitable for re-use.
@@ -1320,14 +1320,14 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
|| (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
&& reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
&& SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
- class, inmode))
+ rclass, inmode))
get_secondary_mem (in, inmode, opnum, type);
#endif
i = n_reloads;
rld[i].in = in;
rld[i].out = out;
- rld[i].class = class;
+ rld[i].class = rclass;
rld[i].inmode = inmode;
rld[i].outmode = outmode;
rld[i].reg_rtx = 0;
@@ -1351,7 +1351,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
&& (REG_P (out)
|| (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
&& reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
- && SECONDARY_MEMORY_NEEDED (class,
+ && SECONDARY_MEMORY_NEEDED (rclass,
REGNO_REG_CLASS (reg_or_subregno (out)),
outmode))
get_secondary_mem (out, outmode, opnum, type);
@@ -1411,8 +1411,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
rld[i].out = out;
rld[i].out_reg = outloc ? *outloc : 0;
}
- if (reg_class_subset_p (class, rld[i].class))
- rld[i].class = class;
+ if (reg_class_subset_p (rclass, rld[i].class))
+ rld[i].class = rclass;
rld[i].optional &= optional;
if (MERGE_TO_OTHER (type, rld[i].when_needed,
opnum, rld[i].opnum))
@@ -1561,7 +1561,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
for (offs = 0; offs < nregs; offs++)
if (fixed_regs[regno + offs]
- || ! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+ || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
regno + offs))
break;
@@ -1867,7 +1867,7 @@ combine_reloads (void)
If so, return the register rtx that proves acceptable.
INLOC and OUTLOC are locations where IN and OUT appear in the insn.
- CLASS is the register class required for the reload.
+ RCLASS is the register class required for the reload.
If FOR_REAL is >= 0, it is the number of the reload,
and in some cases when it can be discovered that OUT doesn't need
@@ -1884,7 +1884,7 @@ combine_reloads (void)
static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
enum machine_mode inmode, enum machine_mode outmode,
- enum reg_class class, int for_real, int earlyclobber)
+ enum reg_class rclass, int for_real, int earlyclobber)
{
rtx in = real_in;
rtx out = real_out;
@@ -1927,9 +1927,9 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
/* Narrow down the reg class, the same way push_reload will;
otherwise we might find a dummy now, but push_reload won't. */
{
- enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, class);
+ enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
if (preferred_class != NO_REGS)
- class = preferred_class;
+ rclass = preferred_class;
}
/* See if OUT will do. */
@@ -1960,7 +1960,7 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
unsigned int i;
for (i = 0; i < nwords; i++)
- if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+ if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
regno + i))
break;
@@ -2028,7 +2028,7 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
unsigned int i;
for (i = 0; i < nwords; i++)
- if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+ if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
regno + i))
break;
@@ -5916,14 +5916,14 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
is larger than the class size, then reload the whole SUBREG. */
else
{
- enum reg_class class = context_reg_class;
- if ((unsigned) CLASS_MAX_NREGS (class, GET_MODE (SUBREG_REG (x)))
- > reg_class_size[class])
+ enum reg_class rclass = context_reg_class;
+ if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
+ > reg_class_size[rclass])
{
x = find_reloads_subreg_address (x, 0, opnum,
ADDR_TYPE (type),
ind_levels, insn);
- push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
+ push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
GET_MODE (x), VOIDmode, 0, 0, opnum, type);
return 1;
}
@@ -5954,7 +5954,7 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
}
/* X, which is found at *LOC, is a part of an address that needs to be
- reloaded into a register of class CLASS. If X is a constant, or if
+ reloaded into a register of class RCLASS. If X is a constant, or if
X is a PLUS that contains a constant, check that the constant is a
legitimate operand and that we are supposed to be able to load
it into the register.
@@ -5969,13 +5969,13 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
supports. */
static void
-find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
+find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
enum machine_mode mode, int opnum,
enum reload_type type, int ind_levels)
{
if (CONSTANT_P (x)
&& (! LEGITIMATE_CONSTANT_P (x)
- || PREFERRED_RELOAD_CLASS (x, class) == NO_REGS))
+ || PREFERRED_RELOAD_CLASS (x, rclass) == NO_REGS))
{
x = force_const_mem (mode, x);
find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
@@ -5985,7 +5985,7 @@ find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
else if (GET_CODE (x) == PLUS
&& CONSTANT_P (XEXP (x, 1))
&& (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
- || PREFERRED_RELOAD_CLASS (XEXP (x, 1), class) == NO_REGS))
+ || PREFERRED_RELOAD_CLASS (XEXP (x, 1), rclass) == NO_REGS))
{
rtx tem;
@@ -5995,7 +5995,7 @@ find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
opnum, type, ind_levels, 0);
}
- push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
+ push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
mode, VOIDmode, 0, 0, opnum, type);
}
@@ -6600,7 +6600,7 @@ refers_to_mem_for_reload_p (rtx x)
/* Check the insns before INSN to see if there is a suitable register
containing the same value as GOAL.
- If OTHER is -1, look for a register in class CLASS.
+ If OTHER is -1, look for a register in class RCLASS.
Otherwise, just see if register number OTHER shares GOAL's value.
Return an rtx for the register found, or zero if none is found.
@@ -6626,7 +6626,7 @@ refers_to_mem_for_reload_p (rtx x)
as if it were a constant except that sp is required to be unchanging. */
rtx
-find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
+find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
short *reload_reg_p, int goalreg, enum machine_mode mode)
{
rtx p = insn;
@@ -6772,7 +6772,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
}
else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
continue;
- else if (!in_hard_reg_set_p (reg_class_contents[(int) class],
+ else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
mode, valueno))
continue;
value = valtry;
diff --git a/gcc/reload1.c b/gcc/reload1.c
index f449ffa9c32..ad58228c791 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -1976,16 +1976,16 @@ delete_caller_save_insns (void)
INSN should be one of the insns which needed this particular spill reg. */
static void
-spill_failure (rtx insn, enum reg_class class)
+spill_failure (rtx insn, enum reg_class rclass)
{
if (asm_noperands (PATTERN (insn)) >= 0)
error_for_asm (insn, "can't find a register in class %qs while "
"reloading %<asm%>",
- reg_class_names[class]);
+ reg_class_names[rclass]);
else
{
error ("unable to find a register to spill in class %qs",
- reg_class_names[class]);
+ reg_class_names[rclass]);
if (dump_file)
{
@@ -2394,7 +2394,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
enum rtx_code code = GET_CODE (x);
struct elim_table *ep;
int regno;
- rtx new;
+ rtx new_rtx;
int i, j;
const char *fmt;
int copied = 0;
@@ -2523,15 +2523,15 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
&& reg_equiv_constant[REGNO (new0)] != 0)
new0 = reg_equiv_constant[REGNO (new0)];
- new = form_sum (new0, new1);
+ new_rtx = form_sum (new0, new1);
/* As above, if we are not inside a MEM we do not want to
turn a PLUS into something else. We might try to do so here
for an addition of 0 if we aren't optimizing. */
- if (! mem_mode && GET_CODE (new) != PLUS)
- return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
+ if (! mem_mode && GET_CODE (new_rtx) != PLUS)
+ return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
else
- return new;
+ return new_rtx;
}
}
return x;
@@ -2588,8 +2588,8 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
/* If we have something in XEXP (x, 0), the usual case, eliminate it. */
if (XEXP (x, 0))
{
- new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
- if (new != XEXP (x, 0))
+ new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
+ if (new_rtx != XEXP (x, 0))
{
/* If this is a REG_DEAD note, it is not valid anymore.
Using the eliminated version could result in creating a
@@ -2599,7 +2599,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
: NULL_RTX);
- x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
+ x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
}
}
@@ -2611,10 +2611,10 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
strictly needed, but it simplifies the code. */
if (XEXP (x, 1))
{
- new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
- if (new != XEXP (x, 1))
+ new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
+ if (new_rtx != XEXP (x, 1))
return
- gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
+ gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
}
return x;
@@ -2636,13 +2636,13 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
if (GET_CODE (XEXP (x, 1)) == PLUS
&& XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
{
- rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
+ rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
insn, true);
- if (new != XEXP (XEXP (x, 1), 1))
+ if (new_rtx != XEXP (XEXP (x, 1), 1))
return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
gen_rtx_PLUS (GET_MODE (x),
- XEXP (x, 0), new));
+ XEXP (x, 0), new_rtx));
}
return x;
@@ -2660,9 +2660,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
case POPCOUNT:
case PARITY:
case BSWAP:
- new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
- if (new != XEXP (x, 0))
- return gen_rtx_fmt_e (code, GET_MODE (x), new);
+ new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
+ if (new_rtx != XEXP (x, 0))
+ return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
return x;
case SUBREG:
@@ -2678,17 +2678,17 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
&& reg_equiv_memory_loc != 0
&& reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
{
- new = SUBREG_REG (x);
+ new_rtx = SUBREG_REG (x);
}
else
- new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
+ new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
- if (new != SUBREG_REG (x))
+ if (new_rtx != SUBREG_REG (x))
{
int x_size = GET_MODE_SIZE (GET_MODE (x));
- int new_size = GET_MODE_SIZE (GET_MODE (new));
+ int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
- if (MEM_P (new)
+ if (MEM_P (new_rtx)
&& ((x_size < new_size
#ifdef WORD_REGISTER_OPERATIONS
/* On these machines, combine can create rtl of the form
@@ -2704,9 +2704,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
)
|| x_size == new_size)
)
- return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
+ return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
else
- return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
+ return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
}
return x;
@@ -2722,9 +2722,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
case USE:
/* Handle insn_list USE that a call to a pure function may generate. */
- new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
- if (new != XEXP (x, 0))
- return gen_rtx_USE (GET_MODE (x), new);
+ new_rtx = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
+ if (new_rtx != XEXP (x, 0))
+ return gen_rtx_USE (GET_MODE (x), new_rtx);
return x;
case CLOBBER:
@@ -2743,21 +2743,21 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
{
if (*fmt == 'e')
{
- new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
- if (new != XEXP (x, i) && ! copied)
+ new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
+ if (new_rtx != XEXP (x, i) && ! copied)
{
x = shallow_copy_rtx (x);
copied = 1;
}
- XEXP (x, i) = new;
+ XEXP (x, i) = new_rtx;
}
else if (*fmt == 'E')
{
int copied_vec = 0;
for (j = 0; j < XVECLEN (x, i); j++)
{
- new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
- if (new != XVECEXP (x, i, j) && ! copied_vec)
+ new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
+ if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
{
rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
XVEC (x, i)->elem);
@@ -2769,7 +2769,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
XVEC (x, i) = new_v;
copied_vec = 1;
}
- XVECEXP (x, i, j) = new;
+ XVECEXP (x, i, j) = new_rtx;
}
}
}
@@ -5474,7 +5474,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
for (count = 0; count < n_spills; count++)
{
- int class = (int) rld[r].class;
+ int rclass = (int) rld[r].class;
int regnum;
i++;
@@ -5491,7 +5491,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
&& free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
rld[r].when_needed, rld[r].in,
rld[r].out, r, 1)))
- && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
+ && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
&& HARD_REGNO_MODE_OK (regnum, rld[r].mode)
/* Look first for regs to share, then for unshared. But
don't share regs used for inherited reloads; they are
@@ -5521,7 +5521,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
while (nr > 1)
{
int regno = regnum + nr - 1;
- if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
+ if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
&& spill_reg_order[regno] >= 0
&& reload_reg_free_p (regno, rld[r].opnum,
rld[r].when_needed)))
@@ -5793,7 +5793,7 @@ choose_reload_regs (struct insn_chain *chain)
#endif
)
{
- enum reg_class class = rld[r].class, last_class;
+ enum reg_class rclass = rld[r].class, last_class;
rtx last_reg = reg_last_reload_reg[regno];
enum machine_mode need_mode;
@@ -5814,18 +5814,18 @@ choose_reload_regs (struct insn_chain *chain)
&& reg_reloaded_contents[i] == regno
&& TEST_HARD_REG_BIT (reg_reloaded_valid, i)
&& HARD_REGNO_MODE_OK (i, rld[r].mode)
- && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
+ && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
/* Even if we can't use this register as a reload
register, we might use it for reload_override_in,
if copying it to the desired class is cheap
enough. */
- || ((REGISTER_MOVE_COST (mode, last_class, class)
- < MEMORY_MOVE_COST (mode, class, 1))
- && (secondary_reload_class (1, class, mode,
+ || ((REGISTER_MOVE_COST (mode, last_class, rclass)
+ < MEMORY_MOVE_COST (mode, rclass, 1))
+ && (secondary_reload_class (1, rclass, mode,
last_reg)
== NO_REGS)
#ifdef SECONDARY_MEMORY_NEEDED
- && ! SECONDARY_MEMORY_NEEDED (last_class, class,
+ && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
mode)
#endif
))
diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c
index d569ff04b2b..fb4a5df7dc8 100644
--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c
@@ -2470,32 +2470,32 @@ replace_rtx (rtx x, rtx from, rtx to)
if (GET_CODE (x) == SUBREG)
{
- rtx new = replace_rtx (SUBREG_REG (x), from, to);
+ rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
- if (GET_CODE (new) == CONST_INT)
+ if (GET_CODE (new_rtx) == CONST_INT)
{
- x = simplify_subreg (GET_MODE (x), new,
+ x = simplify_subreg (GET_MODE (x), new_rtx,
GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x));
gcc_assert (x);
}
else
- SUBREG_REG (x) = new;
+ SUBREG_REG (x) = new_rtx;
return x;
}
else if (GET_CODE (x) == ZERO_EXTEND)
{
- rtx new = replace_rtx (XEXP (x, 0), from, to);
+ rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
- if (GET_CODE (new) == CONST_INT)
+ if (GET_CODE (new_rtx) == CONST_INT)
{
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
- new, GET_MODE (XEXP (x, 0)));
+ new_rtx, GET_MODE (XEXP (x, 0)));
gcc_assert (x);
}
else
- XEXP (x, 0) = new;
+ XEXP (x, 0) = new_rtx;
return x;
}
@@ -3692,12 +3692,12 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
{
unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
- rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
+ rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
known_mode, known_ret,
&nonzero_for_hook);
- if (new)
- nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
+ if (new_rtx)
+ nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
known_mode, known_ret);
return nonzero_for_hook;
@@ -4177,12 +4177,12 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
{
unsigned int copies_for_hook = 1, copies = 1;
- rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
+ rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
known_mode, known_ret,
&copies_for_hook);
- if (new)
- copies = cached_num_sign_bit_copies (new, mode, known_x,
+ if (new_rtx)
+ copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
known_mode, known_ret);
if (copies > 1 || copies_for_hook > 1)
diff --git a/gcc/rtlhooks.c b/gcc/rtlhooks.c
index 432b286c1d8..25fbc094830 100644
--- a/gcc/rtlhooks.c
+++ b/gcc/rtlhooks.c
@@ -141,7 +141,7 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x)
{
/* This is the only other case we handle. */
int offset = 0;
- rtx new;
+ rtx new_rtx;
if (WORDS_BIG_ENDIAN)
offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
@@ -152,11 +152,11 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x)
offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
- MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
- new = adjust_address_nv (x, mode, offset);
- if (! memory_address_p (mode, XEXP (new, 0)))
+ new_rtx = adjust_address_nv (x, mode, offset);
+ if (! memory_address_p (mode, XEXP (new_rtx, 0)))
return 0;
- return new;
+ return new_rtx;
}
else if (mode != GET_MODE (x) && GET_MODE (x) != VOIDmode
&& validate_subreg (mode, GET_MODE (x), x,
diff --git a/gcc/sched-ebb.c b/gcc/sched-ebb.c
index 58a89346cf0..25b97c395d1 100644
--- a/gcc/sched-ebb.c
+++ b/gcc/sched-ebb.c
@@ -357,7 +357,7 @@ static void
add_deps_for_risky_insns (rtx head, rtx tail)
{
rtx insn, prev;
- int class;
+ int classification;
rtx last_jump = NULL_RTX;
rtx next_tail = NEXT_INSN (tail);
basic_block last_block = NULL, bb;
@@ -372,9 +372,9 @@ add_deps_for_risky_insns (rtx head, rtx tail)
}
else if (INSN_P (insn) && last_jump != NULL_RTX)
{
- class = haifa_classify_insn (insn);
+ classification = haifa_classify_insn (insn);
prev = last_jump;
- switch (class)
+ switch (classification)
{
case PFREE_CANDIDATE:
if (flag_schedule_speculative_load)
diff --git a/gcc/sched-rgn.c b/gcc/sched-rgn.c
index abb2c8d6f6c..28f528302ff 100644
--- a/gcc/sched-rgn.c
+++ b/gcc/sched-rgn.c
@@ -2388,10 +2388,10 @@ static struct deps *bb_deps;
static rtx
concat_INSN_LIST (rtx copy, rtx old)
{
- rtx new = old;
+ rtx new_rtx = old;
for (; copy ; copy = XEXP (copy, 1))
- new = alloc_INSN_LIST (XEXP (copy, 0), new);
- return new;
+ new_rtx = alloc_INSN_LIST (XEXP (copy, 0), new_rtx);
+ return new_rtx;
}
static void
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index 2b48ea60370..17654611e1a 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -163,12 +163,12 @@ variable_size (tree size)
#endif
/* Return the machine mode to use for a nonscalar of SIZE bits. The
- mode must be in class CLASS, and have exactly that many value bits;
+ mode must be in class MCLASS, and have exactly that many value bits;
it may have padding as well. If LIMIT is nonzero, modes of wider
than MAX_FIXED_MODE_SIZE will not be used. */
enum machine_mode
-mode_for_size (unsigned int size, enum mode_class class, int limit)
+mode_for_size (unsigned int size, enum mode_class mclass, int limit)
{
enum machine_mode mode;
@@ -176,7 +176,7 @@ mode_for_size (unsigned int size, enum mode_class class, int limit)
return BLKmode;
/* Get the first mode which has this size, in the specified class. */
- for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
+ for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
if (GET_MODE_PRECISION (mode) == size)
return mode;
@@ -187,7 +187,7 @@ mode_for_size (unsigned int size, enum mode_class class, int limit)
/* Similar, except passed a tree node. */
enum machine_mode
-mode_for_size_tree (const_tree size, enum mode_class class, int limit)
+mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
{
unsigned HOST_WIDE_INT uhwi;
unsigned int ui;
@@ -198,20 +198,20 @@ mode_for_size_tree (const_tree size, enum mode_class class, int limit)
ui = uhwi;
if (uhwi != ui)
return BLKmode;
- return mode_for_size (ui, class, limit);
+ return mode_for_size (ui, mclass, limit);
}
/* Similar, but never return BLKmode; return the narrowest mode that
contains at least the requested number of value bits. */
enum machine_mode
-smallest_mode_for_size (unsigned int size, enum mode_class class)
+smallest_mode_for_size (unsigned int size, enum mode_class mclass)
{
enum machine_mode mode;
/* Get the first mode which has at least this size, in the
specified class. */
- for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
+ for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
if (GET_MODE_PRECISION (mode) >= size)
return mode;