path: root/gcc/sel-sched-ir.c
author    Lorry Tar Creator <lorry-tar-importer@baserock.org>    2015-04-22 10:21:45 +0000
committer <>    2015-04-25 21:44:09 +0000
commit    f80b5ea1605c9f9408c5aa386ba71c16d918ebbf (patch)
tree      bb7eafaa81fc4b8c5c215bc08d517fd158db234a /gcc/sel-sched-ir.c
parent    c27a97d04853380f1e80525391b3f0d156ed4c84 (diff)
download  gcc-tarball-f80b5ea1605c9f9408c5aa386ba71c16d918ebbf.tar.gz
Imported from /home/lorry/working-area/delta_gcc-tarball/gcc-5.1.0.tar.bz2.gcc-5.1.0
Diffstat (limited to 'gcc/sel-sched-ir.c')
-rw-r--r--  gcc/sel-sched-ir.c | 194
1 file changed, 106 insertions, 88 deletions
diff --git a/gcc/sel-sched-ir.c b/gcc/sel-sched-ir.c
index e1408b488a..94f6c43a28 100644
--- a/gcc/sel-sched-ir.c
+++ b/gcc/sel-sched-ir.c
@@ -1,5 +1,5 @@
/* Instruction scheduling pass. Selective scheduler and pipeliner.
- Copyright (C) 2006-2014 Free Software Foundation, Inc.
+ Copyright (C) 2006-2015 Free Software Foundation, Inc.
This file is part of GCC.
@@ -26,7 +26,19 @@ along with GCC; see the file COPYING3. If not see
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
+#include "hashtab.h"
+#include "hash-set.h"
+#include "vec.h"
+#include "machmode.h"
+#include "input.h"
#include "function.h"
+#include "predict.h"
+#include "dominance.h"
+#include "cfg.h"
+#include "cfgrtl.h"
+#include "cfganal.h"
+#include "cfgbuild.h"
+#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
@@ -36,8 +48,10 @@ along with GCC; see the file COPYING3. If not see
#include "target.h"
#include "sched-int.h"
#include "ggc.h"
+#include "symtab.h"
+#include "wide-int.h"
+#include "inchash.h"
#include "tree.h"
-#include "vec.h"
#include "langhooks.h"
#include "rtlhooks-def.h"
#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
@@ -126,13 +140,13 @@ static struct
} nop_pool = { NULL, 0, 0 };
/* The pool for basic block notes. */
-static rtx_vec_t bb_note_pool;
+static vec<rtx_note *> bb_note_pool;
/* A NOP pattern used to emit placeholder insns. */
rtx nop_pattern = NULL_RTX;
/* A special instruction that resides in EXIT_BLOCK.
EXIT_INSN is successor of the insns that lead to EXIT_BLOCK. */
-rtx exit_insn = NULL_RTX;
+rtx_insn *exit_insn = NULL;
/* TRUE if while scheduling current region, which is loop, its preheader
was removed. */
@@ -262,7 +276,7 @@ init_fence_for_scheduling (fence_t f)
/* Add new fence consisting of INSN and STATE to the list pointed to by LP. */
static void
flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
- insn_t last_scheduled_insn, vec<rtx, va_gc> *executing_insns,
+ insn_t last_scheduled_insn, vec<rtx_insn *, va_gc> *executing_insns,
int *ready_ticks, int ready_ticks_size, insn_t sched_next,
int cycle, int cycle_issued_insns, int issue_more,
bool starts_cycle_p, bool after_stall_p)
@@ -614,11 +628,11 @@ init_fences (insn_t old_fence)
state_create (),
create_deps_context () /* dc */,
create_target_context (true) /* tc */,
- NULL_RTX /* last_scheduled_insn */,
+ NULL /* last_scheduled_insn */,
NULL, /* executing_insns */
XCNEWVEC (int, ready_ticks_size), /* ready_ticks */
ready_ticks_size,
- NULL_RTX /* sched_next */,
+ NULL /* sched_next */,
1 /* cycle */, 0 /* cycle_issued_insns */,
issue_rate, /* issue_more */
1 /* starts_cycle_p */, 0 /* after_stall_p */);
@@ -637,7 +651,8 @@ init_fences (insn_t old_fence)
static void
merge_fences (fence_t f, insn_t insn,
state_t state, deps_t dc, void *tc,
- rtx last_scheduled_insn, vec<rtx, va_gc> *executing_insns,
+ rtx_insn *last_scheduled_insn,
+ vec<rtx_insn *, va_gc> *executing_insns,
int *ready_ticks, int ready_ticks_size,
rtx sched_next, int cycle, int issue_more, bool after_stall_p)
{
@@ -802,9 +817,10 @@ merge_fences (fence_t f, insn_t insn,
other parameters. */
static void
add_to_fences (flist_tail_t new_fences, insn_t insn,
- state_t state, deps_t dc, void *tc, rtx last_scheduled_insn,
- vec<rtx, va_gc> *executing_insns, int *ready_ticks,
- int ready_ticks_size, rtx sched_next, int cycle,
+ state_t state, deps_t dc, void *tc,
+ rtx_insn *last_scheduled_insn,
+ vec<rtx_insn *, va_gc> *executing_insns, int *ready_ticks,
+ int ready_ticks_size, rtx_insn *sched_next, int cycle,
int cycle_issued_insns, int issue_rate,
bool starts_cycle_p, bool after_stall_p)
{
@@ -866,9 +882,9 @@ add_clean_fence_to_fences (flist_tail_t new_fences, insn_t succ, fence_t fence)
add_to_fences (new_fences,
succ, state_create (), create_deps_context (),
create_target_context (true),
- NULL_RTX, NULL,
+ NULL, NULL,
XCNEWVEC (int, ready_ticks_size), ready_ticks_size,
- NULL_RTX, FENCE_CYCLE (fence) + 1,
+ NULL, FENCE_CYCLE (fence) + 1,
0, issue_rate, 1, FENCE_AFTER_STALL_P (fence));
}
@@ -1036,16 +1052,17 @@ static vinsn_t nop_vinsn = NULL;
insn_t
get_nop_from_pool (insn_t insn)
{
+ rtx nop_pat;
insn_t nop;
bool old_p = nop_pool.n != 0;
int flags;
if (old_p)
- nop = nop_pool.v[--nop_pool.n];
+ nop_pat = nop_pool.v[--nop_pool.n];
else
- nop = nop_pattern;
+ nop_pat = nop_pattern;
- nop = emit_insn_before (nop, insn);
+ nop = emit_insn_before (nop_pat, insn);
if (old_p)
flags = INSN_INIT_TODO_SSID;
@@ -1066,10 +1083,10 @@ return_nop_to_pool (insn_t nop, bool full_tidying)
sel_remove_insn (nop, false, full_tidying);
/* We'll recycle this nop. */
- INSN_DELETED_P (nop) = 0;
+ nop->set_undeleted ();
if (nop_pool.n == nop_pool.s)
- nop_pool.v = XRESIZEVEC (rtx, nop_pool.v,
+ nop_pool.v = XRESIZEVEC (rtx_insn *, nop_pool.v,
(nop_pool.s = 2 * nop_pool.s + 1));
nop_pool.v[nop_pool.n++] = nop;
}
@@ -1119,8 +1136,8 @@ skip_unspecs_callback (const_rtx *xx, const_rtx *yy, rtx *nx, rtx* ny)
to support ia64 speculation. When changes are needed, new rtx X and new mode
NMODE are written, and the callback returns true. */
static int
-hash_with_unspec_callback (const_rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
- rtx *nx, enum machine_mode* nmode)
+hash_with_unspec_callback (const_rtx x, machine_mode mode ATTRIBUTE_UNUSED,
+ rtx *nx, machine_mode* nmode)
{
if (GET_CODE (x) == UNSPEC
&& targetm.sched.skip_rtx_p
@@ -1141,10 +1158,10 @@ lhs_and_rhs_separable_p (rtx lhs, rtx rhs)
if (lhs == NULL || rhs == NULL)
return false;
- /* Do not schedule CONST, CONST_INT and CONST_DOUBLE etc as rhs: no point
- to use reg, if const can be used. Moreover, scheduling const as rhs may
- lead to mode mismatch cause consts don't have modes but they could be
- merged from branches where the same const used in different modes. */
+ /* Do not schedule constants as rhs: no point to use reg, if const
+ can be used. Moreover, scheduling const as rhs may lead to mode
+ mismatch cause consts don't have modes but they could be merged
+ from branches where the same const used in different modes. */
if (CONSTANT_P (rhs))
return false;
@@ -1246,7 +1263,7 @@ vinsn_create (insn_t insn, bool force_unique_p)
vinsn_t
vinsn_copy (vinsn_t vi, bool reattach_p)
{
- rtx copy;
+ rtx_insn *copy;
bool unique = VINSN_UNIQUE_P (vi);
vinsn_t new_vi;
@@ -1306,7 +1323,7 @@ vinsn_cond_branch_p (vinsn_t vi)
/* Return latency of INSN. */
static int
-sel_insn_rtx_cost (rtx insn)
+sel_insn_rtx_cost (rtx_insn *insn)
{
int cost;
@@ -1401,7 +1418,7 @@ sel_gen_insn_from_expr_after (expr_t expr, vinsn_t vinsn, int seqno,
/* The insn may come from the transformation cache, which may hold already
deleted insns, so mark it as not deleted. */
- INSN_DELETED_P (insn) = 0;
+ insn->set_undeleted ();
add_insn_after (insn, after, BLOCK_FOR_INSN (insn));
@@ -1423,11 +1440,11 @@ sel_move_insn (expr_t expr, int seqno, insn_t after)
/* Assert that in move_op we disconnected this insn properly. */
gcc_assert (EXPR_VINSN (INSN_EXPR (insn)) != NULL);
- PREV_INSN (insn) = after;
- NEXT_INSN (insn) = next;
+ SET_PREV_INSN (insn) = after;
+ SET_NEXT_INSN (insn) = next;
- NEXT_INSN (after) = insn;
- PREV_INSN (next) = insn;
+ SET_NEXT_INSN (after) = insn;
+ SET_PREV_INSN (next) = insn;
/* Update links from insn to bb and vice versa. */
df_insn_change_bb (insn, bb);
@@ -1962,7 +1979,7 @@ int
speculate_expr (expr_t expr, ds_t ds)
{
int res;
- rtx orig_insn_rtx;
+ rtx_insn *orig_insn_rtx;
rtx spec_pat;
ds_t target_ds, current_ds;
@@ -1983,7 +2000,8 @@ speculate_expr (expr_t expr, ds_t ds)
case 1:
{
- rtx spec_insn_rtx = create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
+ rtx_insn *spec_insn_rtx =
+ create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
vinsn_t spec_vinsn = create_vinsn_from_insn_rtx (spec_insn_rtx, false);
change_vinsn_in_expr (expr, spec_vinsn);
@@ -2613,8 +2631,7 @@ static void
maybe_downgrade_id_to_use (idata_t id, insn_t insn)
{
bool must_be_use = false;
- unsigned uid = INSN_UID (insn);
- df_ref *rec;
+ df_ref def;
rtx lhs = IDATA_LHS (id);
rtx rhs = IDATA_RHS (id);
@@ -2628,10 +2645,8 @@ maybe_downgrade_id_to_use (idata_t id, insn_t insn)
return;
}
- for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *rec;
-
if (DF_REF_INSN (def)
&& DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)
&& loc_mentioned_in_p (DF_REF_LOC (def), IDATA_RHS (id)))
@@ -2659,13 +2674,12 @@ maybe_downgrade_id_to_use (idata_t id, insn_t insn)
static void
setup_id_reg_sets (idata_t id, insn_t insn)
{
- unsigned uid = INSN_UID (insn);
- df_ref *rec;
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref def, use;
regset tmp = get_clear_regset_from_pool ();
- for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *rec;
unsigned int regno = DF_REF_REGNO (def);
/* Post modifies are treated like clobbers by sched-deps.c. */
@@ -2689,9 +2703,8 @@ setup_id_reg_sets (idata_t id, insn_t insn)
bitmap_set_bit (tmp, regno);
}
- for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *rec;
unsigned int regno = DF_REF_REGNO (use);
/* When these refs are met for the first time, skip them, as
@@ -2778,7 +2791,7 @@ struct sched_scan_info_def
/* This hook makes scheduler frontend to initialize its internal data
structures for the passed insn. */
- void (*init_insn) (rtx);
+ void (*init_insn) (insn_t);
};
/* A driver function to add a set of basic blocks (BBS) to the
@@ -2802,7 +2815,7 @@ sched_scan (const struct sched_scan_info_def *ssi, bb_vec_t bbs)
if (ssi->init_insn)
FOR_EACH_VEC_ELT (bbs, i, bb)
{
- rtx insn;
+ rtx_insn *insn;
FOR_BB_INSNS (bb, insn)
ssi->init_insn (insn);
@@ -2830,8 +2843,10 @@ hash_transformed_insns (const void *p)
static int
eq_transformed_insns (const void *p, const void *q)
{
- rtx i1 = VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
- rtx i2 = VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);
+ rtx_insn *i1 =
+ VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
+ rtx_insn *i2 =
+ VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);
if (INSN_UID (i1) == INSN_UID (i2))
return 1;
@@ -2939,7 +2954,7 @@ init_global_and_expr_for_insn (insn_t insn)
if (NOTE_INSN_BASIC_BLOCK_P (insn))
{
- init_global_data.prev_insn = NULL_RTX;
+ init_global_data.prev_insn = NULL;
return;
}
@@ -2956,7 +2971,7 @@ init_global_and_expr_for_insn (insn_t insn)
init_global_data.prev_insn = insn;
}
else
- init_global_data.prev_insn = NULL_RTX;
+ init_global_data.prev_insn = NULL;
if (GET_CODE (PATTERN (insn)) == ASM_INPUT
|| asm_noperands (PATTERN (insn)) >= 0)
@@ -3581,7 +3596,7 @@ sel_insn_is_speculation_check (rtx insn)
/* Extracts machine mode MODE and destination location DST_LOC
for given INSN. */
void
-get_dest_and_mode (rtx insn, rtx *dst_loc, enum machine_mode *mode)
+get_dest_and_mode (rtx insn, rtx *dst_loc, machine_mode *mode)
{
rtx pat = PATTERN (insn);
@@ -3947,8 +3962,8 @@ sel_remove_insn (insn_t insn, bool only_disconnect, bool full_tidying)
/* It is necessary to NULL these fields in case we are going to re-insert
INSN into the insns stream, as will usually happen in the ONLY_DISCONNECT
case, but also for NOPs that we will return to the nop pool. */
- PREV_INSN (insn) = NULL_RTX;
- NEXT_INSN (insn) = NULL_RTX;
+ SET_PREV_INSN (insn) = NULL_RTX;
+ SET_NEXT_INSN (insn) = NULL_RTX;
set_block_for_insn (insn, NULL);
return tidy_control_flow (bb, full_tidying);
@@ -3980,10 +3995,10 @@ sel_luid_for_non_insn (rtx x)
/* Find the proper seqno for inserting at INSN by successors.
Return -1 if no successors with positive seqno exist. */
static int
-get_seqno_by_succs (rtx insn)
+get_seqno_by_succs (rtx_insn *insn)
{
basic_block bb = BLOCK_FOR_INSN (insn);
- rtx tmp = insn, end = BB_END (bb);
+ rtx_insn *tmp = insn, *end = BB_END (bb);
int seqno;
insn_t succ = NULL;
succ_iterator si;
@@ -4082,10 +4097,10 @@ get_seqno_for_a_jump (insn_t insn, int old_seqno)
/* Find the proper seqno for inserting at INSN. Returns -1 if no predecessors
with positive seqno exist. */
int
-get_seqno_by_preds (rtx insn)
+get_seqno_by_preds (rtx_insn *insn)
{
basic_block bb = BLOCK_FOR_INSN (insn);
- rtx tmp = insn, head = BB_HEAD (bb);
+ rtx_insn *tmp = insn, *head = BB_HEAD (bb);
insn_t *preds;
int n, i, seqno;
@@ -4535,10 +4550,10 @@ static struct
/* Functions to work with control-flow graph. */
/* Return basic block note of BB. */
-insn_t
+rtx_insn *
sel_bb_head (basic_block bb)
{
- insn_t head;
+ rtx_insn *head;
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
{
@@ -4553,7 +4568,7 @@ sel_bb_head (basic_block bb)
head = next_nonnote_insn (note);
if (head && (BARRIER_P (head) || BLOCK_FOR_INSN (head) != bb))
- head = NULL_RTX;
+ head = NULL;
}
return head;
@@ -4567,11 +4582,11 @@ sel_bb_head_p (insn_t insn)
}
/* Return last insn of BB. */
-insn_t
+rtx_insn *
sel_bb_end (basic_block bb)
{
if (sel_bb_empty_p (bb))
- return NULL_RTX;
+ return NULL;
gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
@@ -4604,7 +4619,7 @@ in_current_region_p (basic_block bb)
/* Return the block which is a fallthru bb of a conditional jump JUMP. */
basic_block
-fallthru_bb_of_jump (rtx jump)
+fallthru_bb_of_jump (const rtx_insn *jump)
{
if (!JUMP_P (jump))
return NULL;
@@ -4660,7 +4675,7 @@ sel_restore_notes (void)
{
note_list = BB_NOTE_LIST (first);
restore_other_notes (NULL, first);
- BB_NOTE_LIST (first) = NULL_RTX;
+ BB_NOTE_LIST (first) = NULL;
FOR_BB_INSNS (first, insn)
if (NONDEBUG_INSN_P (insn))
@@ -4949,7 +4964,7 @@ recompute_rev_top_order (void)
void
clear_outdated_rtx_info (basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
@@ -4981,17 +4996,17 @@ return_bb_to_pool (basic_block bb)
}
/* Get a bb_note from pool or return NULL_RTX if pool is empty. */
-static rtx
+static rtx_note *
get_bb_note_from_pool (void)
{
if (bb_note_pool.is_empty ())
- return NULL_RTX;
+ return NULL;
else
{
- rtx note = bb_note_pool.pop ();
+ rtx_note *note = bb_note_pool.pop ();
- PREV_INSN (note) = NULL_RTX;
- NEXT_INSN (note) = NULL_RTX;
+ SET_PREV_INSN (note) = NULL_RTX;
+ SET_NEXT_INSN (note) = NULL_RTX;
return note;
}
@@ -5269,7 +5284,7 @@ move_bb_info (basic_block merge_bb, basic_block empty_bb)
if (in_current_region_p (merge_bb))
concat_note_lists (BB_NOTE_LIST (empty_bb),
&BB_NOTE_LIST (merge_bb));
- BB_NOTE_LIST (empty_bb) = NULL_RTX;
+ BB_NOTE_LIST (empty_bb) = NULL;
}
@@ -5346,7 +5361,7 @@ static basic_block
sel_create_basic_block (void *headp, void *endp, basic_block after)
{
basic_block new_bb;
- insn_t new_bb_note;
+ rtx_note *new_bb_note;
gcc_assert (flag_sel_sched_pipelining_outer_loops
|| !last_added_blocks.exists ());
@@ -5357,7 +5372,8 @@ sel_create_basic_block (void *headp, void *endp, basic_block after)
new_bb = orig_cfg_hooks.create_basic_block (headp, endp, after);
else
{
- new_bb = create_basic_block_structure ((rtx) headp, (rtx) endp,
+ new_bb = create_basic_block_structure ((rtx_insn *) headp,
+ (rtx_insn *) endp,
new_bb_note, after);
new_bb->aux = NULL;
}
@@ -5439,10 +5455,10 @@ sel_split_block (basic_block bb, rtx after)
/* If BB ends with a jump insn whose ID is bigger then PREV_MAX_UID, return it.
Otherwise returns NULL. */
-static rtx
+static rtx_insn *
check_for_new_jump (basic_block bb, int prev_max_uid)
{
- rtx end;
+ rtx_insn *end;
end = sel_bb_end (bb);
if (end && INSN_UID (end) >= prev_max_uid)
@@ -5452,10 +5468,10 @@ check_for_new_jump (basic_block bb, int prev_max_uid)
/* Look for a new jump either in FROM_BB block or in newly created JUMP_BB block.
New means having UID at least equal to PREV_MAX_UID. */
-static rtx
+static rtx_insn *
find_new_jump (basic_block from, basic_block jump_bb, int prev_max_uid)
{
- rtx jump;
+ rtx_insn *jump;
/* Return immediately if no new insns were emitted. */
if (get_max_uid () == prev_max_uid)
@@ -5478,7 +5494,7 @@ sel_split_edge (edge e)
{
basic_block new_bb, src, other_bb = NULL;
int prev_max_uid;
- rtx jump;
+ rtx_insn *jump;
src = e->src;
prev_max_uid = get_max_uid ();
@@ -5541,7 +5557,7 @@ sel_create_recovery_block (insn_t orig_insn)
{
basic_block first_bb, second_bb, recovery_block;
basic_block before_recovery = NULL;
- rtx jump;
+ rtx_insn *jump;
first_bb = BLOCK_FOR_INSN (orig_insn);
if (sel_bb_end_p (orig_insn))
@@ -5592,7 +5608,7 @@ sel_redirect_edge_and_branch_force (edge e, basic_block to)
{
basic_block jump_bb, src, orig_dest = e->dest;
int prev_max_uid;
- rtx jump;
+ rtx_insn *jump;
int old_seqno = -1;
/* This function is now used only for bookkeeping code creation, where
@@ -5636,7 +5652,7 @@ sel_redirect_edge_and_branch (edge e, basic_block to)
bool latch_edge_p;
basic_block src, orig_dest = e->dest;
int prev_max_uid;
- rtx jump;
+ rtx_insn *jump;
edge redirected;
bool recompute_toporder_p = false;
bool maybe_unreachable = single_pred_p (orig_dest);
@@ -5724,10 +5740,10 @@ sel_unregister_cfg_hooks (void)
/* Emit an insn rtx based on PATTERN. If a jump insn is wanted,
LABEL is where this jump should be directed. */
-rtx
+rtx_insn *
create_insn_rtx_from_pattern (rtx pattern, rtx label)
{
- rtx insn_rtx;
+ rtx_insn *insn_rtx;
gcc_assert (!INSN_P (pattern));
@@ -5758,7 +5774,7 @@ create_insn_rtx_from_pattern (rtx pattern, rtx label)
/* Create a new vinsn for INSN_RTX. FORCE_UNIQUE_P is true when the vinsn
must not be clonable. */
vinsn_t
-create_vinsn_from_insn_rtx (rtx insn_rtx, bool force_unique_p)
+create_vinsn_from_insn_rtx (rtx_insn *insn_rtx, bool force_unique_p)
{
gcc_assert (INSN_P (insn_rtx) && !INSN_IN_STREAM_P (insn_rtx));
@@ -5767,10 +5783,11 @@ create_vinsn_from_insn_rtx (rtx insn_rtx, bool force_unique_p)
}
/* Create a copy of INSN_RTX. */
-rtx
+rtx_insn *
create_copy_of_insn_rtx (rtx insn_rtx)
{
- rtx res, link;
+ rtx_insn *res;
+ rtx link;
if (DEBUG_INSN_P (insn_rtx))
return create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
@@ -5858,7 +5875,7 @@ setup_nop_and_exit_insns (void)
void
free_nop_and_exit_insns (void)
{
- exit_insn = NULL_RTX;
+ exit_insn = NULL;
nop_pattern = NULL_RTX;
}
@@ -6186,7 +6203,7 @@ make_regions_from_the_rest (void)
FOR_EACH_BB_FN (bb, cfun)
{
- if (bb->loop_father && !bb->loop_father->num == 0
+ if (bb->loop_father && bb->loop_father->num != 0
&& !(bb->flags & BB_IRREDUCIBLE_LOOP))
loop_hdr[bb->index] = bb->loop_father->num;
}
@@ -6446,4 +6463,5 @@ sel_remove_loop_preheader (void)
SET_LOOP_PREHEADER_BLOCKS (loop_outer (current_loop_nest),
preheader_blocks);
}
+
#endif
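
The bulk of this patch tightens types in sel-sched-ir.c: plain rtx variables holding instructions become rtx_insn *, lvalue macros such as PREV_INSN (x) = y become explicit setters (SET_PREV_INSN / SET_NEXT_INSN), and INSN_DELETED_P (x) = 0 becomes x->set_undeleted (). Below is a minimal standalone C++ sketch of the same idea, not GCC code: the types node and insn_node and the helper names are hypothetical stand-ins, meant only to show why a derived-pointer type plus setter functions catches misuse at compile time.

// Hypothetical analogue of the rtx -> rtx_insn * tightening in this patch:
// a base "node" type, a derived "insn_node" type, and setters that accept
// only the derived type, so passing a plain node is a compile-time error.
#include <cassert>

struct node { int code; };                 // stands in for rtx
struct insn_node : node                    // stands in for rtx_insn
{
  insn_node *prev = nullptr;
  insn_node *next = nullptr;
};

// Setters replace the old "PREV_INSN (x) = y" lvalue-macro style.
static void set_prev_insn (insn_node *insn, insn_node *prev) { insn->prev = prev; }
static void set_next_insn (insn_node *insn, insn_node *next) { insn->next = next; }

// Splice INSN in after AFTER, mirroring the link updates in sel_move_insn.
static void link_after (insn_node *insn, insn_node *after)
{
  insn_node *next = after->next;
  set_prev_insn (insn, after);
  set_next_insn (insn, next);
  set_next_insn (after, insn);
  if (next)
    set_prev_insn (next, insn);
}

int main ()
{
  insn_node a, b, c;
  set_next_insn (&a, &c);
  set_prev_insn (&c, &a);
  link_after (&b, &a);                     // a -> b -> c
  assert (a.next == &b && b.next == &c && c.prev == &b);
  // node n; set_next_insn (&n, &a);       // would not compile: node is not insn_node
  return 0;
}

With the untyped scheme, a pattern rtx or a note could silently be stored where an instruction was expected; after the conversion, such mistakes are rejected by the compiler, which is the motivation behind the repetitive rtx -> rtx_insn * changes throughout the diff above.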