author     Trevor Saunders <tsaunders@mozilla.com>  2013-11-14 05:56:59 -0500
committer  Trevor Saunders <tsaunders@mozilla.com>  2014-02-18 22:44:34 -0500
commit     1aa3f4163436423ea199a0a84e5096235a9edd5a
tree       8e928b41b4a0eca855e351661834df8e9d0f71ce
parent     70d26ff65e921d9a00d5b7b7ba8b4c693953527c
add ctor / dtor to bitmap_head
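
The new constructor takes an optional obstack argument, defaulting to bitmap_default_obstack, and forwards to bitmap_initialize_stat; the destructor simply calls bitmap_clear, which is also taught to reset head->first so that a cleared head can be destroyed safely. The core of the change, as added to struct bitmap_head in gcc/bitmap.h below:

    bitmap_head (bitmap_obstack *o = &bitmap_default_obstack MEM_STAT_DECL)
      { bitmap_initialize_stat (this, o PASS_MEM_STAT); }
    ~bitmap_head () { bitmap_clear (this); }

With these in place, callers can declare a bitmap_head directly on the stack, optionally naming an obstack, instead of pairing BITMAP_ALLOC with BITMAP_FREE or bitmap_initialize with bitmap_clear.
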
-rw-r--r--  gcc/bb-reorder.c  10
-rw-r--r--  gcc/bitmap.c  2
-rw-r--r--  gcc/bitmap.h  21
-rw-r--r--  gcc/bt-load.c  9
-rw-r--r--  gcc/c/c-typeck.c  8
-rw-r--r--  gcc/cfganal.c  9
-rw-r--r--  gcc/cfgexpand.c  16
-rw-r--r--  gcc/cfgloop.c  8
-rw-r--r--  gcc/config/c6x/c6x.c  4
-rw-r--r--  gcc/cp/semantics.c  4
-rw-r--r--  gcc/dce.c  82
-rw-r--r--  gcc/df-core.c  15
-rw-r--r--  gcc/df-problems.c  55
-rw-r--r--  gcc/df-scan.c  37
-rw-r--r--  gcc/dominance.c  8
-rw-r--r--  gcc/function.c  19
-rw-r--r--  gcc/gcse.c  12
-rw-r--r--  gcc/gimple-fold.c  8
-rw-r--r--  gcc/init-regs.c  10
-rw-r--r--  gcc/ipa-inline.c  24
-rw-r--r--  gcc/ipa-reference.c  15
-rw-r--r--  gcc/ipa-split.c  7
-rw-r--r--  gcc/ira-color.c  9
-rw-r--r--  gcc/ira.c  70
-rw-r--r--  gcc/loop-invariant.c  20
-rw-r--r--  gcc/lower-subreg.c  22
-rw-r--r--  gcc/lra-assigns.c  12
-rw-r--r--  gcc/lra-constraints.c  14
-rw-r--r--  gcc/lra-eliminations.c  8
-rw-r--r--  gcc/predict.c  30
-rw-r--r--  gcc/recog.c  14
-rw-r--r--  gcc/reginfo.c  7
-rw-r--r--  gcc/regrename.c  16
-rw-r--r--  gcc/regstat.c  21
-rw-r--r--  gcc/sel-sched.c  8
-rw-r--r--  gcc/sese.c  7
-rw-r--r--  gcc/trans-mem.c  42
-rw-r--r--  gcc/tree-cfg.c  20
-rw-r--r--  gcc/tree-cfgcleanup.c  16
-rw-r--r--  gcc/tree-eh.c  9
-rw-r--r--  gcc/tree-if-conv.c  13
-rw-r--r--  gcc/tree-into-ssa.c  55
-rw-r--r--  gcc/tree-loop-distribution.c  8
-rw-r--r--  gcc/tree-object-size.c  43
-rw-r--r--  gcc/tree-predcom.c  10
-rw-r--r--  gcc/tree-scalar-evolution.c  14
-rw-r--r--  gcc/tree-sra.c  17
-rw-r--r--  gcc/tree-ssa-coalesce.c  11
-rw-r--r--  gcc/tree-ssa-dom.c  18
-rw-r--r--  gcc/tree-ssa-forwprop.c  8
-rw-r--r--  gcc/tree-ssa-live.c  7
-rw-r--r--  gcc/tree-ssa-loop-im.c  16
-rw-r--r--  gcc/tree-ssa-loop-ivcanon.c  7
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c  18
-rw-r--r--  gcc/tree-ssa-loop-manip.c  8
-rw-r--r--  gcc/tree-ssa-loop-niter.c  8
-rw-r--r--  gcc/tree-ssa-pre.c  8
-rw-r--r--  gcc/tree-ssa-sink.c  16
-rw-r--r--  gcc/tree-ssa-strlen.c  5
-rw-r--r--  gcc/tree-ssa-tail-merge.c  7
-rw-r--r--  gcc/tree-ssa-threadupdate.c  27
-rw-r--r--  gcc/tree-ssa.c  58
62 files changed, 435 insertions, 675 deletions
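
Most of the touched files follow the same mechanical conversion: a heap bitmap obtained with BITMAP_ALLOC (and released with BITMAP_FREE) becomes a stack bitmap_head whose destructor does the cleanup, with call sites switching from passing the pointer to passing its address. A minimal before/after sketch of that pattern, using a hypothetical caller (the function and variable names here are illustrative, not taken from the patch):

    /* Before: explicit allocation and release.  */
    void
    example_before (basic_block bb)
    {
      bitmap visited = BITMAP_ALLOC (NULL);
      bitmap_set_bit (visited, bb->index);
      if (bitmap_bit_p (visited, bb->index))
        do_something (bb);
      BITMAP_FREE (visited);
    }

    /* After: stack head, cleared by the destructor on scope exit.  */
    void
    example_after (basic_block bb)
    {
      bitmap_head visited;   /* defaults to bitmap_default_obstack */
      bitmap_set_bit (&visited, bb->index);
      if (bitmap_bit_p (&visited, bb->index))
        do_something (bb);
    }
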
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index 3364d93fea2..5dc0c676239 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -2391,7 +2391,6 @@ static unsigned int
duplicate_computed_gotos (void)
{
basic_block bb, new_bb;
- bitmap candidates;
int max_size;
bool changed = false;
@@ -2409,7 +2408,7 @@ duplicate_computed_gotos (void)
max_size
= uncond_jump_length * PARAM_VALUE (PARAM_MAX_GOTO_DUPLICATION_INSNS);
- candidates = BITMAP_ALLOC (NULL);
+ bitmap_head candidates;
/* Look for blocks that end in a computed jump, and see if such blocks
are suitable for unfactoring. If a block is a candidate for unfactoring,
@@ -2453,11 +2452,11 @@ duplicate_computed_gotos (void)
if (all_flags & EDGE_COMPLEX)
continue;
- bitmap_set_bit (candidates, bb->index);
+ bitmap_set_bit (&candidates, bb->index);
}
/* Nothing to do if there is no computed jump here. */
- if (bitmap_empty_p (candidates))
+ if (bitmap_empty_p (&candidates))
goto done;
/* Duplicate computed gotos. */
@@ -2478,7 +2477,7 @@ duplicate_computed_gotos (void)
continue;
/* The successor block has to be a duplication candidate. */
- if (!bitmap_bit_p (candidates, single_succ (bb)->index))
+ if (!bitmap_bit_p (&candidates, single_succ (bb)->index))
continue;
/* Don't duplicate a partition crossing edge, which requires difficult
@@ -2501,7 +2500,6 @@ done:
fixup_partitions ();
cfg_layout_finalize ();
- BITMAP_FREE (candidates);
return 0;
}
diff --git a/gcc/bitmap.c b/gcc/bitmap.c
index 4855a6691ea..9d8ebc74558 100644
--- a/gcc/bitmap.c
+++ b/gcc/bitmap.c
@@ -310,6 +310,8 @@ bitmap_clear (bitmap head)
{
if (head->first)
bitmap_elt_clear_from (head, head->first);
+
+ head->first = NULL;
}
/* Initialize a bitmap obstack. If BIT_OBSTACK is NULL, initialize
diff --git a/gcc/bitmap.h b/gcc/bitmap.h
index 6fa25abdc77..fd27258655d 100644
--- a/gcc/bitmap.h
+++ b/gcc/bitmap.h
@@ -174,10 +174,23 @@ struct GTY((chain_next ("%h.next"), chain_prev ("%h.prev"))) bitmap_element {
BITMAP_WORD bits[BITMAP_ELEMENT_WORDS]; /* Bits that are set. */
};
+
+/* Global data */
+extern bitmap_element bitmap_zero_bits; /* Zero bitmap element */
+extern bitmap_obstack bitmap_default_obstack; /* Default bitmap obstack */
+
+/* Clear a bitmap by freeing up the linked list. */
+extern void bitmap_clear (bitmap);
+
+static void bitmap_initialize_stat (bitmap head, bitmap_obstack *obstack MEM_STAT_DECL);
+
/* Head of bitmap linked list. The 'current' member points to something
already pointed to by the chain started by first, so GTY((skip)) it. */
-
struct GTY(()) bitmap_head {
+ bitmap_head (bitmap_obstack *o = &bitmap_default_obstack MEM_STAT_DECL)
+ { bitmap_initialize_stat (this, o PASS_MEM_STAT); }
+ ~bitmap_head () { bitmap_clear (this); }
+
unsigned int indx; /* Index of last element looked at. */
unsigned int descriptor_id; /* Unique identifier for the allocation
site of this bitmap, for detailed
@@ -188,12 +201,6 @@ struct GTY(()) bitmap_head {
If NULL, then use GGC allocation. */
};
-/* Global data */
-extern bitmap_element bitmap_zero_bits; /* Zero bitmap element */
-extern bitmap_obstack bitmap_default_obstack; /* Default bitmap obstack */
-
-/* Clear a bitmap by freeing up the linked list. */
-extern void bitmap_clear (bitmap);
/* Copy a bitmap to another bitmap. */
extern void bitmap_copy (bitmap, const_bitmap);
diff --git a/gcc/bt-load.c b/gcc/bt-load.c
index 4fa68f11e98..26fdcb25457 100644
--- a/gcc/bt-load.c
+++ b/gcc/bt-load.c
@@ -1075,7 +1075,7 @@ combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
target registers live over the merged range. */
int btr;
HARD_REG_SET combined_btrs_live;
- bitmap combined_live_range = BITMAP_ALLOC (NULL);
+ bitmap_head combined_live_range;
btr_user user;
if (other_def->live_range == NULL)
@@ -1084,10 +1084,10 @@ combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
btr_def_live_range (other_def, &dummy_btrs_live_in_range);
}
COPY_HARD_REG_SET (combined_btrs_live, *btrs_live_in_range);
- bitmap_copy (combined_live_range, def->live_range);
+ bitmap_copy (&combined_live_range, def->live_range);
for (user = other_def->uses; user != NULL; user = user->next)
- augment_live_range (combined_live_range, &combined_btrs_live,
+ augment_live_range (&combined_live_range, &combined_btrs_live,
def->bb, user->bb,
(flag_btr_bb_exclusive
|| user->insn != BB_END (def->bb)
@@ -1124,7 +1124,7 @@ combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
REGNO (user->use)));
clear_btr_from_live_range (other_def);
other_def->uses = NULL;
- bitmap_copy (def->live_range, combined_live_range);
+ bitmap_copy (def->live_range, &combined_live_range);
if (other_def->btr == btr && other_def->other_btr_uses_after_use)
def->other_btr_uses_after_use = 1;
COPY_HARD_REG_SET (*btrs_live_in_range, combined_btrs_live);
@@ -1133,7 +1133,6 @@ combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
delete_insn (other_def->insn);
}
- BITMAP_FREE (combined_live_range);
}
}
}
diff --git a/gcc/c/c-typeck.c b/gcc/c/c-typeck.c
index da6a6fc9f98..d38d0e93cfd 100644
--- a/gcc/c/c-typeck.c
+++ b/gcc/c/c-typeck.c
@@ -11704,18 +11704,14 @@ c_find_omp_placeholder_r (tree *tp, int *, void *data)
tree
c_finish_omp_clauses (tree clauses)
{
- bitmap_head generic_head, firstprivate_head, lastprivate_head;
- bitmap_head aligned_head;
tree c, t, *pc = &clauses;
bool branch_seen = false;
bool copyprivate_seen = false;
tree *nowait_clause = NULL;
bitmap_obstack_initialize (NULL);
- bitmap_initialize (&generic_head, &bitmap_default_obstack);
- bitmap_initialize (&firstprivate_head, &bitmap_default_obstack);
- bitmap_initialize (&lastprivate_head, &bitmap_default_obstack);
- bitmap_initialize (&aligned_head, &bitmap_default_obstack);
+ bitmap_head generic_head, firstprivate_head, lastprivate_head;
+ bitmap_head aligned_head;
for (pc = &clauses, c = clauses; c ; c = *pc)
{
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 4118857608a..478e543d069 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -725,16 +725,13 @@ post_order_compute (int *post_order, bool include_entry_exit,
basic_block
dfs_find_deadend (basic_block bb)
{
- bitmap visited = BITMAP_ALLOC (NULL);
+ bitmap_head visited;
for (;;)
{
if (EDGE_COUNT (bb->succs) == 0
- || ! bitmap_set_bit (visited, bb->index))
- {
- BITMAP_FREE (visited);
- return bb;
- }
+ || ! bitmap_set_bit (&visited, bb->index))
+ return bb;
bb = EDGE_SUCC (bb, 0)->dest;
}
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 06d494c3d92..cbc915e9ee1 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -484,7 +484,7 @@ add_scope_conflicts (void)
{
basic_block bb;
bool changed;
- bitmap work = BITMAP_ALLOC (NULL);
+ bitmap_head work;
int *rpo;
int n_bbs;
@@ -514,17 +514,16 @@ add_scope_conflicts (void)
bitmap active;
bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
active = (bitmap)bb->aux;
- add_scope_conflicts_1 (bb, work, false);
- if (bitmap_ior_into (active, work))
+ add_scope_conflicts_1 (bb, &work, false);
+ if (bitmap_ior_into (active, &work))
changed = true;
}
}
FOR_EACH_BB_FN (bb, cfun)
- add_scope_conflicts_1 (bb, work, true);
+ add_scope_conflicts_1 (bb, &work, true);
free (rpo);
- BITMAP_FREE (work);
FOR_ALL_BB_FN (bb, cfun)
BITMAP_FREE (bb->aux);
}
@@ -684,7 +683,7 @@ update_alias_info_with_stack_vars (void)
{
unsigned i;
struct pointer_set_t *visited = pointer_set_create ();
- bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
+ bitmap_head temp;
for (i = 1; i < num_ssa_names; i++)
{
@@ -695,15 +694,14 @@ update_alias_info_with_stack_vars (void)
&& POINTER_TYPE_P (TREE_TYPE (name))
&& ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
- visited, temp);
+ visited, &temp);
}
add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
- decls_to_partitions, visited, temp);
+ decls_to_partitions, visited, &temp);
pointer_set_destroy (visited);
pointer_map_destroy (decls_to_partitions);
- BITMAP_FREE (temp);
}
}
diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
index 70744d83d19..279a6c9d5de 100644
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -917,7 +917,6 @@ get_loop_body_in_bfs_order (const struct loop *loop)
{
basic_block *blocks;
basic_block bb;
- bitmap visited;
unsigned int i = 0;
unsigned int vc = 1;
@@ -925,7 +924,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
blocks = XNEWVEC (basic_block, loop->num_nodes);
- visited = BITMAP_ALLOC (NULL);
+ bitmap_head visited;
bb = loop->header;
while (i < loop->num_nodes)
@@ -933,7 +932,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
edge e;
edge_iterator ei;
- if (bitmap_set_bit (visited, bb->index))
+ if (bitmap_set_bit (&visited, bb->index))
/* This basic block is now visited */
blocks[i++] = bb;
@@ -941,7 +940,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
{
if (flow_bb_inside_loop_p (loop, e->dest))
{
- if (bitmap_set_bit (visited, e->dest->index))
+ if (bitmap_set_bit (&visited, e->dest->index))
blocks[i++] = e->dest;
}
}
@@ -951,7 +950,6 @@ get_loop_body_in_bfs_order (const struct loop *loop)
bb = blocks[vc++];
}
- BITMAP_FREE (visited);
return blocks;
}
diff --git a/gcc/config/c6x/c6x.c b/gcc/config/c6x/c6x.c
index 9ba10df73c6..68beef9bd50 100644
--- a/gcc/config/c6x/c6x.c
+++ b/gcc/config/c6x/c6x.c
@@ -3524,15 +3524,13 @@ reshuffle_units (basic_block loop)
unit_req_table reqs;
edge e;
edge_iterator ei;
- bitmap_head bbs;
count_unit_reqs (reqs, head, PREV_INSN (tail));
merge_unit_reqs (reqs);
regrename_init (true);
- bitmap_initialize (&bbs, &bitmap_default_obstack);
-
+ bitmap_head bbs;
FOR_EACH_EDGE (e, ei, loop->preds)
bitmap_set_bit (&bbs, e->src->index);
diff --git a/gcc/cp/semantics.c b/gcc/cp/semantics.c
index eb1c44ee1f3..f8258ec271c 100644
--- a/gcc/cp/semantics.c
+++ b/gcc/cp/semantics.c
@@ -5220,10 +5220,6 @@ finish_omp_clauses (tree clauses)
bool copyprivate_seen = false;
bitmap_obstack_initialize (NULL);
- bitmap_initialize (&generic_head, &bitmap_default_obstack);
- bitmap_initialize (&firstprivate_head, &bitmap_default_obstack);
- bitmap_initialize (&lastprivate_head, &bitmap_default_obstack);
- bitmap_initialize (&aligned_head, &bitmap_default_obstack);
for (pc = &clauses, c = clauses; c ; c = *pc)
{
diff --git a/gcc/dce.c b/gcc/dce.c
index 07592f46a0e..a470946f790 100644
--- a/gcc/dce.c
+++ b/gcc/dce.c
@@ -263,7 +263,6 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
rtx p, insn, prev_insn;
bool ret;
HOST_WIDE_INT min_sp_off, max_sp_off;
- bitmap sp_bytes;
gcc_assert (CALL_P (call_insn));
if (!ACCUMULATE_OUTGOING_ARGS)
@@ -343,7 +342,7 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
if (min_sp_off >= max_sp_off)
return true;
- sp_bytes = BITMAP_ALLOC (NULL);
+ bitmap_head sp_bytes;
/* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
which contain arguments. Checking has been done in the previous
@@ -381,7 +380,7 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
}
for (byte = off; byte < off + MEM_SIZE (mem); byte++)
{
- if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
+ if (!bitmap_set_bit (&sp_bytes, byte - min_sp_off))
gcc_unreachable ();
}
}
@@ -464,7 +463,7 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
if (GET_MODE_SIZE (GET_MODE (mem)) == 0
|| !check_argument_store (mem, off, min_sp_off,
- max_sp_off, sp_bytes))
+ max_sp_off, &sp_bytes))
break;
if (!deletable_insn_p (insn, fast, NULL))
@@ -475,14 +474,13 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
else
bitmap_set_bit (arg_stores, INSN_UID (insn));
- if (bitmap_empty_p (sp_bytes))
+ if (bitmap_empty_p (&sp_bytes))
{
ret = true;
break;
}
}
- BITMAP_FREE (sp_bytes);
if (!ret && arg_stores)
bitmap_clear (arg_stores);
@@ -839,7 +837,7 @@ static bool
word_dce_process_block (basic_block bb, bool redo_out,
struct dead_debug_global *global_debug)
{
- bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
+ bitmap_head local_live (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
struct dead_debug_local debug;
@@ -863,7 +861,7 @@ word_dce_process_block (basic_block bb, bool redo_out,
df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
}
- bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
+ bitmap_copy (&local_live, DF_WORD_LR_OUT (bb));
dead_debug_local_init (&debug, NULL, global_debug);
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -874,8 +872,8 @@ word_dce_process_block (basic_block bb, bool redo_out,
if (DF_REF_REGNO (*use_rec) >= FIRST_PSEUDO_REGISTER
&& (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (*use_rec)))
== 2 * UNITS_PER_WORD)
- && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec))
- && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
+ && !bitmap_bit_p (&local_live, 2 * DF_REF_REGNO (*use_rec))
+ && !bitmap_bit_p (&local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
}
else if (INSN_P (insn))
@@ -884,14 +882,14 @@ word_dce_process_block (basic_block bb, bool redo_out,
/* No matter if the instruction is needed or not, we remove
any regno in the defs from the live set. */
- any_changed = df_word_lr_simulate_defs (insn, local_live);
+ any_changed = df_word_lr_simulate_defs (insn, &local_live);
if (any_changed)
mark_insn (insn, true);
/* On the other hand, we do not allow the dead uses to set
anything in local_live. */
if (marked_insn_p (insn))
- df_word_lr_simulate_uses (insn, local_live);
+ df_word_lr_simulate_uses (insn, &local_live);
/* Insert debug temps for dead REGs used in subsequent debug
insns. We may have to emit a debug temp even if the insn
@@ -913,16 +911,15 @@ word_dce_process_block (basic_block bb, bool redo_out,
{
fprintf (dump_file, "finished processing insn %d live out = ",
INSN_UID (insn));
- df_print_word_regset (dump_file, local_live);
+ df_print_word_regset (dump_file, &local_live);
}
}
- block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
+ block_changed = !bitmap_equal_p (&local_live, DF_WORD_LR_IN (bb));
if (block_changed)
- bitmap_copy (DF_WORD_LR_IN (bb), local_live);
+ bitmap_copy (DF_WORD_LR_IN (bb), &local_live);
dead_debug_local_finish (&debug, NULL);
- BITMAP_FREE (local_live);
return block_changed;
}
@@ -937,7 +934,7 @@ static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
struct dead_debug_global *global_debug)
{
- bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
+ bitmap_head local_live (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
df_ref *def_rec;
@@ -962,9 +959,9 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
df_print_regset (dump_file, DF_LR_OUT (bb));
}
- bitmap_copy (local_live, DF_LR_OUT (bb));
+ bitmap_copy (&local_live, DF_LR_OUT (bb));
- df_simulate_initialize_backwards (bb, local_live);
+ df_simulate_initialize_backwards (bb, &local_live);
dead_debug_local_init (&debug, NULL, global_debug);
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -972,7 +969,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
{
df_ref *use_rec;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
- if (!bitmap_bit_p (local_live, DF_REF_REGNO (*use_rec))
+ if (!bitmap_bit_p (&local_live, DF_REF_REGNO (*use_rec))
&& !bitmap_bit_p (au, DF_REF_REGNO (*use_rec)))
dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
}
@@ -983,7 +980,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
/* The insn is needed if there is someone who uses the output. */
if (!needed)
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
+ if (bitmap_bit_p (&local_live, DF_REF_REGNO (*def_rec))
|| bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
{
needed = true;
@@ -993,12 +990,12 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
/* No matter if the instruction is needed or not, we remove
any regno in the defs from the live set. */
- df_simulate_defs (insn, local_live);
+ df_simulate_defs (insn, &local_live);
/* On the other hand, we do not allow the dead uses to set
anything in local_live. */
if (needed)
- df_simulate_uses (insn, local_live);
+ df_simulate_uses (insn, &local_live);
/* Insert debug temps for dead REGs used in subsequent debug
insns. We may have to emit a debug temp even if the insn
@@ -1013,13 +1010,12 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
}
dead_debug_local_finish (&debug, NULL);
- df_simulate_finalize_backwards (bb, local_live);
+ df_simulate_finalize_backwards (bb, &local_live);
- block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
+ block_changed = !bitmap_equal_p (&local_live, DF_LR_IN (bb));
if (block_changed)
- bitmap_copy (DF_LR_IN (bb), local_live);
+ bitmap_copy (DF_LR_IN (bb), &local_live);
- BITMAP_FREE (local_live);
return block_changed;
}
@@ -1034,11 +1030,11 @@ fast_dce (bool word_level)
int *postorder = df_get_postorder (DF_BACKWARD);
int n_blocks = df_get_n_blocks (DF_BACKWARD);
/* The set of blocks that have been seen on this iteration. */
- bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
+ bitmap_head processed (&dce_blocks_bitmap_obstack);
/* The set of blocks that need to have the out vectors reset because
the in of one of their successors has changed. */
- bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
- bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
+ bitmap_head redo_out (&dce_blocks_bitmap_obstack);
+ bitmap_head all_blocks (&dce_blocks_bitmap_obstack);
bool global_changed = true;
/* These regs are considered always live so if they end up dying
@@ -1054,7 +1050,7 @@ fast_dce (bool word_level)
prescan_insns_for_dce (true);
for (i = 0; i < n_blocks; i++)
- bitmap_set_bit (all_blocks, postorder[i]);
+ bitmap_set_bit (&all_blocks, postorder[i]);
dead_debug_global_init (&global_debug, NULL);
@@ -1070,34 +1066,34 @@ fast_dce (bool word_level)
if (index < NUM_FIXED_BLOCKS)
{
- bitmap_set_bit (processed, index);
+ bitmap_set_bit (&processed, index);
continue;
}
if (word_level)
local_changed
- = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
+ = word_dce_process_block (bb, bitmap_bit_p (&redo_out, index),
&global_debug);
else
local_changed
- = dce_process_block (bb, bitmap_bit_p (redo_out, index),
+ = dce_process_block (bb, bitmap_bit_p (&redo_out, index),
bb_has_eh_pred (bb) ? au_eh : au,
&global_debug);
- bitmap_set_bit (processed, index);
+ bitmap_set_bit (&processed, index);
if (local_changed)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
- if (bitmap_bit_p (processed, e->src->index))
+ if (bitmap_bit_p (&processed, e->src->index))
/* Be tricky about when we need to iterate the
analysis. We only have redo the analysis if the
bitmaps change at the top of a block that is the
entry to a loop. */
global_changed = true;
else
- bitmap_set_bit (redo_out, e->src->index);
+ bitmap_set_bit (&redo_out, e->src->index);
}
}
@@ -1111,17 +1107,17 @@ fast_dce (bool word_level)
the cheap. */
delete_unmarked_insns ();
bitmap_clear (marked);
- bitmap_clear (processed);
- bitmap_clear (redo_out);
+ bitmap_clear (&processed);
+ bitmap_clear (&redo_out);
/* We do not need to rescan any instructions. We only need
to redo the dataflow equations for the blocks that had a
change at the top of the block. Then we need to redo the
iteration. */
if (word_level)
- df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
+ df_analyze_problem (df_word_lr, &all_blocks, postorder, n_blocks);
else
- df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
+ df_analyze_problem (df_lr, &all_blocks, postorder, n_blocks);
if (old_flag & DF_LR_RUN_DCE)
df_set_flags (DF_LR_RUN_DCE);
@@ -1133,10 +1129,6 @@ fast_dce (bool word_level)
dead_debug_global_finish (&global_debug, NULL);
delete_unmarked_insns ();
-
- BITMAP_FREE (processed);
- BITMAP_FREE (redo_out);
- BITMAP_FREE (all_blocks);
}
diff --git a/gcc/df-core.c b/gcc/df-core.c
index edb3b25727a..46049917a65 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -506,8 +506,7 @@ df_set_blocks (bitmap blocks)
/* This block is called to change the focus from one subset
to another. */
int p;
- bitmap_head diff;
- bitmap_initialize (&diff, &df_bitmap_obstack);
+ bitmap_head diff (&df_bitmap_obstack);
bitmap_and_compl (&diff, df->blocks_to_analyze, blocks);
for (p = 0; p < df->num_problems_defined; p++)
{
@@ -531,14 +530,12 @@ df_set_blocks (bitmap blocks)
}
}
}
-
- bitmap_clear (&diff);
}
else
{
/* This block of code is executed to change the focus from
the entire function to a subset. */
- bitmap_head blocks_to_reset;
+ bitmap_head blocks_to_reset (&df_bitmap_obstack);
bool initialized = false;
int p;
for (p = 0; p < df->num_problems_defined; p++)
@@ -549,7 +546,6 @@ df_set_blocks (bitmap blocks)
if (!initialized)
{
basic_block bb;
- bitmap_initialize (&blocks_to_reset, &df_bitmap_obstack);
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_set_bit (&blocks_to_reset, bb->index);
@@ -558,8 +554,6 @@ df_set_blocks (bitmap blocks)
dflow->problem->reset_fun (&blocks_to_reset);
}
}
- if (initialized)
- bitmap_clear (&blocks_to_reset);
df->blocks_to_analyze = BITMAP_ALLOC (&df_bitmap_obstack);
}
@@ -1687,9 +1681,8 @@ df_compact_blocks (void)
int i, p;
basic_block bb;
void *problem_temps;
- bitmap_head tmp;
+ bitmap_head tmp (&df_bitmap_obstack);
- bitmap_initialize (&tmp, &df_bitmap_obstack);
for (p = 0; p < df->num_problems_defined; p++)
{
struct dataflow *dflow = df->problems_in_order[p];
@@ -1761,8 +1754,6 @@ df_compact_blocks (void)
}
}
- bitmap_clear (&tmp);
-
i = NUM_FIXED_BLOCKS;
FOR_EACH_BB_FN (bb, cfun)
{
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index 1c06d61360a..7be610a9ac5 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -476,9 +476,8 @@ df_rd_confluence_n (edge e)
bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
bitmap_iterator bi;
unsigned int regno;
- bitmap_head tmp;
+ bitmap_head tmp (&df_bitmap_obstack);
- bitmap_initialize (&tmp, &df_bitmap_obstack);
bitmap_copy (&tmp, op2);
bitmap_and_compl_into (&tmp, dense_invalidated);
@@ -489,7 +488,6 @@ df_rd_confluence_n (edge e)
DF_DEFS_COUNT (regno));
}
changed |= bitmap_ior_into (op1, &tmp);
- bitmap_clear (&tmp);
return changed;
}
else
@@ -517,12 +515,11 @@ df_rd_transfer_function (int bb_index)
else
{
struct df_rd_problem_data *problem_data;
- bitmap_head tmp;
/* Note that TMP is _not_ a temporary bitmap if we end up replacing
OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
- bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
+ bitmap_head tmp (&problem_data->rd_bitmaps);
bitmap_copy (&tmp, in);
EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
@@ -538,6 +535,8 @@ df_rd_transfer_function (int bb_index)
{
bitmap_clear (out);
bb_info->out = tmp;
+ // kind of hacky but hopefully that'll be fixed by more c++ification
+ tmp.first = tmp.current = NULL;
}
else
bitmap_clear (&tmp);
@@ -620,14 +619,13 @@ df_rd_start_dump (FILE *file)
static void
df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
{
- bitmap_head tmp;
+ bitmap_head tmp (&df_bitmap_obstack);
unsigned int regno;
unsigned int m = DF_REG_SIZE (df);
bool first_reg = true;
fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set));
- bitmap_initialize (&tmp, &df_bitmap_obstack);
for (regno = 0; regno < m; regno++)
{
if (HARD_REGISTER_NUM_P (regno)
@@ -657,7 +655,6 @@ df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
}
fprintf (file, "\n");
- bitmap_clear (&tmp);
}
/* Debugging info at top of bb. */
@@ -1283,17 +1280,13 @@ void
df_lr_verify_transfer_functions (void)
{
basic_block bb;
- bitmap_head saved_def;
- bitmap_head saved_use;
- bitmap_head all_blocks;
if (!df)
return;
- bitmap_initialize (&saved_def, &bitmap_default_obstack);
- bitmap_initialize (&saved_use, &bitmap_default_obstack);
- bitmap_initialize (&all_blocks, &bitmap_default_obstack);
-
+ bitmap_head saved_def;
+ bitmap_head saved_use;
+ bitmap_head all_blocks;
FOR_ALL_BB_FN (bb, cfun)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
@@ -1333,10 +1326,6 @@ df_lr_verify_transfer_functions (void)
/* Make sure there are no dirty bits in blocks that have been deleted. */
gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
&all_blocks));
-
- bitmap_clear (&saved_def);
- bitmap_clear (&saved_use);
- bitmap_clear (&all_blocks);
}
@@ -1827,17 +1816,13 @@ void
df_live_verify_transfer_functions (void)
{
basic_block bb;
- bitmap_head saved_gen;
- bitmap_head saved_kill;
- bitmap_head all_blocks;
if (!df)
return;
- bitmap_initialize (&saved_gen, &bitmap_default_obstack);
- bitmap_initialize (&saved_kill, &bitmap_default_obstack);
- bitmap_initialize (&all_blocks, &bitmap_default_obstack);
-
+ bitmap_head saved_gen;
+ bitmap_head saved_kill;
+ bitmap_head all_blocks;
df_grow_insn_info ();
FOR_ALL_BB_FN (bb, cfun)
@@ -1879,9 +1864,6 @@ df_live_verify_transfer_functions (void)
/* Make sure there are no dirty bits in blocks that have been deleted. */
gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
&all_blocks));
- bitmap_clear (&saved_gen);
- bitmap_clear (&saved_kill);
- bitmap_clear (&all_blocks);
}
/*----------------------------------------------------------------------------
@@ -2110,7 +2092,6 @@ df_chain_create_bb (unsigned int bb_index)
rtx insn;
bitmap_head cpy;
- bitmap_initialize (&cpy, &bitmap_default_obstack);
bitmap_copy (&cpy, &bb_info->in);
bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
@@ -2152,8 +2133,6 @@ df_chain_create_bb (unsigned int bb_index)
df_chain_create_bb_process_use (&cpy,
df_get_artificial_uses (bb->index),
0);
-
- bitmap_clear (&cpy);
}
/* Create def-use chains from reaching use bitmaps for basic blocks
@@ -3378,12 +3357,7 @@ df_note_compute (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
- bitmap_head live, do_not_gen, artificial_uses;
-
- bitmap_initialize (&live, &df_bitmap_obstack);
- bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
- bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
-
+ bitmap_head live (&df_bitmap_obstack), do_not_gen (&df_bitmap_obstack), artificial_uses (&df_bitmap_obstack);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
/* ??? Unlike fast DCE, we don't use global_debug for uses of dead
@@ -3393,10 +3367,6 @@ df_note_compute (bitmap all_blocks)
point before visiting a subsequent debug use. */
df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
}
-
- bitmap_clear (&live);
- bitmap_clear (&do_not_gen);
- bitmap_clear (&artificial_uses);
}
@@ -4429,6 +4399,7 @@ df_md_free (void)
struct df_md_problem_data *problem_data
= (struct df_md_problem_data *) df_md->problem_data;
+ bitmap_clear(&df_md_scratch);
bitmap_obstack_release (&problem_data->md_bitmaps);
free (problem_data);
df_md->problem_data = NULL;
diff --git a/gcc/df-scan.c b/gcc/df-scan.c
index 0b5b4374b7f..b89672eec31 100644
--- a/gcc/df-scan.c
+++ b/gcc/df-scan.c
@@ -1386,9 +1386,7 @@ df_insn_rescan_all (void)
basic_block bb;
bitmap_iterator bi;
unsigned int uid;
- bitmap_head tmp;
-
- bitmap_initialize (&tmp, &df_bitmap_obstack);
+ bitmap_head tmp (&df_bitmap_obstack);
if (df->changeable_flags & DF_NO_INSN_RESCAN)
{
@@ -1410,7 +1408,6 @@ df_insn_rescan_all (void)
df_insn_info_delete (uid);
}
- bitmap_clear (&tmp);
bitmap_clear (&df->insns_to_delete);
bitmap_clear (&df->insns_to_rescan);
bitmap_clear (&df->insns_to_notes_rescan);
@@ -1440,9 +1437,7 @@ df_process_deferred_rescans (void)
bool defer_insn_rescan = false;
bitmap_iterator bi;
unsigned int uid;
- bitmap_head tmp;
-
- bitmap_initialize (&tmp, &df_bitmap_obstack);
+ bitmap_head tmp (&df_bitmap_obstack);
if (df->changeable_flags & DF_NO_INSN_RESCAN)
{
@@ -1486,7 +1481,6 @@ df_process_deferred_rescans (void)
if (dump_file)
fprintf (dump_file, "ending the processing of deferred insns\n");
- bitmap_clear (&tmp);
bitmap_clear (&df->insns_to_delete);
bitmap_clear (&df->insns_to_rescan);
bitmap_clear (&df->insns_to_notes_rescan);
@@ -3904,10 +3898,9 @@ df_record_entry_block_defs (bitmap entry_block_defs)
void
df_update_entry_block_defs (void)
{
- bitmap_head refs;
+ bitmap_head refs (&df_bitmap_obstack);
bool changed = false;
- bitmap_initialize (&refs, &df_bitmap_obstack);
df_get_entry_block_def_set (&refs);
if (df->entry_block_defs)
{
@@ -3935,7 +3928,6 @@ df_update_entry_block_defs (void)
bitmap_copy (df->entry_block_defs, &refs);
df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
}
- bitmap_clear (&refs);
}
@@ -4077,10 +4069,9 @@ df_record_exit_block_uses (bitmap exit_block_uses)
void
df_update_exit_block_uses (void)
{
- bitmap_head refs;
+ bitmap_head refs (&df_bitmap_obstack);
bool changed = false;
- bitmap_initialize (&refs, &df_bitmap_obstack);
df_get_exit_block_use_set (&refs);
if (df->exit_block_uses)
{
@@ -4108,7 +4099,6 @@ df_update_exit_block_uses (void)
bitmap_copy (df->exit_block_uses,& refs);
df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
}
- bitmap_clear (&refs);
}
static bool initialized = false;
@@ -4458,10 +4448,9 @@ df_bb_verify (basic_block bb)
static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
- bitmap_head entry_block_defs;
+ bitmap_head entry_block_defs (&df_bitmap_obstack);
bool is_eq;
- bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
df_get_entry_block_def_set (&entry_block_defs);
is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
@@ -4475,8 +4464,6 @@ df_entry_block_bitmap_verify (bool abort_if_fail)
gcc_assert (0);
}
- bitmap_clear (&entry_block_defs);
-
return is_eq;
}
@@ -4487,10 +4474,9 @@ df_entry_block_bitmap_verify (bool abort_if_fail)
static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
- bitmap_head exit_block_uses;
+ bitmap_head exit_block_uses (&df_bitmap_obstack);
bool is_eq;
- bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
df_get_exit_block_use_set (&exit_block_uses);
is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
@@ -4504,8 +4490,6 @@ df_exit_block_bitmap_verify (bool abort_if_fail)
gcc_assert (0);
}
- bitmap_clear (&exit_block_uses);
-
return is_eq;
}
@@ -4518,8 +4502,6 @@ df_scan_verify (void)
{
unsigned int i;
basic_block bb;
- bitmap_head regular_block_artificial_uses;
- bitmap_head eh_block_artificial_uses;
if (!df)
return;
@@ -4540,8 +4522,8 @@ df_scan_verify (void)
/* (2) There are various bitmaps whose value may change over the
course of the compilation. This step recomputes them to make
sure that they have not slipped out of date. */
- bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
- bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
+ bitmap_head regular_block_artificial_uses (&df_bitmap_obstack);
+ bitmap_head eh_block_artificial_uses (&df_bitmap_obstack);
df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
@@ -4555,9 +4537,6 @@ df_scan_verify (void)
gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
&df->eh_block_artificial_uses));
- bitmap_clear (&regular_block_artificial_uses);
- bitmap_clear (&eh_block_artificial_uses);
-
/* Verify entry block and exit block. These only verify the bitmaps,
the refs are verified in df_bb_verify. */
df_entry_block_bitmap_verify (true);
diff --git a/gcc/dominance.c b/gcc/dominance.c
index ff0dfe6714f..7d634f114d4 100644
--- a/gcc/dominance.c
+++ b/gcc/dominance.c
@@ -1164,7 +1164,6 @@ static void
determine_dominators_for_sons (struct graph *g, vec<basic_block> bbs,
int y, int *son, int *brother)
{
- bitmap gprime;
int i, a, nc;
vec<int> *sccs;
basic_block bb, dom, ybb;
@@ -1189,12 +1188,11 @@ determine_dominators_for_sons (struct graph *g, vec<basic_block> bbs,
return;
}
- gprime = BITMAP_ALLOC (NULL);
+ bitmap_head gprime;
for (a = son[y]; a != -1; a = brother[a])
- bitmap_set_bit (gprime, a);
+ bitmap_set_bit (&gprime, a);
- nc = graphds_scc (g, gprime);
- BITMAP_FREE (gprime);
+ nc = graphds_scc (g, &gprime);
/* ??? Needed to work around the pre-processor confusion with
using a multi-argument template type as macro argument. */
diff --git a/gcc/function.c b/gcc/function.c
index b43e67f65ef..7346329e799 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -5884,7 +5884,6 @@ thread_prologue_and_epilogue_insns (void)
#ifdef HAVE_simple_return
vec<edge> unconverted_simple_returns = vNULL;
bool nonempty_prologue;
- bitmap_head bb_flags;
unsigned max_grow_size;
#endif
rtx returnjump;
@@ -5959,7 +5958,7 @@ thread_prologue_and_epilogue_insns (void)
#endif
#ifdef HAVE_simple_return
- bitmap_initialize (&bb_flags, &bitmap_default_obstack);
+ bitmap_head bb_flags;
/* Try to perform a kind of shrink-wrapping, making sure the
prologue/epilogue is emitted only around those parts of the
@@ -5982,9 +5981,6 @@ thread_prologue_and_epilogue_insns (void)
rtx p_insn;
vec<basic_block> vec;
basic_block bb;
- bitmap_head bb_antic_flags;
- bitmap_head bb_on_list;
- bitmap_head bb_tail;
if (dump_file)
fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
@@ -6009,9 +6005,9 @@ thread_prologue_and_epilogue_insns (void)
prepare_shrink_wrap (entry_edge->dest);
- bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
- bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
- bitmap_initialize (&bb_tail, &bitmap_default_obstack);
+ bitmap_head bb_antic_flags;
+ bitmap_head bb_on_list;
+ bitmap_head bb_tail;
/* Find the set of basic blocks that require a stack frame,
and blocks that are too big to be duplicated. */
@@ -6304,9 +6300,6 @@ thread_prologue_and_epilogue_insns (void)
}
fail_shrinkwrap:
- bitmap_clear (&bb_tail);
- bitmap_clear (&bb_antic_flags);
- bitmap_clear (&bb_on_list);
vec.release ();
}
#endif
@@ -6678,10 +6671,6 @@ epilogue_done:
}
#endif
-#ifdef HAVE_simple_return
- bitmap_clear (&bb_flags);
-#endif
-
/* Threading the prologue and epilogue changes the artificial refs
in the entry and exit blocks. */
epilogue_completed = 1;
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 2bb0b5fec1e..700e497388a 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -3509,22 +3509,21 @@ calculate_bb_reg_pressure (void)
unsigned int j;
rtx insn;
basic_block bb;
- bitmap curr_regs_live;
bitmap_iterator bi;
ira_setup_eliminable_regset ();
- curr_regs_live = BITMAP_ALLOC (&reg_obstack);
+ bitmap_head curr_regs_live (&reg_obstack);
FOR_EACH_BB_FN (bb, cfun)
{
curr_bb = bb;
BB_DATA (bb)->live_in = BITMAP_ALLOC (NULL);
BB_DATA (bb)->backup = BITMAP_ALLOC (NULL);
bitmap_copy (BB_DATA (bb)->live_in, df_get_live_in (bb));
- bitmap_copy (curr_regs_live, df_get_live_out (bb));
+ bitmap_copy (&curr_regs_live, df_get_live_out (bb));
for (i = 0; i < ira_pressure_classes_num; i++)
curr_reg_pressure[ira_pressure_classes[i]] = 0;
- EXECUTE_IF_SET_IN_BITMAP (curr_regs_live, 0, j, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&curr_regs_live, 0, j, bi)
change_pressure (j, true);
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -3544,7 +3543,7 @@ calculate_bb_reg_pressure (void)
if (!(DF_REF_FLAGS (*def_rec)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
- if (bitmap_clear_bit (curr_regs_live, regno))
+ if (bitmap_clear_bit (&curr_regs_live, regno))
change_pressure (regno, false);
}
}
@@ -3554,12 +3553,11 @@ calculate_bb_reg_pressure (void)
dreg = DF_REF_REAL_REG (*use_rec);
gcc_assert (REG_P (dreg));
regno = REGNO (dreg);
- if (bitmap_set_bit (curr_regs_live, regno))
+ if (bitmap_set_bit (&curr_regs_live, regno))
change_pressure (regno, true);
}
}
}
- BITMAP_FREE (curr_regs_live);
if (dump_file == NULL)
return;
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 5de44457aa8..6c872508bf2 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -876,7 +876,6 @@ gimple_fold_builtin (gimple stmt)
tree result, val[3];
tree callee, a;
int arg_idx, type;
- bitmap visited;
bool ignore;
int nargs;
location_t loc = gimple_location (stmt);
@@ -953,16 +952,13 @@ gimple_fold_builtin (gimple stmt)
return NULL_TREE;
/* Try to use the dataflow information gathered by the CCP process. */
- visited = BITMAP_ALLOC (NULL);
- bitmap_clear (visited);
+ bitmap_head visited;
memset (val, 0, sizeof (val));
a = gimple_call_arg (stmt, arg_idx);
- if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
+ if (!get_maxval_strlen (a, &val[arg_idx], &visited, type))
val[arg_idx] = NULL_TREE;
- BITMAP_FREE (visited);
-
result = NULL_TREE;
switch (DECL_FUNCTION_CODE (callee))
{
diff --git a/gcc/init-regs.c b/gcc/init-regs.c
index fcc6e2ca798..82d8df85aec 100644
--- a/gcc/init-regs.c
+++ b/gcc/init-regs.c
@@ -49,7 +49,7 @@ static void
initialize_uninitialized_regs (void)
{
basic_block bb;
- bitmap already_genned = BITMAP_ALLOC (NULL);
+ bitmap_head already_genned;
if (optimize == 1)
{
@@ -64,7 +64,7 @@ initialize_uninitialized_regs (void)
rtx insn;
bitmap lr = DF_LR_IN (bb);
bitmap ur = DF_LIVE_IN (bb);
- bitmap_clear (already_genned);
+ bitmap_clear (&already_genned);
FOR_BB_INSNS (bb, insn)
{
@@ -86,7 +86,7 @@ initialize_uninitialized_regs (void)
This is common for sequences of subreg operations.
They would be deleted during combine but there is no
reason to churn the system. */
- if (bitmap_bit_p (already_genned, regno))
+ if (bitmap_bit_p (&already_genned, regno))
continue;
/* A use is MUST uninitialized if it reaches the top of
@@ -99,7 +99,7 @@ initialize_uninitialized_regs (void)
rtx move_insn;
rtx reg = DF_REF_REAL_REG (use);
- bitmap_set_bit (already_genned, regno);
+ bitmap_set_bit (&already_genned, regno);
start_sequence ();
emit_move_insn (reg, CONST0_RTX (GET_MODE (reg)));
@@ -121,8 +121,6 @@ initialize_uninitialized_regs (void)
df_dump (dump_file);
df_remove_problem (df_live);
}
-
- BITMAP_FREE (already_genned);
}
static bool
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index d304133bfe4..d9bedcce20a 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -1530,16 +1530,15 @@ resolve_noninline_speculation (fibheap_t edge_heap, struct cgraph_edge *edge)
struct cgraph_node *node = edge->caller;
struct cgraph_node *where = node->global.inlined_to
? node->global.inlined_to : node;
- bitmap updated_nodes = BITMAP_ALLOC (NULL);
+ bitmap_head updated_nodes;
cgraph_resolve_speculation (edge, NULL);
reset_edge_caches (where);
inline_update_overall_summary (where);
update_caller_keys (edge_heap, where,
- updated_nodes, NULL);
+ &updated_nodes, NULL);
update_callee_keys (edge_heap, where,
- updated_nodes);
- BITMAP_FREE (updated_nodes);
+ &updated_nodes);
}
}
@@ -1555,7 +1554,7 @@ inline_small_functions (void)
struct cgraph_node *node;
struct cgraph_edge *edge;
fibheap_t edge_heap = fibheap_new ();
- bitmap updated_nodes = BITMAP_ALLOC (NULL);
+ bitmap_head updated_nodes;
int min_size, max_size;
auto_vec<cgraph_edge_p> new_indirect_edges;
int initial_size = 0;
@@ -1658,8 +1657,8 @@ inline_small_functions (void)
reset_node_growth_cache (where);
reset_edge_caches (where);
update_caller_keys (edge_heap, where,
- updated_nodes, NULL);
- bitmap_clear (updated_nodes);
+ &updated_nodes, NULL);
+ bitmap_clear (&updated_nodes);
}
}
@@ -1771,8 +1770,8 @@ inline_small_functions (void)
at once. Consequently we need to update all callee keys. */
if (flag_indirect_inlining)
add_new_edges_to_heap (edge_heap, new_indirect_edges);
- update_callee_keys (edge_heap, where, updated_nodes);
- bitmap_clear (updated_nodes);
+ update_callee_keys (edge_heap, where, &updated_nodes);
+ bitmap_clear (&updated_nodes);
}
else
{
@@ -1813,7 +1812,7 @@ inline_small_functions (void)
reset_edge_caches (edge->callee);
reset_node_growth_cache (callee);
- update_callee_keys (edge_heap, where, updated_nodes);
+ update_callee_keys (edge_heap, where, &updated_nodes);
}
where = edge->caller;
if (where->global.inlined_to)
@@ -1825,8 +1824,8 @@ inline_small_functions (void)
inlined into (since it's body size changed) and for the functions
called by function we inlined (since number of it inlinable callers
might change). */
- update_caller_keys (edge_heap, where, updated_nodes, NULL);
- bitmap_clear (updated_nodes);
+ update_caller_keys (edge_heap, where, &updated_nodes, NULL);
+ bitmap_clear (&updated_nodes);
if (dump_file)
{
@@ -1855,7 +1854,6 @@ inline_small_functions (void)
"Unit growth for small function inlining: %i->%i (%i%%)\n",
initial_size, overall_size,
initial_size ? overall_size * 100 / (initial_size) - 100: 0);
- BITMAP_FREE (updated_nodes);
cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
}
diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c
index dbe32e4371a..ca36f5b06a7 100644
--- a/gcc/ipa-reference.c
+++ b/gcc/ipa-reference.c
@@ -964,7 +964,7 @@ ipa_reference_write_optimization_summary (void)
unsigned int count = 0;
int ltrans_statics_bitcount = 0;
lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
- bitmap ltrans_statics = BITMAP_ALLOC (NULL);
+ bitmap_head ltrans_statics;
int i;
reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
@@ -979,7 +979,7 @@ ipa_reference_write_optimization_summary (void)
&& referenced_from_this_partition_p (&vnode->ref_list, encoder))
{
tree decl = vnode->decl;
- bitmap_set_bit (ltrans_statics, DECL_UID (decl));
+ bitmap_set_bit (&ltrans_statics, DECL_UID (decl));
splay_tree_insert (reference_vars_to_consider,
DECL_UID (decl), (splay_tree_value)decl);
ltrans_statics_bitcount ++;
@@ -992,13 +992,13 @@ ipa_reference_write_optimization_summary (void)
{
symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
cgraph_node *cnode = dyn_cast <cgraph_node> (snode);
- if (cnode && write_node_summary_p (cnode, encoder, ltrans_statics))
+ if (cnode && write_node_summary_p (cnode, encoder, &ltrans_statics))
count++;
}
streamer_write_uhwi_stream (ob->main_stream, count);
if (count)
- stream_out_bitmap (ob, ltrans_statics, ltrans_statics,
+ stream_out_bitmap (ob, &ltrans_statics, &ltrans_statics,
-1);
/* Process all of the functions. */
@@ -1007,7 +1007,7 @@ ipa_reference_write_optimization_summary (void)
{
symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
cgraph_node *cnode = dyn_cast <cgraph_node> (snode);
- if (cnode && write_node_summary_p (cnode, encoder, ltrans_statics))
+ if (cnode && write_node_summary_p (cnode, encoder, &ltrans_statics))
{
ipa_reference_optimization_summary_t info;
int node_ref;
@@ -1016,13 +1016,12 @@ ipa_reference_write_optimization_summary (void)
node_ref = lto_symtab_encoder_encode (encoder, snode);
streamer_write_uhwi_stream (ob->main_stream, node_ref);
- stream_out_bitmap (ob, info->statics_not_read, ltrans_statics,
+ stream_out_bitmap (ob, info->statics_not_read, &ltrans_statics,
ltrans_statics_bitcount);
- stream_out_bitmap (ob, info->statics_not_written, ltrans_statics,
+ stream_out_bitmap (ob, info->statics_not_written, &ltrans_statics,
ltrans_statics_bitcount);
}
}
- BITMAP_FREE (ltrans_statics);
lto_destroy_simple_output_block (ob);
splay_tree_delete (reference_vars_to_consider);
}
diff --git a/gcc/ipa-split.c b/gcc/ipa-split.c
index 38bd88365d8..a11d57424dd 100644
--- a/gcc/ipa-split.c
+++ b/gcc/ipa-split.c
@@ -208,7 +208,7 @@ static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
basic_block return_bb)
{
- bitmap seen = BITMAP_ALLOC (NULL);
+ bitmap_head seen;
vec<basic_block> worklist = vNULL;
edge e;
edge_iterator ei;
@@ -219,7 +219,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
&& !bitmap_bit_p (current->split_bbs, e->src->index))
{
worklist.safe_push (e->src);
- bitmap_set_bit (seen, e->src->index);
+ bitmap_set_bit (&seen, e->src->index);
}
while (!worklist.is_empty ())
@@ -229,7 +229,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
- && bitmap_set_bit (seen, e->src->index))
+ && bitmap_set_bit (&seen, e->src->index))
{
gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
e->src->index));
@@ -287,7 +287,6 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
}
}
done:
- BITMAP_FREE (seen);
worklist.release ();
return ok;
}
diff --git a/gcc/ira-color.c b/gcc/ira-color.c
index c20aaf72dc6..00149174ba3 100644
--- a/gcc/ira-color.c
+++ b/gcc/ira-color.c
@@ -4330,20 +4330,20 @@ ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
bool changed_p;
ira_allocno_t a;
HARD_REG_SET forbidden_regs;
- bitmap temp = BITMAP_ALLOC (NULL);
+ bitmap_head temp;
/* Add pseudos which conflict with pseudos already in
SPILLED_PSEUDO_REGS to SPILLED_PSEUDO_REGS. This is preferable
to allocating in two steps as some of the conflicts might have
a higher priority than the pseudos passed in SPILLED_PSEUDO_REGS. */
for (i = 0; i < num; i++)
- bitmap_set_bit (temp, spilled_pseudo_regs[i]);
+ bitmap_set_bit (&temp, spilled_pseudo_regs[i]);
for (i = 0, n = num; i < n; i++)
{
int nr, j;
int regno = spilled_pseudo_regs[i];
- bitmap_set_bit (temp, regno);
+ bitmap_set_bit (&temp, regno);
a = ira_regno_allocno_map[regno];
nr = ALLOCNO_NUM_OBJECTS (a);
@@ -4358,7 +4358,7 @@ ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
ira_allocno_t conflict_a = OBJECT_ALLOCNO (conflict_obj);
if (ALLOCNO_HARD_REGNO (conflict_a) < 0
&& ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
- && bitmap_set_bit (temp, ALLOCNO_REGNO (conflict_a)))
+ && bitmap_set_bit (&temp, ALLOCNO_REGNO (conflict_a)))
{
spilled_pseudo_regs[num++] = ALLOCNO_REGNO (conflict_a);
/* ?!? This seems wrong. */
@@ -4396,7 +4396,6 @@ ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
changed_p = true;
}
}
- BITMAP_FREE (temp);
return changed_p;
}
diff --git a/gcc/ira.c b/gcc/ira.c
index 7c49b7f6b74..04ad3a7f351 100644
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -3453,9 +3453,9 @@ static int
update_equiv_regs (void)
{
rtx insn;
+ bitmap_head cleared_regs;
basic_block bb;
int loop_depth;
- bitmap cleared_regs;
bool *pdx_subregs;
/* We need to keep track of whether or not we recorded a LABEL_REF so
@@ -3765,7 +3765,6 @@ update_equiv_regs (void)
}
}
- cleared_regs = BITMAP_ALLOC (NULL);
/* Now scan all regs killed in an insn to see if any of them are
registers only used that once. If so, see if we can replace the
reference with the equivalent form. If we can, delete the
@@ -3864,7 +3863,7 @@ update_equiv_regs (void)
= XEXP (reg_equiv[regno].init_insns, 1);
ira_reg_equiv[regno].init_insns = NULL_RTX;
- bitmap_set_bit (cleared_regs, regno);
+ bitmap_set_bit (&cleared_regs, regno);
}
/* Move the initialization of the register to just before
INSN. Update the flow information. */
@@ -3898,23 +3897,23 @@ update_equiv_regs (void)
ira_reg_equiv[regno].init_insns
= gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
- bitmap_set_bit (cleared_regs, regno);
+ bitmap_set_bit (&cleared_regs, regno);
}
}
}
}
}
- if (!bitmap_empty_p (cleared_regs))
+ if (!bitmap_empty_p (&cleared_regs))
{
FOR_EACH_BB_FN (bb, cfun)
{
- bitmap_and_compl_into (DF_LR_IN (bb), cleared_regs);
- bitmap_and_compl_into (DF_LR_OUT (bb), cleared_regs);
+ bitmap_and_compl_into (DF_LR_IN (bb), &cleared_regs);
+ bitmap_and_compl_into (DF_LR_OUT (bb), &cleared_regs);
if (! df_live)
continue;
- bitmap_and_compl_into (DF_LIVE_IN (bb), cleared_regs);
- bitmap_and_compl_into (DF_LIVE_OUT (bb), cleared_regs);
+ bitmap_and_compl_into (DF_LIVE_IN (bb), &cleared_regs);
+ bitmap_and_compl_into (DF_LIVE_OUT (bb), &cleared_regs);
}
/* Last pass - adjust debug insns referencing cleared regs. */
@@ -3926,14 +3925,12 @@ update_equiv_regs (void)
INSN_VAR_LOCATION_LOC (insn)
= simplify_replace_fn_rtx (old_loc, NULL_RTX,
adjust_cleared_regs,
- (void *) cleared_regs);
+ (void *) &cleared_regs);
if (old_loc != INSN_VAR_LOCATION_LOC (insn))
df_insn_rescan (insn);
}
}
- BITMAP_FREE (cleared_regs);
-
out:
/* Clean up. */
@@ -4114,8 +4111,8 @@ build_insn_chain (void)
basic_block bb;
struct insn_chain *c = NULL;
struct insn_chain *next = NULL;
- bitmap live_relevant_regs = BITMAP_ALLOC (NULL);
- bitmap elim_regset = BITMAP_ALLOC (NULL);
+ bitmap_head live_relevant_regs;
+ bitmap_head elim_regset;
/* live_subregs is a vector used to keep accurate information about
which hardregs are live in multiword pseudos. live_subregs and
live_subregs_used are indexed by pseudo number. The live_subreg
@@ -4124,31 +4121,31 @@ build_insn_chain (void)
live_subreg[allocno] is number of bytes that the pseudo can
occupy. */
sbitmap *live_subregs = XCNEWVEC (sbitmap, max_regno);
- bitmap live_subregs_used = BITMAP_ALLOC (NULL);
+ bitmap_head live_subregs_used;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (eliminable_regset, i))
- bitmap_set_bit (elim_regset, i);
+ bitmap_set_bit (&elim_regset, i);
FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
bitmap_iterator bi;
rtx insn;
- CLEAR_REG_SET (live_relevant_regs);
- bitmap_clear (live_subregs_used);
+ CLEAR_REG_SET (&live_relevant_regs);
+ bitmap_clear (&live_subregs_used);
EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), 0, i, bi)
{
if (i >= FIRST_PSEUDO_REGISTER)
break;
- bitmap_set_bit (live_relevant_regs, i);
+ bitmap_set_bit (&live_relevant_regs, i);
}
EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb),
FIRST_PSEUDO_REGISTER, i, bi)
{
if (pseudo_for_reload_consideration_p (i))
- bitmap_set_bit (live_relevant_regs, i);
+ bitmap_set_bit (&live_relevant_regs, i);
}
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -4205,8 +4202,8 @@ build_insn_chain (void)
+ GET_MODE_SIZE (GET_MODE (reg));
init_live_subregs
- (bitmap_bit_p (live_relevant_regs, regno),
- live_subregs, live_subregs_used, regno, reg);
+ (bitmap_bit_p (&live_relevant_regs, regno),
+ live_subregs, &live_subregs_used, regno, reg);
if (!DF_REF_FLAGS_IS_SET
(def, DF_REF_STRICT_LOW_PART))
@@ -4231,14 +4228,14 @@ build_insn_chain (void)
if (bitmap_empty_p (live_subregs[regno]))
{
- bitmap_clear_bit (live_subregs_used, regno);
- bitmap_clear_bit (live_relevant_regs, regno);
+ bitmap_clear_bit (&live_subregs_used, regno);
+ bitmap_clear_bit (&live_relevant_regs, regno);
}
else
/* Set live_relevant_regs here because
that bit has to be true to get us to
look at the live_subregs fields. */
- bitmap_set_bit (live_relevant_regs, regno);
+ bitmap_set_bit (&live_relevant_regs, regno);
}
else
{
@@ -4249,15 +4246,15 @@ build_insn_chain (void)
modeling the def as a killing def. */
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL))
{
- bitmap_clear_bit (live_subregs_used, regno);
- bitmap_clear_bit (live_relevant_regs, regno);
+ bitmap_clear_bit (&live_subregs_used, regno);
+ bitmap_clear_bit (&live_relevant_regs, regno);
}
}
}
}
- bitmap_and_compl_into (live_relevant_regs, elim_regset);
- bitmap_copy (&c->live_throughout, live_relevant_regs);
+ bitmap_and_compl_into (&live_relevant_regs, &elim_regset);
+ bitmap_copy (&c->live_throughout, &live_relevant_regs);
if (NONDEBUG_INSN_P (insn))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
@@ -4278,7 +4275,7 @@ build_insn_chain (void)
continue;
/* Add the last use of each var to dead_or_set. */
- if (!bitmap_bit_p (live_relevant_regs, regno))
+ if (!bitmap_bit_p (&live_relevant_regs, regno))
{
if (regno < FIRST_PSEUDO_REGISTER)
{
@@ -4302,8 +4299,8 @@ build_insn_chain (void)
+ GET_MODE_SIZE (GET_MODE (reg));
init_live_subregs
- (bitmap_bit_p (live_relevant_regs, regno),
- live_subregs, live_subregs_used, regno, reg);
+ (bitmap_bit_p (&live_relevant_regs, regno),
+ live_subregs, &live_subregs_used, regno, reg);
/* Ignore the paradoxical bits. */
if (last > SBITMAP_SIZE (live_subregs[regno]))
@@ -4320,8 +4317,8 @@ build_insn_chain (void)
effectively saying do not use the subregs
because we are reading the whole
pseudo. */
- bitmap_clear_bit (live_subregs_used, regno);
- bitmap_set_bit (live_relevant_regs, regno);
+ bitmap_clear_bit (&live_subregs_used, regno);
+ bitmap_set_bit (&live_relevant_regs, regno);
}
}
}
@@ -4357,7 +4354,7 @@ build_insn_chain (void)
code did. */
c->block = bb->index;
c->insn = insn;
- bitmap_copy (&c->live_throughout, live_relevant_regs);
+ bitmap_copy (&c->live_throughout, &live_relevant_regs);
}
insn = PREV_INSN (insn);
}
@@ -4370,9 +4367,6 @@ build_insn_chain (void)
if (live_subregs[i] != NULL)
sbitmap_free (live_subregs[i]);
free (live_subregs);
- BITMAP_FREE (live_subregs_used);
- BITMAP_FREE (live_relevant_regs);
- BITMAP_FREE (elim_regset);
if (dump_file)
print_insn_chains (dump_file);
diff --git a/gcc/loop-invariant.c b/gcc/loop-invariant.c
index 100a2c1b7ff..f1f1e9df178 100644
--- a/gcc/loop-invariant.c
+++ b/gcc/loop-invariant.c
@@ -986,24 +986,20 @@ find_invariants_body (struct loop *loop, basic_block *body,
static void
find_invariants (struct loop *loop)
{
- bitmap may_exit = BITMAP_ALLOC (NULL);
- bitmap always_reached = BITMAP_ALLOC (NULL);
- bitmap has_exit = BITMAP_ALLOC (NULL);
- bitmap always_executed = BITMAP_ALLOC (NULL);
+ bitmap_head may_exit;
+ bitmap_head always_reached;
+ bitmap_head has_exit;
+ bitmap_head always_executed;
basic_block *body = get_loop_body_in_dom_order (loop);
- find_exits (loop, body, may_exit, has_exit);
- compute_always_reached (loop, body, may_exit, always_reached);
- compute_always_reached (loop, body, has_exit, always_executed);
+ find_exits (loop, body, &may_exit, &has_exit);
+ compute_always_reached (loop, body, &may_exit, &always_reached);
+ compute_always_reached (loop, body, &has_exit, &always_executed);
find_defs (loop);
- find_invariants_body (loop, body, always_reached, always_executed);
+ find_invariants_body (loop, body, &always_reached, &always_executed);
merge_identical_invariants ();
- BITMAP_FREE (always_reached);
- BITMAP_FREE (always_executed);
- BITMAP_FREE (may_exit);
- BITMAP_FREE (has_exit);
free (body);
}
diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
index a1331c00507..a6982cdfc55 100644
--- a/gcc/lower-subreg.c
+++ b/gcc/lower-subreg.c
@@ -412,33 +412,27 @@ find_pseudo_copy (rtx set)
static void
propagate_pseudo_copies (void)
{
- bitmap queue, propagate;
+ bitmap_head queue, propagate;
- queue = BITMAP_ALLOC (NULL);
- propagate = BITMAP_ALLOC (NULL);
-
- bitmap_copy (queue, decomposable_context);
+ bitmap_copy (&queue, decomposable_context);
do
{
bitmap_iterator iter;
unsigned int i;
- bitmap_clear (propagate);
+ bitmap_clear (&propagate);
- EXECUTE_IF_SET_IN_BITMAP (queue, 0, i, iter)
+ EXECUTE_IF_SET_IN_BITMAP (&queue, 0, i, iter)
{
bitmap b = reg_copy_graph[i];
if (b)
- bitmap_ior_and_compl_into (propagate, b, non_decomposable_context);
+ bitmap_ior_and_compl_into (&propagate, b, non_decomposable_context);
}
- bitmap_and_compl (queue, propagate, decomposable_context);
- bitmap_ior_into (decomposable_context, propagate);
+ bitmap_and_compl (&queue, &propagate, decomposable_context);
+ bitmap_ior_into (decomposable_context, &propagate);
}
- while (!bitmap_empty_p (queue));
-
- BITMAP_FREE (queue);
- BITMAP_FREE (propagate);
+ while (!bitmap_empty_p (&queue));
}
/* A pointer to one of these values is passed to
diff --git a/gcc/lra-assigns.c b/gcc/lra-assigns.c
index 268edccd606..dcf49393947 100644
--- a/gcc/lra-assigns.c
+++ b/gcc/lra-assigns.c
@@ -743,7 +743,7 @@ static int curr_pseudo_check;
/* Array used for validity of elements in TRY_HARD_REG_PSEUDOS. */
static int try_hard_reg_pseudos_check[FIRST_PSEUDO_REGISTER];
/* Pseudos who hold given hard register at the considered points. */
-static bitmap_head try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
+static bitmap try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
/* Set up try_hard_reg_pseudos for given program point P and class
RCLASS. Those are pseudos living at P and assigned to a hard
@@ -773,9 +773,9 @@ setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
{
try_hard_reg_pseudos_check[hard_regno + i]
= curr_pseudo_check;
- bitmap_clear (&try_hard_reg_pseudos[hard_regno + i]);
+ bitmap_clear (try_hard_reg_pseudos[hard_regno + i]);
}
- bitmap_set_bit (&try_hard_reg_pseudos[hard_regno + i],
+ bitmap_set_bit (try_hard_reg_pseudos[hard_regno + i],
spill_regno);
}
}
@@ -858,9 +858,9 @@ spill_for (int regno, bitmap spilled_pseudo_bitmap)
{
if (try_hard_reg_pseudos_check[hard_regno + j] != curr_pseudo_check)
continue;
- lra_assert (!bitmap_empty_p (&try_hard_reg_pseudos[hard_regno + j]));
+ lra_assert (!bitmap_empty_p (try_hard_reg_pseudos[hard_regno + j]));
bitmap_ior_into (&spill_pseudos_bitmap,
- &try_hard_reg_pseudos[hard_regno + j]);
+ try_hard_reg_pseudos[hard_regno + j]);
}
/* Spill pseudos. */
EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
@@ -1187,7 +1187,7 @@ assign_by_spills (void)
curr_update_hard_regno_preference_check = 0;
memset (try_hard_reg_pseudos_check, 0, sizeof (try_hard_reg_pseudos_check));
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- bitmap_initialize (&try_hard_reg_pseudos[i], &reg_obstack);
+ try_hard_reg_pseudos[i] = BITMAP_ALLOC (&reg_obstack);
curr_pseudo_check = 0;
bitmap_initialize (&changed_insns, &reg_obstack);
bitmap_initialize (&non_reload_pseudos, &reg_obstack);
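Note that this hunk runs in the opposite direction from the rest of the patch: the file-scope array of bitmap_head objects becomes an array of plain bitmap pointers that are BITMAP_ALLOC'd when the pass sets up, presumably to keep objects with the new non-trivial constructor and destructor out of static storage, where they would need dynamic initialization and teardown. In outline (names as in the hunk above):

    /* Before: heads with static storage, initialized by hand.  */
    static bitmap_head try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
      /* ... in assign_by_spills:  */
      bitmap_initialize (&try_hard_reg_pseudos[i], &reg_obstack);

    /* After: only pointers at file scope; each head is obtained from the
       reg_obstack at pass start, so no static ctors or dtors are needed.  */
    static bitmap try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
      /* ... in assign_by_spills:  */
      try_hard_reg_pseudos[i] = BITMAP_ALLOC (&reg_obstack);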
diff --git a/gcc/lra-constraints.c b/gcc/lra-constraints.c
index d3831380fee..4d2ed162cf9 100644
--- a/gcc/lra-constraints.c
+++ b/gcc/lra-constraints.c
@@ -3961,7 +3961,6 @@ lra_constraints (bool first_p)
unsigned int min_len, new_min_len, uid;
rtx set, x, reg, dest_reg;
basic_block last_bb;
- bitmap_head equiv_insn_bitmap;
bitmap_iterator bi;
lra_constraint_iter++;
@@ -3991,7 +3990,7 @@ lra_constraints (bool first_p)
/* Do elimination before the equivalence processing as we can spill
some pseudos during elimination. */
lra_eliminate (false, first_p);
- bitmap_initialize (&equiv_insn_bitmap, &reg_obstack);
+ bitmap_head equiv_insn_bitmap (&reg_obstack);
for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
if (lra_reg_info[i].nrefs != 0)
{
@@ -5721,9 +5720,8 @@ undo_optional_reloads (void)
unsigned int regno, uid;
bitmap_iterator bi, bi2;
rtx insn, set, src, dest;
- bitmap_head removed_optional_reload_pseudos, insn_bitmap;
+ bitmap_head removed_optional_reload_pseudos (&reg_obstack);
- bitmap_initialize (&removed_optional_reload_pseudos, &reg_obstack);
bitmap_copy (&removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
{
@@ -5765,7 +5763,7 @@ undo_optional_reloads (void)
}
}
change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
- bitmap_initialize (&insn_bitmap, &reg_obstack);
+ bitmap_head insn_bitmap (&reg_obstack);
EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
{
if (lra_dump_file != NULL)
@@ -5815,8 +5813,6 @@ undo_optional_reloads (void)
/* Clear restore_regnos. */
EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
lra_reg_info[regno].restore_regno = -1;
- bitmap_clear (&insn_bitmap);
- bitmap_clear (&removed_optional_reload_pseudos);
return change_p;
}
@@ -5828,7 +5824,6 @@ lra_undo_inheritance (void)
unsigned int regno;
int restore_regno, hard_regno;
int n_all_inherit, n_inherit, n_all_split, n_split;
- bitmap_head remove_pseudos;
bitmap_iterator bi;
bool change_p;
@@ -5839,7 +5834,7 @@ lra_undo_inheritance (void)
fprintf (lra_dump_file,
"\n********** Undoing inheritance #%d: **********\n\n",
lra_undo_inheritance_iter);
- bitmap_initialize (&remove_pseudos, &reg_obstack);
+ bitmap_head remove_pseudos (&reg_obstack);
n_inherit = n_all_inherit = 0;
EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
if (lra_reg_info[regno].restore_regno >= 0)
@@ -5880,7 +5875,6 @@ lra_undo_inheritance (void)
n_split, n_all_split,
(double) n_split / n_all_split * 100);
change_p = remove_inheritance_pseudos (&remove_pseudos);
- bitmap_clear (&remove_pseudos);
/* Clear restore_regnos. */
EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
lra_reg_info[regno].restore_regno = -1;
diff --git a/gcc/lra-eliminations.c b/gcc/lra-eliminations.c
index abdf6973310..428dca2bd7c 100644
--- a/gcc/lra-eliminations.c
+++ b/gcc/lra-eliminations.c
@@ -1075,7 +1075,6 @@ static void
spill_pseudos (HARD_REG_SET set)
{
int i;
- bitmap_head to_process;
rtx insn;
if (hard_reg_set_empty_p (set))
@@ -1088,7 +1087,7 @@ spill_pseudos (HARD_REG_SET set)
fprintf (lra_dump_file, " %d", i);
fprintf (lra_dump_file, "\n");
}
- bitmap_initialize (&to_process, &reg_obstack);
+ bitmap_head to_process (&reg_obstack);
for (i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
&& overlaps_hard_reg_set_p (set,
@@ -1107,7 +1106,6 @@ spill_pseudos (HARD_REG_SET set)
lra_push_insn (insn);
lra_set_used_insn_alternative (insn, -1);
}
- bitmap_clear (&to_process);
}
/* Update all offsets and possibility for elimination on eliminable
@@ -1357,7 +1355,6 @@ void
lra_eliminate (bool final_p, bool first_p)
{
unsigned int uid;
- bitmap_head insns_with_changed_offsets;
bitmap_iterator bi;
struct elim_table *ep;
@@ -1368,7 +1365,7 @@ lra_eliminate (bool final_p, bool first_p)
if (first_p)
init_elimination ();
- bitmap_initialize (&insns_with_changed_offsets, &reg_obstack);
+ bitmap_head insns_with_changed_offsets (&reg_obstack);
if (final_p)
{
#ifdef ENABLE_CHECKING
@@ -1396,7 +1393,6 @@ lra_eliminate (bool final_p, bool first_p)
if (lra_insn_recog_data[uid] != NULL)
process_insn_for_elimination (lra_insn_recog_data[uid]->insn,
final_p, first_p);
- bitmap_clear (&insns_with_changed_offsets);
lra_eliminate_done:
timevar_pop (TV_LRA_ELIMINATE);
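Throughout the LRA files the obstack-backed locals collapse from the old three-step idiom (declare the head at the top of the function, bitmap_initialize it against reg_obstack, bitmap_clear it before returning) into a single declaration at the point of first use, assuming the bitmap_head (bitmap_obstack *) constructor and the releasing destructor sketched earlier:

    /* Old idiom, spread across the function:  */
    bitmap_head to_process;
    ...
    bitmap_initialize (&to_process, &reg_obstack);
    ...
    bitmap_clear (&to_process);

    /* New idiom: one declaration; the destructor does the clearing when
       to_process goes out of scope.  */
    bitmap_head to_process (&reg_obstack);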
diff --git a/gcc/predict.c b/gcc/predict.c
index db5eed910a3..22846e1438a 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -2006,7 +2006,6 @@ tree_predict_by_opcode (basic_block bb)
tree type;
tree val;
enum tree_code cmp;
- bitmap visited;
edge_iterator ei;
if (!stmt || gimple_code (stmt) != GIMPLE_COND)
@@ -2018,9 +2017,8 @@ tree_predict_by_opcode (basic_block bb)
op1 = gimple_cond_rhs (stmt);
cmp = gimple_cond_code (stmt);
type = TREE_TYPE (op0);
- visited = BITMAP_ALLOC (NULL);
- val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited);
- BITMAP_FREE (visited);
+ bitmap_head visited;
+ val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, &visited);
if (val)
{
int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY);
@@ -2512,9 +2510,8 @@ static void
predict_paths_leading_to (basic_block bb, enum br_predictor pred,
enum prediction taken)
{
- bitmap visited = BITMAP_ALLOC (NULL);
- predict_paths_for_bb (bb, bb, pred, taken, visited);
- BITMAP_FREE (visited);
+ bitmap_head visited;
+ predict_paths_for_bb (bb, bb, pred, taken, &visited);
}
/* Like predict_paths_leading_to but take edge instead of basic block. */
@@ -2538,9 +2535,8 @@ predict_paths_leading_to_edge (edge e, enum br_predictor pred,
}
if (!has_nonloop_edge)
{
- bitmap visited = BITMAP_ALLOC (NULL);
- predict_paths_for_bb (bb, bb, pred, taken, visited);
- BITMAP_FREE (visited);
+ bitmap_head visited;
+ predict_paths_for_bb (bb, bb, pred, taken, &visited);
}
else
predict_edge_def (e, pred, taken);
@@ -2727,7 +2723,7 @@ estimate_loops_at_level (struct loop *first_loop)
edge e;
basic_block *bbs;
unsigned i;
- bitmap tovisit = BITMAP_ALLOC (NULL);
+ bitmap_head tovisit;
estimate_loops_at_level (loop->inner);
@@ -2737,10 +2733,9 @@ estimate_loops_at_level (struct loop *first_loop)
bbs = get_loop_body (loop);
for (i = 0; i < loop->num_nodes; i++)
- bitmap_set_bit (tovisit, bbs[i]->index);
+ bitmap_set_bit (&tovisit, bbs[i]->index);
free (bbs);
- propagate_freq (loop->header, tovisit);
- BITMAP_FREE (tovisit);
+ propagate_freq (loop->header, &tovisit);
}
}
@@ -2749,7 +2744,7 @@ estimate_loops_at_level (struct loop *first_loop)
static void
estimate_loops (void)
{
- bitmap tovisit = BITMAP_ALLOC (NULL);
+ bitmap_head tovisit;
basic_block bb;
/* Start by estimating the frequencies in the loops. */
@@ -2759,10 +2754,9 @@ estimate_loops (void)
/* Now propagate the frequencies through all the blocks. */
FOR_ALL_BB_FN (bb, cfun)
{
- bitmap_set_bit (tovisit, bb->index);
+ bitmap_set_bit (&tovisit, bb->index);
}
- propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
- BITMAP_FREE (tovisit);
+ propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), &tovisit);
}
/* Drop the profile for NODE to guessed, and update its frequency based on
diff --git a/gcc/recog.c b/gcc/recog.c
index f9040dcde75..5c550752223 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -3616,7 +3616,6 @@ static void
peephole2_optimize (void)
{
rtx insn;
- bitmap live;
int i;
basic_block bb;
@@ -3631,7 +3630,7 @@ peephole2_optimize (void)
for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
search_ofs = 0;
- live = BITMAP_ALLOC (&reg_obstack);
+ bitmap_head live;
FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
@@ -3641,9 +3640,9 @@ peephole2_optimize (void)
rtl_profile_for_bb (bb);
/* Start up propagation. */
- bitmap_copy (live, DF_LR_IN (bb));
- df_simulate_initialize_forwards (bb, live);
- peep2_reinit_state (live);
+ bitmap_copy (&live, DF_LR_IN (bb));
+ df_simulate_initialize_forwards (bb, &live);
+ peep2_reinit_state (&live);
insn = BB_HEAD (bb);
for (;;)
@@ -3659,7 +3658,7 @@ peephole2_optimize (void)
past_end = true;
continue;
}
- if (!past_end && peep2_fill_buffer (bb, insn, live))
+ if (!past_end && peep2_fill_buffer (bb, insn, &live))
goto next_insn;
/* If we did not fill an empty buffer, it signals the end of the
@@ -3671,7 +3670,7 @@ peephole2_optimize (void)
pos = peep2_buf_position (peep2_current + peep2_current_count);
peep2_insn_data[pos].insn = PEEP2_EOB;
- COPY_REG_SET (peep2_insn_data[pos].live_before, live);
+ COPY_REG_SET (peep2_insn_data[pos].live_before, &live);
/* Match the peephole. */
head = peep2_insn_data[peep2_current].insn;
@@ -3695,7 +3694,6 @@ peephole2_optimize (void)
default_rtl_profile ();
for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
BITMAP_FREE (peep2_insn_data[i].live_before);
- BITMAP_FREE (live);
if (peep2_do_rebuild_jump_labels)
rebuild_jump_labels (get_insns ());
}
diff --git a/gcc/reginfo.c b/gcc/reginfo.c
index bdb980da3e1..df7f0306983 100644
--- a/gcc/reginfo.c
+++ b/gcc/reginfo.c
@@ -1259,19 +1259,18 @@ init_subregs_of_mode (void)
basic_block bb;
rtx insn;
bitmap_obstack srom_obstack;
- bitmap subregs_of_mode;
gcc_assert (invalid_mode_changes == NULL);
invalid_mode_changes = BITMAP_ALLOC (NULL);
bitmap_obstack_initialize (&srom_obstack);
- subregs_of_mode = BITMAP_ALLOC (&srom_obstack);
+ bitmap_head subregs_of_mode (&srom_obstack);
FOR_EACH_BB_FN (bb, cfun)
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
- find_subregs_of_mode (PATTERN (insn), subregs_of_mode);
+ find_subregs_of_mode (PATTERN (insn), &subregs_of_mode);
- BITMAP_FREE (subregs_of_mode);
+ bitmap_clear (&subregs_of_mode);
bitmap_obstack_release (&srom_obstack);
}
diff --git a/gcc/regrename.c b/gcc/regrename.c
index 6517f4e384d..fef5eb079ef 100644
--- a/gcc/regrename.c
+++ b/gcc/regrename.c
@@ -122,7 +122,7 @@ static struct du_head *open_chains;
/* Bitmap of open chains. The bits set always match the list found in
open_chains. */
-static bitmap_head open_chains_set;
+static bitmap_head *open_chains_set;
/* Record the registers being tracked in open_chains. */
static HARD_REG_SET live_in_chains;
@@ -235,7 +235,7 @@ create_new_chain (unsigned this_regno, unsigned this_nregs, rtx *loc,
head->id = current_id++;
bitmap_initialize (&head->conflicts, &bitmap_default_obstack);
- bitmap_copy (&head->conflicts, &open_chains_set);
+ bitmap_copy (&head->conflicts, open_chains_set);
mark_conflict (open_chains, head->id);
/* Since we're tracking this as a chain now, remove it from the
@@ -249,7 +249,7 @@ create_new_chain (unsigned this_regno, unsigned this_nregs, rtx *loc,
}
COPY_HARD_REG_SET (head->hard_conflicts, live_hard_regs);
- bitmap_set_bit (&open_chains_set, head->id);
+ bitmap_set_bit (open_chains_set, head->id);
open_chains = head;
@@ -541,7 +541,7 @@ init_rename_info (struct bb_rename_info *p, basic_block bb)
bitmap_initialize (&p->incoming_open_chains_set, &bitmap_default_obstack);
open_chains = NULL;
- bitmap_clear (&open_chains_set);
+ bitmap_clear (open_chains_set);
CLEAR_HARD_REG_SET (live_in_chains);
REG_SET_TO_HARD_REG_SET (live_hard_regs, df_get_live_in (bb));
@@ -687,7 +687,7 @@ regrename_analyze (bitmap bb_mask)
current_id = 0;
id_to_chain.create (0);
- bitmap_initialize (&open_chains_set, &bitmap_default_obstack);
+ open_chains_set = new bitmap_head;
/* The order in which we visit blocks ensures that whenever
possible, we only process a block after at least one of its
@@ -736,7 +736,7 @@ regrename_analyze (bitmap bb_mask)
if (dump_file)
dump_def_use_chain (old_length);
- bitmap_copy (&this_info->open_chains_set, &open_chains_set);
+ bitmap_copy (&this_info->open_chains_set, open_chains_set);
/* Add successor blocks to the worklist if necessary, and record
data about our own open chains at the end of this block, which
@@ -1053,7 +1053,7 @@ scan_rtx_reg (rtx insn, rtx *loc, enum reg_class cl, enum scan_actions action,
int subset = (this_regno >= head->regno
&& this_regno + this_nregs <= head->regno + head->nregs);
- if (!bitmap_bit_p (&open_chains_set, head->id)
+ if (!bitmap_bit_p (open_chains_set, head->id)
|| head->regno + head->nregs <= this_regno
|| this_regno + this_nregs <= head->regno)
{
@@ -1131,7 +1131,7 @@ scan_rtx_reg (rtx insn, rtx *loc, enum reg_class cl, enum scan_actions action,
if (subset && !superset)
head->cannot_rename = 1;
- bitmap_clear_bit (&open_chains_set, head->id);
+ bitmap_clear_bit (open_chains_set, head->id);
nregs = head->nregs;
while (nregs-- > 0)
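As in lra-assigns.c, a bitmap_head with static storage duration is demoted to a pointer, this time a single global rather than an array, and it is heap-allocated with new (relying on the new constructor) instead of BITMAP_ALLOC. In the hunks shown here nothing deletes it; the object simply lives for the duration of the pass:

    /* Before: a global head, initialized in regrename_analyze.  */
    static bitmap_head open_chains_set;
    ...
    bitmap_initialize (&open_chains_set, &bitmap_default_obstack);

    /* After: a global pointer, created once in regrename_analyze.  */
    static bitmap_head *open_chains_set;
    ...
    open_chains_set = new bitmap_head;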
diff --git a/gcc/regstat.c b/gcc/regstat.c
index 75d9cb446ba..4b158853255 100644
--- a/gcc/regstat.c
+++ b/gcc/regstat.c
@@ -356,10 +356,10 @@ void
regstat_compute_ri (void)
{
basic_block bb;
- bitmap live = BITMAP_ALLOC (&df_bitmap_obstack);
- bitmap artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
- bitmap local_live = BITMAP_ALLOC (&df_bitmap_obstack);
- bitmap local_processed = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head live (&df_bitmap_obstack);
+ bitmap_head artificial_uses (&df_bitmap_obstack);
+ bitmap_head local_live (&df_bitmap_obstack);
+ bitmap_head local_processed (&df_bitmap_obstack);
unsigned int regno;
bitmap_iterator bi;
int *local_live_last_luid;
@@ -377,15 +377,11 @@ regstat_compute_ri (void)
FOR_EACH_BB_FN (bb, cfun)
{
- regstat_bb_compute_ri (bb->index, live, artificial_uses,
- local_live, local_processed,
+ regstat_bb_compute_ri (bb->index, &live, &artificial_uses,
+ &local_live, &local_processed,
local_live_last_luid);
}
- BITMAP_FREE (live);
- BITMAP_FREE (artificial_uses);
- BITMAP_FREE (local_live);
- BITMAP_FREE (local_processed);
free (local_live_last_luid);
/* See the setjmp comment in regstat_bb_compute_ri. */
@@ -512,7 +508,7 @@ void
regstat_compute_calls_crossed (void)
{
basic_block bb;
- bitmap live = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head live (&df_bitmap_obstack);
/* Initialize everything. */
gcc_assert (!reg_info_p);
@@ -524,10 +520,9 @@ regstat_compute_calls_crossed (void)
FOR_EACH_BB_FN (bb, cfun)
{
- regstat_bb_compute_calls_crossed (bb->index, live);
+ regstat_bb_compute_calls_crossed (bb->index, &live);
}
- BITMAP_FREE (live);
timevar_pop (TV_REG_STATS);
}
diff --git a/gcc/sel-sched.c b/gcc/sel-sched.c
index 490f3610aea..3b25331bb3f 100644
--- a/gcc/sel-sched.c
+++ b/gcc/sel-sched.c
@@ -7286,11 +7286,11 @@ static void
sel_region_target_finish (bool reset_sched_cycles_p)
{
int i;
- bitmap scheduled_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head scheduled_blocks;
for (i = 0; i < current_nr_blocks; i++)
{
- if (bitmap_bit_p (scheduled_blocks, i))
+ if (bitmap_bit_p (&scheduled_blocks, i))
continue;
/* While pipelining outer loops, skip bundling for loop
@@ -7298,7 +7298,7 @@ sel_region_target_finish (bool reset_sched_cycles_p)
if (sel_is_loop_preheader_p (EBB_FIRST_BB (i)))
continue;
- find_ebb_boundaries (EBB_FIRST_BB (i), scheduled_blocks);
+ find_ebb_boundaries (EBB_FIRST_BB (i), &scheduled_blocks);
if (no_real_insns_p (current_sched_info->head, current_sched_info->tail))
continue;
@@ -7320,8 +7320,6 @@ sel_region_target_finish (bool reset_sched_cycles_p)
sched_extend_luids ();
}
}
-
- BITMAP_FREE (scheduled_blocks);
}
/* Free the scheduling data for the current region. When RESET_SCHED_CYCLES_P
diff --git a/gcc/sese.c b/gcc/sese.c
index 342c5e864d9..59f87cda4ff 100644
--- a/gcc/sese.c
+++ b/gcc/sese.c
@@ -369,14 +369,13 @@ sese_insert_phis_for_liveouts (sese region, basic_block bb,
{
unsigned i;
bitmap_iterator bi;
- bitmap liveouts = BITMAP_ALLOC (NULL);
+ bitmap_head liveouts;
update_ssa (TODO_update_ssa);
- sese_build_liveouts (region, liveouts);
- EXECUTE_IF_SET_IN_BITMAP (liveouts, 0, i, bi)
+ sese_build_liveouts (region, &liveouts);
+ EXECUTE_IF_SET_IN_BITMAP (&liveouts, 0, i, bi)
sese_add_exit_phis_edge (bb, ssa_name (i), false_e, true_e);
- BITMAP_FREE (liveouts);
update_ssa (TODO_update_ssa);
}
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index fe6dc2886d6..4d6915a603a 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -1947,7 +1947,7 @@ tm_region_init (struct tm_region *region)
edge e;
basic_block bb;
auto_vec<basic_block> queue;
- bitmap visited_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head visited_blocks;
struct tm_region *old_region;
auto_vec<tm_region_p> bb_regions;
@@ -1978,9 +1978,9 @@ tm_region_init (struct tm_region *region)
/* Process subsequent blocks. */
FOR_EACH_EDGE (e, ei, bb->succs)
- if (!bitmap_bit_p (visited_blocks, e->dest->index))
+ if (!bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (visited_blocks, e->dest->index);
+ bitmap_set_bit (&visited_blocks, e->dest->index);
queue.safe_push (e->dest);
/* If the current block started a new region, make sure that only
@@ -1993,7 +1993,6 @@ tm_region_init (struct tm_region *region)
}
}
while (!queue.is_empty ());
- BITMAP_FREE (visited_blocks);
}
/* The "gate" function for all transactional memory expansion and optimization
@@ -2532,11 +2531,11 @@ get_tm_region_blocks (basic_block entry_block,
unsigned i;
edge e;
edge_iterator ei;
- bitmap visited_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head visited_blocks;
i = 0;
bbs.safe_push (entry_block);
- bitmap_set_bit (visited_blocks, entry_block->index);
+ bitmap_set_bit (&visited_blocks, entry_block->index);
do
{
@@ -2554,18 +2553,17 @@ get_tm_region_blocks (basic_block entry_block,
FOR_EACH_EDGE (e, ei, bb->succs)
if ((include_uninstrumented_p
|| !(e->flags & EDGE_TM_UNINSTRUMENTED))
- && !bitmap_bit_p (visited_blocks, e->dest->index))
+ && !bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (visited_blocks, e->dest->index);
+ bitmap_set_bit (&visited_blocks, e->dest->index);
bbs.safe_push (e->dest);
}
}
while (i < bbs.length ());
if (all_region_blocks)
- bitmap_ior_into (all_region_blocks, visited_blocks);
+ bitmap_ior_into (all_region_blocks, &visited_blocks);
- BITMAP_FREE (visited_blocks);
return bbs;
}
@@ -4368,7 +4366,7 @@ ipa_tm_scan_irr_blocks (vec<basic_block> *pqueue, bitmap new_irr,
bool any_new_irr = false;
edge e;
edge_iterator ei;
- bitmap visited_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head visited_blocks;
do
{
@@ -4386,17 +4384,15 @@ ipa_tm_scan_irr_blocks (vec<basic_block> *pqueue, bitmap new_irr,
else if (exit_blocks == NULL || !bitmap_bit_p (exit_blocks, bb->index))
{
FOR_EACH_EDGE (e, ei, bb->succs)
- if (!bitmap_bit_p (visited_blocks, e->dest->index))
+ if (!bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (visited_blocks, e->dest->index);
+ bitmap_set_bit (&visited_blocks, e->dest->index);
pqueue->safe_push (e->dest);
}
}
}
while (!pqueue->is_empty ());
- BITMAP_FREE (visited_blocks);
-
return any_new_irr;
}
@@ -4411,15 +4407,14 @@ ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
bitmap old_irr, bitmap exit_blocks)
{
vec<basic_block> bbs;
- bitmap all_region_blocks;
/* If this block is in the old set, no need to rescan. */
if (old_irr && bitmap_bit_p (old_irr, entry_block->index))
return;
- all_region_blocks = BITMAP_ALLOC (&tm_obstack);
+ bitmap_head all_region_blocks (&tm_obstack);
bbs = get_tm_region_blocks (entry_block, exit_blocks, NULL,
- all_region_blocks, false);
+ &all_region_blocks, false);
do
{
basic_block bb = bbs.pop ();
@@ -4464,14 +4459,13 @@ ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
/* Make sure block is actually in a TM region, and it
isn't already in old_irr. */
if ((!old_irr || !bitmap_bit_p (old_irr, son->index))
- && bitmap_bit_p (all_region_blocks, son->index))
+ && bitmap_bit_p (&all_region_blocks, son->index))
bitmap_set_bit (new_irr, son->index);
}
}
}
while (!bbs.is_empty ());
- BITMAP_FREE (all_region_blocks);
bbs.release ();
}
@@ -5207,7 +5201,7 @@ ipa_tm_transform_calls (struct cgraph_node *node, struct tm_region *region,
edge e;
edge_iterator ei;
auto_vec<basic_block> queue;
- bitmap visited_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head visited_blocks;
queue.safe_push (bb);
do
@@ -5224,16 +5218,14 @@ ipa_tm_transform_calls (struct cgraph_node *node, struct tm_region *region,
continue;
FOR_EACH_EDGE (e, ei, bb->succs)
- if (!bitmap_bit_p (visited_blocks, e->dest->index))
+ if (!bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (visited_blocks, e->dest->index);
+ bitmap_set_bit (&visited_blocks, e->dest->index);
queue.safe_push (e->dest);
}
}
while (!queue.is_empty ());
- BITMAP_FREE (visited_blocks);
-
return need_ssa_rename;
}
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 3f17e1424f7..ed6e9596388 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -7637,7 +7637,6 @@ remove_edge_and_dominated_blocks (edge e)
{
vec<basic_block> bbs_to_remove = vNULL;
vec<basic_block> bbs_to_fix_dom = vNULL;
- bitmap df, df_idom;
edge f;
edge_iterator ei;
bool none_removed = false;
@@ -7679,11 +7678,10 @@ remove_edge_and_dominated_blocks (edge e)
}
}
- df = BITMAP_ALLOC (NULL);
- df_idom = BITMAP_ALLOC (NULL);
+ bitmap_head df, df_idom;
if (none_removed)
- bitmap_set_bit (df_idom,
+ bitmap_set_bit (&df_idom,
get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
else
{
@@ -7693,16 +7691,16 @@ remove_edge_and_dominated_blocks (edge e)
FOR_EACH_EDGE (f, ei, bb->succs)
{
if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
- bitmap_set_bit (df, f->dest->index);
+ bitmap_set_bit (&df, f->dest->index);
}
}
FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
- bitmap_clear_bit (df, bb->index);
+ bitmap_clear_bit (&df, bb->index);
- EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&df, 0, i, bi)
{
bb = BASIC_BLOCK_FOR_FN (cfun, i);
- bitmap_set_bit (df_idom,
+ bitmap_set_bit (&df_idom,
get_immediate_dominator (CDI_DOMINATORS, bb)->index);
}
}
@@ -7711,7 +7709,7 @@ remove_edge_and_dominated_blocks (edge e)
{
/* Record the set of the altered basic blocks. */
bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
- bitmap_ior_into (cfgcleanup_altered_bbs, df);
+ bitmap_ior_into (cfgcleanup_altered_bbs, &df);
}
/* Remove E and the cancelled blocks. */
@@ -7737,7 +7735,7 @@ remove_edge_and_dominated_blocks (edge e)
removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
dominates W, and because of P, Z does not dominate W), and W belongs to
the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
- EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&df_idom, 0, i, bi)
{
bb = BASIC_BLOCK_FOR_FN (cfun, i);
for (dbb = first_dom_son (CDI_DOMINATORS, bb);
@@ -7748,8 +7746,6 @@ remove_edge_and_dominated_blocks (edge e)
iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
- BITMAP_FREE (df);
- BITMAP_FREE (df_idom);
bbs_to_remove.release ();
bbs_to_fix_dom.release ();
}
diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index 922ae0d82a6..e0767a82b10 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -540,19 +540,18 @@ fixup_noreturn_call (gimple stmt)
bitmap_iterator bi;
unsigned int bb_index;
- bitmap blocks = BITMAP_ALLOC (NULL);
+ bitmap_head blocks;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
{
if (gimple_code (use_stmt) != GIMPLE_PHI)
- bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
+ bitmap_set_bit (&blocks, gimple_bb (use_stmt)->index);
else
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
SET_USE (use_p, error_mark_node);
}
- EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&blocks, 0, bb_index, bi)
delete_basic_block (BASIC_BLOCK_FOR_FN (cfun, bb_index));
- BITMAP_FREE (blocks);
release_ssa_name (op);
}
update_stmt (stmt);
@@ -725,14 +724,13 @@ cleanup_tree_cfg_noloop (void)
static void
repair_loop_structures (void)
{
- bitmap changed_bbs;
unsigned n_new_loops;
calculate_dominance_info (CDI_DOMINATORS);
timevar_push (TV_REPAIR_LOOPS);
- changed_bbs = BITMAP_ALLOC (NULL);
- n_new_loops = fix_loop_structure (changed_bbs);
+ bitmap_head changed_bbs;
+ n_new_loops = fix_loop_structure (&changed_bbs);
/* This usually does nothing. But sometimes parts of cfg that originally
were inside a loop get out of it due to edge removal (since they
@@ -740,11 +738,9 @@ repair_loop_structures (void)
irreducible loop can become reducible - in this case force a full
rewrite into loop-closed SSA form. */
if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
- rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
+ rewrite_into_loop_closed_ssa (n_new_loops ? NULL : &changed_bbs,
TODO_update_ssa);
- BITMAP_FREE (changed_bbs);
-
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index 06941cd1745..68accb6f1f3 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -4125,7 +4125,6 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
gimple_stmt_iterator ngsi, ogsi;
edge_iterator ei;
edge e;
- bitmap ophi_handled;
/* The destination block must not be a regular successor for any
of the preds of the landing pad. Thus, avoid turning
@@ -4146,7 +4145,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
FOR_EACH_EDGE (e, ei, old_bb->preds)
redirect_edge_var_map_clear (e);
- ophi_handled = BITMAP_ALLOC (NULL);
+ bitmap_head ophi_handled;
/* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
for the edges we're going to move. */
@@ -4186,7 +4185,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
goto fail;
}
}
- bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
+ bitmap_set_bit (&ophi_handled, SSA_NAME_VERSION (nop));
FOR_EACH_EDGE (e, ei, old_bb->preds)
{
location_t oloc;
@@ -4218,7 +4217,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
{
gimple ophi = gsi_stmt (ogsi);
tree oresult = gimple_phi_result (ophi);
- if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
+ if (!bitmap_bit_p (&ophi_handled, SSA_NAME_VERSION (oresult)))
goto fail;
}
@@ -4248,13 +4247,11 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
else
ei_next (&ei);
- BITMAP_FREE (ophi_handled);
return true;
fail:
FOR_EACH_EDGE (e, ei, old_bb->preds)
redirect_edge_var_map_clear (e);
- BITMAP_FREE (ophi_handled);
return false;
}
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 0dc340f15aa..191a7b5c450 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -994,12 +994,12 @@ if_convertible_bb_p (struct loop *loop, basic_block bb, basic_block exit_bb)
VISITED bitmap keeps track of the visited blocks. */
static bool
-pred_blocks_visited_p (basic_block bb, bitmap *visited)
+pred_blocks_visited_p (basic_block bb, bitmap visited)
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
- if (!bitmap_bit_p (*visited, e->src->index))
+ if (!bitmap_bit_p (visited, e->src->index))
return false;
return true;
@@ -1016,7 +1016,6 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
{
basic_block *blocks, *blocks_in_bfs_order;
basic_block bb;
- bitmap visited;
unsigned int index = 0;
unsigned int visited_count = 0;
@@ -1024,7 +1023,7 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
blocks = XCNEWVEC (basic_block, loop->num_nodes);
- visited = BITMAP_ALLOC (NULL);
+ bitmap_head visited;
blocks_in_bfs_order = get_loop_body_in_bfs_order (loop);
@@ -1036,18 +1035,17 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
if (bb->flags & BB_IRREDUCIBLE_LOOP)
{
free (blocks_in_bfs_order);
- BITMAP_FREE (visited);
free (blocks);
return NULL;
}
- if (!bitmap_bit_p (visited, bb->index))
+ if (!bitmap_bit_p (&visited, bb->index))
{
if (pred_blocks_visited_p (bb, &visited)
|| bb == loop->header)
{
/* This block is now visited. */
- bitmap_set_bit (visited, bb->index);
+ bitmap_set_bit (&visited, bb->index);
blocks[visited_count++] = bb;
}
}
@@ -1060,7 +1058,6 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
index = 0;
}
free (blocks_in_bfs_order);
- BITMAP_FREE (visited);
return blocks;
}
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index 3ca2bd10bcf..4f32025b178 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -770,11 +770,9 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
{
bitmap_iterator bi;
unsigned i, b, p, u, top;
- bitmap live_phis;
basic_block def_bb, use_bb;
edge e;
edge_iterator ei;
- bitmap to_remove;
struct dom_dfsnum *defs;
unsigned n_defs, adef;
@@ -786,14 +784,11 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
/* The phi must dominate a use, or an argument of a live phi. Also, we
do not create any phi nodes in def blocks, unless they are also livein. */
- to_remove = BITMAP_ALLOC (NULL);
- bitmap_and_compl (to_remove, kills, uses);
- bitmap_and_compl_into (phis, to_remove);
+ bitmap_head to_remove;
+ bitmap_and_compl (&to_remove, kills, uses);
+ bitmap_and_compl_into (phis, &to_remove);
if (bitmap_empty_p (phis))
- {
- BITMAP_FREE (to_remove);
- return;
- }
+ return;
/* We want to remove the unnecessary phi nodes, but we do not want to compute
liveness information, as that may be linear in the size of CFG, and if
@@ -813,13 +808,13 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
that contains entry and exit dfs numbers for the basic block with the use.
If we store the bounds for all the uses to an array and sort it, we can
locate the nearest dominating def in logarithmic time by binary search.*/
- bitmap_ior (to_remove, kills, phis);
- n_defs = bitmap_count_bits (to_remove);
+ bitmap_ior (&to_remove, kills, phis);
+ n_defs = bitmap_count_bits (&to_remove);
defs = XNEWVEC (struct dom_dfsnum, 2 * n_defs + 1);
defs[0].bb_index = 1;
defs[0].dfs_num = 0;
adef = 1;
- EXECUTE_IF_SET_IN_BITMAP (to_remove, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&to_remove, 0, i, bi)
{
def_bb = BASIC_BLOCK_FOR_FN (cfun, i);
defs[adef].bb_index = i;
@@ -828,7 +823,6 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
defs[adef + 1].dfs_num = bb_dom_dfs_out (CDI_DOMINATORS, def_bb);
adef += 2;
}
- BITMAP_FREE (to_remove);
gcc_assert (adef == 2 * n_defs + 1);
qsort (defs, adef, sizeof (struct dom_dfsnum), cmp_dfsnum);
gcc_assert (defs[0].bb_index == 1);
@@ -876,7 +870,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
gcc_assert (worklist.is_empty ());
/* Now process the uses. */
- live_phis = BITMAP_ALLOC (NULL);
+ bitmap_head live_phis;
EXECUTE_IF_SET_IN_BITMAP (uses, 0, i, bi)
{
worklist.safe_push (i);
@@ -904,7 +898,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
}
/* If the phi node is already live, there is nothing to do. */
- if (!bitmap_set_bit (live_phis, p))
+ if (!bitmap_set_bit (&live_phis, p))
continue;
/* Add the new uses to the worklist. */
@@ -927,8 +921,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
}
}
- bitmap_copy (phis, live_phis);
- BITMAP_FREE (live_phis);
+ bitmap_copy (phis, &live_phis);
free (defs);
}
@@ -2227,17 +2220,16 @@ private:
/* Notice that this bitmap is indexed using variable UIDs, so it must be
large enough to accommodate all the variables referenced in the
function, not just the ones we are renaming. */
- bitmap m_kills;
+ bitmap_head m_kills;
};
mark_def_dom_walker::mark_def_dom_walker (cdi_direction direction)
- : dom_walker (direction), m_kills (BITMAP_ALLOC (NULL))
+ : dom_walker (direction)
{
}
mark_def_dom_walker::~mark_def_dom_walker ()
{
- BITMAP_FREE (m_kills);
}
/* Block processing routine for mark_def_sites. Clear the KILLS bitmap
@@ -2248,9 +2240,9 @@ mark_def_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
- bitmap_clear (m_kills);
+ bitmap_clear (&m_kills);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- mark_def_sites (bb, gsi_stmt (gsi), m_kills);
+ mark_def_sites (bb, gsi_stmt (gsi), &m_kills);
}
/* Initialize internal data needed during renaming. */
@@ -2991,7 +2983,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
{
basic_block entry;
struct def_blocks_d *db;
- bitmap idf, pruned_idf;
+ bitmap idf;
bitmap_iterator bi;
unsigned i;
@@ -3009,7 +3001,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
/* Compute the initial iterated dominance frontier. */
idf = compute_idf (db->def_blocks, dfs);
- pruned_idf = BITMAP_ALLOC (NULL);
+ bitmap_head pruned_idf;
if (TREE_CODE (var) == SSA_NAME)
{
@@ -3025,13 +3017,13 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
if (BASIC_BLOCK_FOR_FN (cfun, i) != entry
&& dominated_by_p (CDI_DOMINATORS,
BASIC_BLOCK_FOR_FN (cfun, i), entry))
- bitmap_set_bit (pruned_idf, i);
+ bitmap_set_bit (&pruned_idf, i);
}
else
{
/* Otherwise, do not prune the IDF for VAR. */
gcc_checking_assert (update_flags == TODO_update_ssa_full_phi);
- bitmap_copy (pruned_idf, idf);
+ bitmap_copy (&pruned_idf, idf);
}
}
else
@@ -3039,10 +3031,10 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
/* Otherwise, VAR is a symbol that needs to be put into SSA form
for the first time, so we need to compute the full IDF for
it. */
- bitmap_copy (pruned_idf, idf);
+ bitmap_copy (&pruned_idf, idf);
}
- if (!bitmap_empty_p (pruned_idf))
+ if (!bitmap_empty_p (&pruned_idf))
{
/* Make sure that PRUNED_IDF blocks and all their feeding blocks
are included in the region to be updated. The feeding blocks
@@ -3051,8 +3043,8 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
/* FIXME, this is not needed if we are updating symbols. We are
already starting at the ENTRY block anyway. */
- bitmap_ior_into (blocks, pruned_idf);
- EXECUTE_IF_SET_IN_BITMAP (pruned_idf, 0, i, bi)
+ bitmap_ior_into (blocks, &pruned_idf);
+ EXECUTE_IF_SET_IN_BITMAP (&pruned_idf, 0, i, bi)
{
edge e;
edge_iterator ei;
@@ -3063,10 +3055,9 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
bitmap_set_bit (blocks, e->src->index);
}
- insert_phi_nodes_for (var, pruned_idf, true);
+ insert_phi_nodes_for (var, &pruned_idf, true);
}
- BITMAP_FREE (pruned_idf);
BITMAP_FREE (idf);
}
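Member bitmaps follow the same pattern as the locals: mark_def_dom_walker used to hold a bitmap pointer, allocate it in its constructor and free it in its destructor; now m_kills is a bitmap_head member whose own constructor and destructor do that work, leaving the walker's constructor and destructor with nothing bitmap-related to do. Only the ownership changes -- the per-block bitmap_clear (&m_kills) in before_dom_children stays, since the kill set must still be emptied for each block. A minimal sketch of the resulting shape, with unrelated details elided:

    class mark_def_dom_walker : public dom_walker
    {
    public:
      mark_def_dom_walker (cdi_direction direction)
        : dom_walker (direction) {}   /* m_kills constructs itself.  */
      ~mark_def_dom_walker () {}      /* ... and destructs itself.  */

      virtual void before_dom_children (basic_block);

    private:
      /* Kill set for the current block, indexed by variable UID.  */
      bitmap_head m_kills;
    };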
diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
index 9db92dbf9bf..4fdbaf9c92f 100644
--- a/gcc/tree-loop-distribution.c
+++ b/gcc/tree-loop-distribution.c
@@ -1212,7 +1212,7 @@ rdg_build_partitions (struct graph *rdg,
vec<gimple> starting_stmts,
vec<partition_t> *partitions)
{
- bitmap processed = BITMAP_ALLOC (NULL);
+ bitmap_head processed;
int i;
gimple stmt;
@@ -1226,11 +1226,11 @@ rdg_build_partitions (struct graph *rdg,
/* If the vertex is already contained in another partition so
is the partition rooted at it. */
- if (bitmap_bit_p (processed, v))
+ if (bitmap_bit_p (&processed, v))
continue;
partition_t partition = build_rdg_partition_for_vertex (rdg, v);
- bitmap_ior_into (processed, partition->stmts);
+ bitmap_ior_into (&processed, partition->stmts);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1243,8 +1243,6 @@ rdg_build_partitions (struct graph *rdg,
/* All vertices should have been assigned to at least one partition now,
other than vertices belonging to dead code. */
-
- BITMAP_FREE (processed);
}
/* Dump to FILE the PARTITIONS. */
diff --git a/gcc/tree-object-size.c b/gcc/tree-object-size.c
index 994845b3d29..302ee22401f 100644
--- a/gcc/tree-object-size.c
+++ b/gcc/tree-object-size.c
@@ -46,7 +46,7 @@ along with GCC; see the file COPYING3. If not see
struct object_size_info
{
int object_size_type;
- bitmap visited, reexamine;
+ bitmap_head visited, reexamine;
int pass;
bool changed;
unsigned int *depths;
@@ -525,8 +525,6 @@ compute_builtin_object_size (tree ptr, int object_size_type)
fprintf (dump_file, ":\n");
}
- osi.visited = BITMAP_ALLOC (NULL);
- osi.reexamine = BITMAP_ALLOC (NULL);
osi.object_size_type = object_size_type;
osi.depths = NULL;
osi.stack = NULL;
@@ -543,9 +541,9 @@ compute_builtin_object_size (tree ptr, int object_size_type)
/* Second pass: keep recomputing object sizes of variables
that need reexamination, until no object sizes are
increased or all object sizes are computed. */
- if (! bitmap_empty_p (osi.reexamine))
+ if (! bitmap_empty_p (&osi.reexamine))
{
- bitmap reexamine = BITMAP_ALLOC (NULL);
+ bitmap_head reexamine;
/* If looking for minimum instead of maximum object size,
detect cases where a pointer is increased in a loop.
@@ -561,9 +559,9 @@ compute_builtin_object_size (tree ptr, int object_size_type)
osi.pass = 1;
/* collect_object_sizes_for is changing
osi.reexamine bitmap, so iterate over a copy. */
- bitmap_copy (reexamine, osi.reexamine);
- EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
- if (bitmap_bit_p (osi.reexamine, i))
+ bitmap_copy (&reexamine, &osi.reexamine);
+ EXECUTE_IF_SET_IN_BITMAP (&reexamine, 0, i, bi)
+ if (bitmap_bit_p (&osi.reexamine, i))
check_for_plus_in_loops (&osi, ssa_name (i));
free (osi.depths);
@@ -579,9 +577,9 @@ compute_builtin_object_size (tree ptr, int object_size_type)
osi.changed = false;
/* collect_object_sizes_for is changing
osi.reexamine bitmap, so iterate over a copy. */
- bitmap_copy (reexamine, osi.reexamine);
- EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
- if (bitmap_bit_p (osi.reexamine, i))
+ bitmap_copy (&reexamine, &osi.reexamine);
+ EXECUTE_IF_SET_IN_BITMAP (&reexamine, 0, i, bi)
+ if (bitmap_bit_p (&osi.reexamine, i))
{
collect_object_sizes_for (&osi, ssa_name (i));
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -594,16 +592,14 @@ compute_builtin_object_size (tree ptr, int object_size_type)
}
}
while (osi.changed);
-
- BITMAP_FREE (reexamine);
}
- EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&osi.reexamine, 0, i, bi)
bitmap_set_bit (computed[object_size_type], i);
/* Debugging dumps. */
if (dump_file)
{
- EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&osi.visited, 0, i, bi)
if (object_sizes[object_size_type][i]
!= unknown[object_size_type])
{
@@ -617,9 +613,6 @@ compute_builtin_object_size (tree ptr, int object_size_type)
object_sizes[object_size_type][i]);
}
}
-
- BITMAP_FREE (osi.reexamine);
- BITMAP_FREE (osi.visited);
}
return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
@@ -767,7 +760,7 @@ merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
osi->changed = true;
}
}
- return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
+ return bitmap_bit_p (&osi->reexamine, SSA_NAME_VERSION (orig));
}
@@ -909,7 +902,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
if (osi->pass == 0)
{
- if (bitmap_set_bit (osi->visited, varno))
+ if (bitmap_set_bit (&osi->visited, varno))
{
object_sizes[object_size_type][varno]
= (object_size_type & 2) ? -1 : 0;
@@ -918,7 +911,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
{
/* Found a dependency loop. Mark the variable for later
re-examination. */
- bitmap_set_bit (osi->reexamine, varno);
+ bitmap_set_bit (&osi->reexamine, varno);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Found a dependency loop at ");
@@ -1022,11 +1015,11 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
|| object_sizes[object_size_type][varno] == unknown[object_size_type])
{
bitmap_set_bit (computed[object_size_type], varno);
- bitmap_clear_bit (osi->reexamine, varno);
+ bitmap_clear_bit (&osi->reexamine, varno);
}
else
{
- bitmap_set_bit (osi->reexamine, varno);
+ bitmap_set_bit (&osi->reexamine, varno);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Need to reexamine ");
@@ -1057,7 +1050,7 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
for (sp = osi->tos; sp > osi->stack; )
{
--sp;
- bitmap_clear_bit (osi->reexamine, *sp);
+ bitmap_clear_bit (&osi->reexamine, *sp);
bitmap_set_bit (computed[osi->object_size_type], *sp);
object_sizes[osi->object_size_type][*sp] = 0;
if (*sp == varno)
@@ -1066,7 +1059,7 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
}
return;
}
- else if (! bitmap_bit_p (osi->reexamine, varno))
+ else if (! bitmap_bit_p (&osi->reexamine, varno))
return;
osi->depths[varno] = depth;
diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
index 730bad46aa4..e1ba17ec1c1 100644
--- a/gcc/tree-predcom.c
+++ b/gcc/tree-predcom.c
@@ -2418,7 +2418,6 @@ tree_predictive_commoning_loop (struct loop *loop)
struct tree_niter_desc desc;
bool unroll = false;
edge exit;
- bitmap tmp_vars;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Processing loop %d\n", loop->num);
@@ -2460,7 +2459,7 @@ tree_predictive_commoning_loop (struct loop *loop)
/* Find the suitable components and split them into chains. */
components = filter_suitable_components (loop, components);
- tmp_vars = BITMAP_ALLOC (NULL);
+ bitmap_head tmp_vars;
looparound_phis = BITMAP_ALLOC (NULL);
determine_roots (loop, components, &chains);
release_components (components);
@@ -2501,7 +2500,7 @@ tree_predictive_commoning_loop (struct loop *loop)
fprintf (dump_file, "Unrolling %u times.\n", unroll_factor);
dta.chains = chains;
- dta.tmp_vars = tmp_vars;
+ dta.tmp_vars = &tmp_vars;
update_ssa (TODO_update_ssa_only_virtuals);
@@ -2515,20 +2514,19 @@ tree_predictive_commoning_loop (struct loop *loop)
tree_transform_and_unroll_loop (loop, unroll_factor, exit, &desc,
execute_pred_commoning_cbck, &dta);
- eliminate_temp_copies (loop, tmp_vars);
+ eliminate_temp_copies (loop, &tmp_vars);
}
else
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
"Executing predictive commoning without unrolling.\n");
- execute_pred_commoning (loop, chains, tmp_vars);
+ execute_pred_commoning (loop, chains, &tmp_vars);
}
end: ;
release_chains (chains);
free_data_refs (datarefs);
- BITMAP_FREE (tmp_vars);
BITMAP_FREE (looparound_phis);
free_affine_expand_cache (&name_expansions);
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index f1ddc24b1a2..4cad74b0d48 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -3344,7 +3344,7 @@ scev_const_prop (void)
tree name, type, ev;
gimple phi, ass;
struct loop *loop, *ex_loop;
- bitmap ssa_names_to_remove = NULL;
+ bitmap_head ssa_names_to_remove;
unsigned i;
gimple_stmt_iterator psi;
@@ -3378,20 +3378,16 @@ scev_const_prop (void)
if (name != ev)
replace_uses_by (name, ev);
- if (!ssa_names_to_remove)
- ssa_names_to_remove = BITMAP_ALLOC (NULL);
- bitmap_set_bit (ssa_names_to_remove, SSA_NAME_VERSION (name));
+ bitmap_set_bit (&ssa_names_to_remove, SSA_NAME_VERSION (name));
}
}
/* Remove the ssa names that were replaced by constants. We do not
remove them directly in the previous cycle, since this
invalidates scev cache. */
- if (ssa_names_to_remove)
- {
- bitmap_iterator bi;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (ssa_names_to_remove, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&ssa_names_to_remove, 0, i, bi)
{
gimple_stmt_iterator psi;
name = ssa_name (i);
@@ -3402,9 +3398,7 @@ scev_const_prop (void)
remove_phi_node (&psi, true);
}
- BITMAP_FREE (ssa_names_to_remove);
scev_reset ();
- }
/* Now the regular final value replacement. */
FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
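scev_const_prop also loses a lazy-allocation idiom: previously ssa_names_to_remove started out NULL, was BITMAP_ALLOC'd only once the first final value was actually replaced, and the whole removal block was guarded on the pointer being non-NULL. With an always-present stack head the guard is unnecessary, since an empty bitmap makes the removal loop a no-op; note, though, that as the hunk stands scev_reset () now runs even when no PHI was removed, where before it sat inside the guarded block:

    /* Old, lazy:  */
    bitmap ssa_names_to_remove = NULL;
    ...
    if (!ssa_names_to_remove)
      ssa_names_to_remove = BITMAP_ALLOC (NULL);
    bitmap_set_bit (ssa_names_to_remove, SSA_NAME_VERSION (name));
    ...
    if (ssa_names_to_remove)
      {
        /* remove the marked PHIs ...  */
        BITMAP_FREE (ssa_names_to_remove);
        scev_reset ();
      }

    /* New, unconditional:  */
    bitmap_head ssa_names_to_remove;
    ...
    bitmap_set_bit (&ssa_names_to_remove, SSA_NAME_VERSION (name));
    ...
    /* removal loop runs over a possibly empty bitmap  */
    scev_reset ();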
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 284d54448bd..bae615cecd9 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -2481,7 +2481,7 @@ static bool
analyze_all_variable_accesses (void)
{
int res = 0;
- bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap_head tmp;
bitmap_iterator bi;
unsigned i, max_total_scalarization_size;
@@ -2517,8 +2517,8 @@ analyze_all_variable_accesses (void)
}
}
- bitmap_copy (tmp, candidate_bitmap);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
+ bitmap_copy (&tmp, candidate_bitmap);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, i, bi)
{
tree var = candidate (i);
struct access *access;
@@ -2531,8 +2531,8 @@ analyze_all_variable_accesses (void)
propagate_all_subaccesses ();
- bitmap_copy (tmp, candidate_bitmap);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
+ bitmap_copy (&tmp, candidate_bitmap);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, i, bi)
{
tree var = candidate (i);
struct access *access = get_first_repr_for_decl (var);
@@ -2553,8 +2553,6 @@ analyze_all_variable_accesses (void)
disqualify_candidate (var, "No scalar replacements to be created.");
}
- BITMAP_FREE (tmp);
-
if (res)
{
statistics_counter_event (cfun, "Scalarized aggregates", res);
@@ -4772,7 +4770,7 @@ convert_callers_for_node (struct cgraph_node *node,
void *data)
{
ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
- bitmap recomputed_callers = BITMAP_ALLOC (NULL);
+ bitmap_head recomputed_callers;
struct cgraph_edge *cs;
for (cs = node->callers; cs; cs = cs->next_caller)
@@ -4792,10 +4790,9 @@ convert_callers_for_node (struct cgraph_node *node,
}
for (cs = node->callers; cs; cs = cs->next_caller)
- if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
+ if (bitmap_set_bit (&recomputed_callers, cs->caller->uid)
&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
compute_inline_parameters (cs->caller, true);
- BITMAP_FREE (recomputed_callers);
return true;
}
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index 86276b361a6..d48c0c2d497 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -1256,12 +1256,12 @@ coalesce_ssa_name (void)
tree_live_info_p liveinfo;
ssa_conflicts_p graph;
coalesce_list_p cl;
- bitmap used_in_copies = BITMAP_ALLOC (NULL);
+ bitmap_head used_in_copies;
var_map map;
unsigned int i;
cl = create_coalesce_list ();
- map = create_outofssa_var_map (cl, used_in_copies);
+ map = create_outofssa_var_map (cl, &used_in_copies);
/* If optimization is disabled, we need to coalesce all the names originating
from the same SSA_NAME_VAR so debug info remains undisturbed. */
@@ -1295,8 +1295,8 @@ coalesce_ssa_name (void)
? MUST_COALESCE_COST - 1 : MUST_COALESCE_COST;
add_coalesce (cl, SSA_NAME_VERSION (a),
SSA_NAME_VERSION (*slot), cost);
- bitmap_set_bit (used_in_copies, SSA_NAME_VERSION (a));
- bitmap_set_bit (used_in_copies, SSA_NAME_VERSION (*slot));
+ bitmap_set_bit (&used_in_copies, SSA_NAME_VERSION (a));
+ bitmap_set_bit (&used_in_copies, SSA_NAME_VERSION (*slot));
}
}
}
@@ -1306,8 +1306,7 @@ coalesce_ssa_name (void)
dump_var_map (dump_file, map);
/* Don't calculate live ranges for variables not in the coalesce list. */
- partition_view_bitmap (map, used_in_copies, true);
- BITMAP_FREE (used_in_copies);
+ partition_view_bitmap (map, &used_in_copies, true);
if (num_var_partitions (map) < 1)
{
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index 98cf60888ab..41781ad243b 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -3029,9 +3029,6 @@ eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
static unsigned int
eliminate_degenerate_phis (void)
{
- bitmap interesting_names;
- bitmap interesting_names1;
-
/* Bitmap of blocks which need EH information updated. We can not
update it on-the-fly as doing so invalidates the dominator tree. */
need_eh_cleanup = BITMAP_ALLOC (NULL);
@@ -3046,8 +3043,7 @@ eliminate_degenerate_phis (void)
Experiments have shown we generally get better compilation
time behavior with bitmaps rather than sbitmaps. */
- interesting_names = BITMAP_ALLOC (NULL);
- interesting_names1 = BITMAP_ALLOC (NULL);
+ bitmap_head interesting_names, interesting_names1;
calculate_dominance_info (CDI_DOMINATORS);
cfg_altered = false;
@@ -3061,13 +3057,13 @@ eliminate_degenerate_phis (void)
in dominator order leaves fewer PHIs for later examination
by the worklist phase. */
eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun),
- interesting_names);
+ &interesting_names);
/* Second phase. Eliminate second order degenerate PHIs as well
as trivial copies or constant initializations identified by
the first phase or this phase. Basically we keep iterating
until our set of INTERESTING_NAMEs is empty. */
- while (!bitmap_empty_p (interesting_names))
+ while (!bitmap_empty_p (&interesting_names))
{
unsigned int i;
bitmap_iterator bi;
@@ -3075,9 +3071,9 @@ eliminate_degenerate_phis (void)
/* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
changed during the loop. Copy it to another bitmap and
use that. */
- bitmap_copy (interesting_names1, interesting_names);
+ bitmap_copy (&interesting_names1, &interesting_names);
- EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&interesting_names1, 0, i, bi)
{
tree name = ssa_name (i);
@@ -3085,7 +3081,7 @@ eliminate_degenerate_phis (void)
their defining statement was deleted (unreachable). */
if (name)
eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
- interesting_names);
+ &interesting_names);
}
}
@@ -3105,8 +3101,6 @@ eliminate_degenerate_phis (void)
BITMAP_FREE (need_eh_cleanup);
}
- BITMAP_FREE (interesting_names);
- BITMAP_FREE (interesting_names1);
return 0;
}
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index b2294290077..85e48beb805 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -1304,7 +1304,6 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
len = labels.length ();
if (len < branch_num - 1)
{
- bitmap target_blocks;
edge_iterator ei;
edge e;
@@ -1328,16 +1327,16 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
gimple_switch_set_num_labels (stmt, len + 1);
/* Cleanup any edges that are now dead. */
- target_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head target_blocks;
for (i = 0; i < gimple_switch_num_labels (stmt); i++)
{
tree elt = gimple_switch_label (stmt, i);
basic_block target = label_to_block (CASE_LABEL (elt));
- bitmap_set_bit (target_blocks, target->index);
+ bitmap_set_bit (&target_blocks, target->index);
}
for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
{
- if (! bitmap_bit_p (target_blocks, e->dest->index))
+ if (! bitmap_bit_p (&target_blocks, e->dest->index))
{
remove_edge (e);
cfg_changed = true;
@@ -1346,7 +1345,6 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
else
ei_next (&ei);
}
- BITMAP_FREE (target_blocks);
}
}
diff --git a/gcc/tree-ssa-live.c b/gcc/tree-ssa-live.c
index 13e4fb0cfa4..a2ee97a34d3 100644
--- a/gcc/tree-ssa-live.c
+++ b/gcc/tree-ssa-live.c
@@ -1044,23 +1044,22 @@ live_worklist (tree_live_info_p live)
unsigned b;
basic_block bb;
sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
- bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);
+ bitmap_head tmp (&liveness_bitmap_obstack);
bitmap_clear (visited);
/* Visit all the blocks in reverse order and propagate live on entry values
into the predecessor blocks. */
FOR_EACH_BB_REVERSE_FN (bb, cfun)
- loe_visit_block (live, bb, visited, tmp);
+ loe_visit_block (live, bb, visited, &tmp);
/* Process any blocks which require further iteration. */
while (live->stack_top != live->work_stack)
{
b = *--(live->stack_top);
- loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited, tmp);
+ loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited, &tmp);
}
- BITMAP_FREE (tmp);
sbitmap_free (visited);
}
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index c75f25749ae..5aac2e4cc98 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -2278,20 +2278,19 @@ store_motion_loop (struct loop *loop, bitmap sm_executed)
{
vec<edge> exits = get_loop_exit_edges (loop);
struct loop *subloop;
- bitmap sm_in_loop = BITMAP_ALLOC (&lim_bitmap_obstack);
+ bitmap_head sm_in_loop (&lim_bitmap_obstack);
if (loop_suitable_for_sm (loop, exits))
{
- find_refs_for_sm (loop, sm_executed, sm_in_loop);
- hoist_memory_references (loop, sm_in_loop, exits);
+ find_refs_for_sm (loop, sm_executed, &sm_in_loop);
+ hoist_memory_references (loop, &sm_in_loop, exits);
}
exits.release ();
- bitmap_ior_into (sm_executed, sm_in_loop);
+ bitmap_ior_into (sm_executed, &sm_in_loop);
for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
store_motion_loop (subloop, sm_executed);
- bitmap_and_compl_into (sm_executed, sm_in_loop);
- BITMAP_FREE (sm_in_loop);
+ bitmap_and_compl_into (sm_executed, &sm_in_loop);
}
/* Try to perform store motion for all memory references modified inside
@@ -2301,12 +2300,11 @@ static void
store_motion (void)
{
struct loop *loop;
- bitmap sm_executed = BITMAP_ALLOC (&lim_bitmap_obstack);
+ bitmap_head sm_executed (&lim_bitmap_obstack);
for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
- store_motion_loop (loop, sm_executed);
+ store_motion_loop (loop, &sm_executed);
- BITMAP_FREE (sm_executed);
gsi_commit_edge_inserts ();
}
diff --git a/gcc/tree-ssa-loop-ivcanon.c b/gcc/tree-ssa-loop-ivcanon.c
index b475b067bb5..05cf3d91ed3 100644
--- a/gcc/tree-ssa-loop-ivcanon.c
+++ b/gcc/tree-ssa-loop-ivcanon.c
@@ -992,7 +992,7 @@ canonicalize_induction_variables (void)
struct loop *loop;
bool changed = false;
bool irred_invalidated = false;
- bitmap loop_closed_ssa_invalidated = BITMAP_ALLOC (NULL);
+ bitmap_head loop_closed_ssa_invalidated;
free_numbers_of_iterations_estimates ();
estimate_numbers_of_iterations ();
@@ -1005,7 +1005,7 @@ canonicalize_induction_variables (void)
}
gcc_assert (!need_ssa_update_p (cfun));
- unloop_loops (loop_closed_ssa_invalidated, &irred_invalidated);
+ unloop_loops (&loop_closed_ssa_invalidated, &irred_invalidated);
if (irred_invalidated
&& loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
mark_irreducible_loops ();
@@ -1014,12 +1014,11 @@ canonicalize_induction_variables (void)
evaluation could reveal new information. */
scev_reset ();
- if (!bitmap_empty_p (loop_closed_ssa_invalidated))
+ if (!bitmap_empty_p (&loop_closed_ssa_invalidated))
{
gcc_checking_assert (loops_state_satisfies_p (LOOP_CLOSED_SSA));
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
- BITMAP_FREE (loop_closed_ssa_invalidated);
if (changed)
return TODO_cleanup_cfg;
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 14ba20fce77..3e7fd7a294b 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -5015,7 +5015,7 @@ determine_use_iv_costs (struct ivopts_data *data)
unsigned i, j;
struct iv_use *use;
struct iv_cand *cand;
- bitmap to_clear = BITMAP_ALLOC (NULL);
+ bitmap_head to_clear;
alloc_use_cost_map (data);
@@ -5039,18 +5039,16 @@ determine_use_iv_costs (struct ivopts_data *data)
{
cand = iv_cand (data, j);
if (!determine_use_iv_cost (data, use, cand))
- bitmap_set_bit (to_clear, j);
+ bitmap_set_bit (&to_clear, j);
}
/* Remove the candidates for which the cost is infinite from
the list of related candidates. */
- bitmap_and_compl_into (use->related_cands, to_clear);
- bitmap_clear (to_clear);
+ bitmap_and_compl_into (use->related_cands, &to_clear);
+ bitmap_clear (&to_clear);
}
}
- BITMAP_FREE (to_clear);
-
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Use-candidate costs:\n");
@@ -6546,7 +6544,7 @@ remove_unused_ivs (struct ivopts_data *data)
{
unsigned j;
bitmap_iterator bi;
- bitmap toremove = BITMAP_ALLOC (NULL);
+ bitmap_head toremove;
/* Figure out an order in which to release SSA DEFs so that we don't
release something that we'd have to propagate into a debug stmt
@@ -6562,7 +6560,7 @@ remove_unused_ivs (struct ivopts_data *data)
&& !info->iv->have_use_for
&& !info->preserve_biv)
{
- bitmap_set_bit (toremove, SSA_NAME_VERSION (info->iv->ssa_name));
+ bitmap_set_bit (&toremove, SSA_NAME_VERSION (info->iv->ssa_name));
tree def = info->iv->ssa_name;
@@ -6668,9 +6666,7 @@ remove_unused_ivs (struct ivopts_data *data)
}
}
- release_defs_bitset (toremove);
-
- BITMAP_FREE (toremove);
+ release_defs_bitset (&toremove);
}
/* Frees memory occupied by struct tree_niter_desc in *VALUE. Callback
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index 9dcbc530c36..0e0ffe0c86d 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -317,18 +317,16 @@ add_exit_phis_var (tree var, bitmap use_blocks, bitmap *loop_exits)
unsigned index;
bitmap_iterator bi;
basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
- bitmap live_exits = BITMAP_ALLOC (&loop_renamer_obstack);
+ bitmap_head live_exits (&loop_renamer_obstack);
gcc_checking_assert (! bitmap_bit_p (use_blocks, def_bb->index));
- compute_live_loop_exits (live_exits, use_blocks, loop_exits, def_bb);
+ compute_live_loop_exits (&live_exits, use_blocks, loop_exits, def_bb);
- EXECUTE_IF_SET_IN_BITMAP (live_exits, 0, index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&live_exits, 0, index, bi)
{
add_exit_phi (BASIC_BLOCK_FOR_FN (cfun, index), var);
}
-
- BITMAP_FREE (live_exits);
}
/* Add exit phis for the names marked in NAMES_TO_RENAME.
diff --git a/gcc/tree-ssa-loop-niter.c b/gcc/tree-ssa-loop-niter.c
index 7628363cc62..d0ec6b6b75e 100644
--- a/gcc/tree-ssa-loop-niter.c
+++ b/gcc/tree-ssa-loop-niter.c
@@ -3297,7 +3297,6 @@ maybe_lower_iteration_bound (struct loop *loop)
struct nb_iter_bound *elt;
bool found_exit = false;
vec<basic_block> queue = vNULL;
- bitmap visited;
/* Collect all statements with interesting (i.e. lower than
nb_iterations_upper_bound) bound on them.
@@ -3325,8 +3324,8 @@ maybe_lower_iteration_bound (struct loop *loop)
any of the statements known to have undefined effect on the last
iteration. */
queue.safe_push (loop->header);
- visited = BITMAP_ALLOC (NULL);
- bitmap_set_bit (visited, loop->header->index);
+ bitmap_head visited;
+ bitmap_set_bit (&visited, loop->header->index);
found_exit = false;
do
@@ -3367,7 +3366,7 @@ maybe_lower_iteration_bound (struct loop *loop)
found_exit = true;
break;
}
- if (bitmap_set_bit (visited, e->dest->index))
+ if (bitmap_set_bit (&visited, e->dest->index))
queue.safe_push (e->dest);
}
}
@@ -3386,7 +3385,6 @@ maybe_lower_iteration_bound (struct loop *loop)
record_niter_bound (loop, loop->nb_iterations_upper_bound - double_int_one,
false, true);
}
- BITMAP_FREE (visited);
queue.release ();
pointer_set_destroy (not_executed_last_iteration);
}
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 1e55356556f..275a0659da7 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -753,8 +753,7 @@ bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
if (dest != orig)
{
- bitmap_head temp;
- bitmap_initialize (&temp, &grand_bitmap_obstack);
+ bitmap_head temp (&grand_bitmap_obstack);
bitmap_and_into (&dest->values, &orig->values);
bitmap_copy (&temp, &dest->expressions);
@@ -765,7 +764,6 @@ bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
if (!bitmap_bit_p (&dest->values, value_id))
bitmap_clear_bit (&dest->expressions, i);
}
- bitmap_clear (&temp);
}
}
@@ -798,9 +796,7 @@ bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
unsigned int i;
bitmap_iterator bi;
- bitmap_head temp;
-
- bitmap_initialize (&temp, &grand_bitmap_obstack);
+ bitmap_head temp (&grand_bitmap_obstack);
bitmap_copy (&temp, &a->expressions);
EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
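
The two tree-ssa-pre.c hunks cover the remaining flavor of the conversion: a bitmap_head that already lived on the stack but was set up with an explicit bitmap_initialize and torn down with a trailing bitmap_clear.  After the patch the constructor takes the obstack and the destructor does the clearing.  A minimal before/after sketch of that idiom, using hypothetical function names (use_temp_old, use_temp_new) rather than the real bitmap_set_and code:

/* Before: explicit setup and teardown around the use.  */
static void
use_temp_old (bitmap_set_t set)
{
  bitmap_head temp;
  bitmap_initialize (&temp, &grand_bitmap_obstack);
  bitmap_copy (&temp, &set->expressions);
  /* ... walk TEMP ... */
  bitmap_clear (&temp);
}

/* After: construction on the obstack; TEMP is released when it
   goes out of scope.  */
static void
use_temp_new (bitmap_set_t set)
{
  bitmap_head temp (&grand_bitmap_obstack);
  bitmap_copy (&temp, &set->expressions);
  /* ... walk TEMP ... */
}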
diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
index 6d02975c4dd..ab421dd2e81 100644
--- a/gcc/tree-ssa-sink.c
+++ b/gcc/tree-ssa-sink.c
@@ -140,7 +140,7 @@ all_immediate_uses_same_place (gimple stmt)
static basic_block
nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
{
- bitmap blocks = BITMAP_ALLOC (NULL);
+ bitmap_head blocks;
basic_block commondom;
unsigned int j;
bitmap_iterator bi;
@@ -149,7 +149,7 @@ nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
use_operand_p use_p;
tree var;
- bitmap_clear (blocks);
+ bitmap_clear (&blocks);
FOR_EACH_SSA_TREE_OPERAND (var, stmt, op_iter, SSA_OP_ALL_DEFS)
{
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
@@ -175,18 +175,14 @@ nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
/* Short circuit. Nothing dominates the entry block. */
if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
- {
- BITMAP_FREE (blocks);
- return NULL;
- }
- bitmap_set_bit (blocks, useblock->index);
+ return NULL;
+ bitmap_set_bit (&blocks, useblock->index);
}
}
- commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (blocks));
- EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
+ commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (&blocks));
+ EXECUTE_IF_SET_IN_BITMAP (&blocks, 0, j, bi)
commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
BASIC_BLOCK_FOR_FN (cfun, j));
- BITMAP_FREE (blocks);
return commondom;
}
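
The nearest_common_dominator_of_uses hunk above is where the destructor simplifies control flow most visibly: the early return for the entry block previously had to open a block just to BITMAP_FREE before returning, and now every exit path releases BLOCKS automatically.  A reduced sketch of that shape, using a hypothetical function and parameters (first_marked_before, n, stop_at) and assuming the gcc/bitmap.h declarations:

/* Hypothetical reduction of the early-return pattern, not GCC code.  */
static int
first_marked_before (int n, int stop_at)
{
  bitmap_head seen;  /* released by the destructor on every return below */

  for (int i = 0; i < n; i++)
    {
      if (i == stop_at)
        return -1;  /* no explicit BITMAP_FREE needed before returning */
      bitmap_set_bit (&seen, i);
    }

  if (bitmap_empty_p (&seen))
    return -1;
  return bitmap_first_set_bit (&seen);
}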
diff --git a/gcc/tree-ssa-strlen.c b/gcc/tree-ssa-strlen.c
index f55b7ee6dc8..4d3e24c5762 100644
--- a/gcc/tree-ssa-strlen.c
+++ b/gcc/tree-ssa-strlen.c
@@ -1973,10 +1973,9 @@ strlen_dom_walker::before_dom_children (basic_block bb)
gimple phi = gsi_stmt (gsi);
if (virtual_operand_p (gimple_phi_result (phi)))
{
- bitmap visited = BITMAP_ALLOC (NULL);
+ bitmap_head visited;
int count_vdef = 100;
- do_invalidate (dombb, phi, visited, &count_vdef);
- BITMAP_FREE (visited);
+ do_invalidate (dombb, phi, &visited, &count_vdef);
if (count_vdef == 0)
{
/* If there were too many vdefs in between immediate
diff --git a/gcc/tree-ssa-tail-merge.c b/gcc/tree-ssa-tail-merge.c
index f6b1ba08154..7b5291b667f 100644
--- a/gcc/tree-ssa-tail-merge.c
+++ b/gcc/tree-ssa-tail-merge.c
@@ -1363,15 +1363,14 @@ deps_ok_for_redirect_from_bb_to_bb (basic_block from, basic_block to)
basic_block cd, dep_bb = BB_DEP_BB (to);
edge_iterator ei;
edge e;
- bitmap from_preds = BITMAP_ALLOC (NULL);
+ bitmap_head from_preds;
if (dep_bb == NULL)
return true;
FOR_EACH_EDGE (e, ei, from->preds)
- bitmap_set_bit (from_preds, e->src->index);
- cd = nearest_common_dominator_for_set (CDI_DOMINATORS, from_preds);
- BITMAP_FREE (from_preds);
+ bitmap_set_bit (&from_preds, e->src->index);
+ cd = nearest_common_dominator_for_set (CDI_DOMINATORS, &from_preds);
return dominated_by_p (CDI_DOMINATORS, dep_bb, cd);
}
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index f458d6a9985..5c0e85e5bbc 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -1423,7 +1423,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
{
unsigned int i;
bitmap_iterator bi;
- bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap_head tmp;
basic_block bb;
edge e;
edge_iterator ei;
@@ -1450,7 +1450,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
{
edge e = (*path)[0]->e;
e->aux = (void *)path;
- bitmap_set_bit (tmp, e->dest->index);
+ bitmap_set_bit (&tmp, e->dest->index);
}
}
@@ -1475,7 +1475,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
{
edge e = (*path)[0]->e;
e->aux = path;
- bitmap_set_bit (tmp, e->dest->index);
+ bitmap_set_bit (&tmp, e->dest->index);
}
else if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1489,7 +1489,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
to duplicate it or it's an otherwise empty redirection block. */
if (optimize_function_for_size_p (cfun))
{
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, i, bi)
{
bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (EDGE_COUNT (bb->preds) > 1
@@ -1510,7 +1510,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
}
}
else
- bitmap_copy (threaded_blocks, tmp);
+ bitmap_copy (threaded_blocks, &tmp);
/* Look for jump threading paths which cross multiple loop headers.
@@ -1519,7 +1519,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
We don't want to blindly cancel the requests. We can instead do better
by trimming off the end of the jump thread path. */
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, i, bi)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
FOR_EACH_EDGE (e, ei, bb->preds)
@@ -1577,7 +1577,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
Note since we've moved the thread request data to the edges,
we have to iterate on those rather than the threaded_edges vector. */
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, i, bi)
{
bb = BASIC_BLOCK_FOR_FN (cfun, i);
FOR_EACH_EDGE (e, ei, bb->preds)
@@ -1603,8 +1603,6 @@ mark_threaded_blocks (bitmap threaded_blocks)
}
}
}
-
- BITMAP_FREE (tmp);
}
@@ -1639,7 +1637,6 @@ thread_through_all_blocks (bool may_peel_loop_headers)
bool retval = false;
unsigned int i;
bitmap_iterator bi;
- bitmap threaded_blocks;
struct loop *loop;
/* We must know about loops in order to preserve them. */
@@ -1648,16 +1645,16 @@ thread_through_all_blocks (bool may_peel_loop_headers)
if (!paths.exists ())
return false;
- threaded_blocks = BITMAP_ALLOC (NULL);
+ bitmap_head threaded_blocks;
memset (&thread_stats, 0, sizeof (thread_stats));
- mark_threaded_blocks (threaded_blocks);
+ mark_threaded_blocks (&threaded_blocks);
initialize_original_copy_tables ();
/* First perform the threading requests that do not affect
loop structure. */
- EXECUTE_IF_SET_IN_BITMAP (threaded_blocks, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&threaded_blocks, 0, i, bi)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
@@ -1671,7 +1668,7 @@ thread_through_all_blocks (bool may_peel_loop_headers)
FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
{
if (!loop->header
- || !bitmap_bit_p (threaded_blocks, loop->header->index))
+ || !bitmap_bit_p (&threaded_blocks, loop->header->index))
continue;
retval |= thread_through_loop_header (loop, may_peel_loop_headers);
@@ -1770,8 +1767,6 @@ thread_through_all_blocks (bool may_peel_loop_headers)
free_original_copy_tables ();
- BITMAP_FREE (threaded_blocks);
- threaded_blocks = NULL;
paths.release ();
if (retval)
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index 20f061ffaf0..2a9b4309d54 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -967,7 +967,7 @@ verify_ssa (bool check_modified_stmt)
ssa_op_iter iter;
tree op;
enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
- bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
+ bitmap_head names_defined_in_bb;
gcc_assert (!need_ssa_update_p (cfun));
@@ -1024,7 +1024,7 @@ verify_ssa (bool check_modified_stmt)
if (verify_phi_args (phi, bb, definition_block))
goto err;
- bitmap_set_bit (names_defined_in_bb,
+ bitmap_set_bit (&names_defined_in_bb,
SSA_NAME_VERSION (gimple_phi_result (phi)));
}
@@ -1056,7 +1056,7 @@ verify_ssa (bool check_modified_stmt)
{
op = USE_FROM_PTR (use_p);
if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
- use_p, stmt, false, names_defined_in_bb))
+ use_p, stmt, false, &names_defined_in_bb))
goto err;
}
@@ -1072,11 +1072,11 @@ verify_ssa (bool check_modified_stmt)
4, TDF_VOPS);
goto err;
}
- bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
+ bitmap_set_bit (&names_defined_in_bb, SSA_NAME_VERSION (op));
}
}
- bitmap_clear (names_defined_in_bb);
+ bitmap_clear (&names_defined_in_bb);
}
free (definition_block);
@@ -1088,7 +1088,6 @@ verify_ssa (bool check_modified_stmt)
else
set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
- BITMAP_FREE (names_defined_in_bb);
timevar_pop (TV_TREE_SSA_VERIFY);
return;
@@ -1446,9 +1445,7 @@ execute_update_addresses_taken (void)
{
gimple_stmt_iterator gsi;
basic_block bb;
- bitmap addresses_taken = BITMAP_ALLOC (NULL);
- bitmap not_reg_needs = BITMAP_ALLOC (NULL);
- bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
+ bitmap_head addresses_taken, not_reg_needs, suitable_for_renaming;
tree var;
unsigned i;
@@ -1465,7 +1462,7 @@ execute_update_addresses_taken (void)
tree decl;
/* Note all addresses taken by the stmt. */
- gimple_ior_addresses_taken (addresses_taken, stmt);
+ gimple_ior_addresses_taken (&addresses_taken, stmt);
/* If we have a call or an assignment, see if the lhs contains
a local decl that requires not to be a gimple register. */
@@ -1478,7 +1475,7 @@ execute_update_addresses_taken (void)
{
decl = get_base_address (lhs);
if (DECL_P (decl))
- bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
}
}
@@ -1486,7 +1483,7 @@ execute_update_addresses_taken (void)
{
tree rhs = gimple_assign_rhs1 (stmt);
if ((decl = non_rewritable_mem_ref_base (rhs)))
- bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
}
else if (code == GIMPLE_CALL)
@@ -1495,7 +1492,7 @@ execute_update_addresses_taken (void)
{
tree arg = gimple_call_arg (stmt, i);
if ((decl = non_rewritable_mem_ref_base (arg)))
- bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
}
}
@@ -1515,14 +1512,14 @@ execute_update_addresses_taken (void)
require we do not need any. */
|| !useless_type_conversion_p
(TREE_TYPE (lhs), TREE_TYPE (decl))))
- bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
}
}
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
{
tree link = gimple_asm_input_op (stmt, i);
if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
- bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
}
}
}
@@ -1538,7 +1535,7 @@ execute_update_addresses_taken (void)
if (TREE_CODE (op) == ADDR_EXPR
&& (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
&& DECL_P (var))
- bitmap_set_bit (addresses_taken, DECL_UID (var));
+ bitmap_set_bit (&addresses_taken, DECL_UID (var));
}
}
}
@@ -1547,16 +1544,16 @@ execute_update_addresses_taken (void)
unused vars from BLOCK trees, which causes code generation differences
for -g vs. -g0. */
for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
- maybe_optimize_var (var, addresses_taken, not_reg_needs,
- suitable_for_renaming);
+ maybe_optimize_var (var, &addresses_taken, &not_reg_needs,
+ &suitable_for_renaming);
FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
- maybe_optimize_var (var, addresses_taken, not_reg_needs,
- suitable_for_renaming);
+ maybe_optimize_var (var, &addresses_taken, &not_reg_needs,
+ &suitable_for_renaming);
/* Operand caches need to be recomputed for operands referencing the updated
variables and operands need to be rewritten to expose bare symbols. */
- if (!bitmap_empty_p (suitable_for_renaming))
+ if (!bitmap_empty_p (&suitable_for_renaming))
{
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
@@ -1582,14 +1579,14 @@ execute_update_addresses_taken (void)
&& (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
&& DECL_P (sym)
&& !TREE_ADDRESSABLE (sym)
- && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
+ && bitmap_bit_p (&suitable_for_renaming, DECL_UID (sym)))
lhs = sym;
else
lhs = gimple_assign_lhs (stmt);
/* Rewrite the RHS and make sure the resulting assignment
is validly typed. */
- maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
+ maybe_rewrite_mem_ref_base (rhsp, &suitable_for_renaming);
rhs = gimple_assign_rhs1 (stmt);
if (gimple_assign_lhs (stmt) != lhs
&& !useless_type_conversion_p (TREE_TYPE (lhs),
@@ -1604,7 +1601,7 @@ execute_update_addresses_taken (void)
TREE_ADDRESSABLE just remove the stmt. */
if (DECL_P (lhs)
&& TREE_CLOBBER_P (rhs)
- && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
+ && bitmap_bit_p (&suitable_for_renaming, DECL_UID (lhs)))
{
unlink_stmt_vdef (stmt);
gsi_remove (&gsi, true);
@@ -1625,7 +1622,7 @@ execute_update_addresses_taken (void)
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree *argp = gimple_call_arg_ptr (stmt, i);
- maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
+ maybe_rewrite_mem_ref_base (argp, &suitable_for_renaming);
}
}
@@ -1636,13 +1633,13 @@ execute_update_addresses_taken (void)
{
tree link = gimple_asm_output_op (stmt, i);
maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
- suitable_for_renaming);
+ &suitable_for_renaming);
}
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
{
tree link = gimple_asm_input_op (stmt, i);
maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
- suitable_for_renaming);
+ &suitable_for_renaming);
}
}
@@ -1651,10 +1648,10 @@ execute_update_addresses_taken (void)
{
tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
tree decl;
- maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
+ maybe_rewrite_mem_ref_base (valuep, &suitable_for_renaming);
decl = non_rewritable_mem_ref_base (*valuep);
if (decl
- && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
+ && bitmap_bit_p (&suitable_for_renaming, DECL_UID (decl)))
gimple_debug_bind_reset_value (stmt);
}
@@ -1673,9 +1670,6 @@ execute_update_addresses_taken (void)
update_ssa (TODO_update_ssa);
}
- BITMAP_FREE (not_reg_needs);
- BITMAP_FREE (addresses_taken);
- BITMAP_FREE (suitable_for_renaming);
timevar_pop (TV_ADDRESS_TAKEN);
}
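
One usage note on the converted code as a whole: the destructor does not replace explicit clearing.  Hunks such as to_clear in determine_use_iv_costs and names_defined_in_bb in verify_ssa keep their per-iteration bitmap_clear calls so a single head can be reused across iterations; only the final BITMAP_FREE goes away.  A compact sketch of that reuse pattern, with a hypothetical function and parameters (process_groups, ngroups, group_size) and assuming the gcc/bitmap.h declarations:

/* Hypothetical reuse pattern, not GCC code.  */
static void
process_groups (int ngroups, int group_size)
{
  bitmap_head scratch;  /* one head, reused for every group */

  for (int g = 0; g < ngroups; g++)
    {
      for (int i = 0; i < group_size; i++)
        bitmap_set_bit (&scratch, g * group_size + i);

      /* ... consume SCRATCH for this group ...  */

      bitmap_clear (&scratch);  /* empty it for the next iteration */
    }
}  /* The destructor releases SCRATCH when it goes out of scope here.  */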