author     zadeck <zadeck@138bc75d-0d04-0410-961f-82ee72b054a4>   2005-12-20 23:10:49 +0000
committer  zadeck <zadeck@138bc75d-0d04-0410-961f-82ee72b054a4>   2005-12-20 23:10:49 +0000
commit     6180f28dae8a9eda8e2e5b3bb829e9c6bae6c832 (patch)
tree       3c3755600937dd34ae35ee7f0ba92fc15d99c5c7 /gcc
parent     eb00c7b798dcaceedb951ce4deabdd6ced716c9d (diff)
2005-12-20 Kenneth Zadeck <zadeck@naturalbridge.com>
* cfganal.c (flow_reverse_top_sort_order_compute): Renamed to
post_order_compute and added a parameter that allows the entry and
exit blocks to be included in the list.
(mark_dfs_back_edges): Fixed comment.
(flow_depth_first_order_compute): Renamed to
pre_and_rev_post_order_compute and added a parameter that allows the
entry and exit blocks to be included in the list.
* global.c (set_up_bb_rts_numbers): Call to
flow_reverse_top_sort_order_compute renamed to
post_order_compute.
* var-tracking.c (vt_stack_adjustments): Fixed comment.
(vt_find_locations): Call to
flow_depth_first_order_compute renamed to
pre_and_rev_post_order_compute.
* cfgloop.c (flow_find_loops): Ditto.
* tree-ssa-reassoc.c (init_reassoc): Ditto.
* df.c (df_analyze_1, df_analyze_subcfg): Calls to
flow_depth_first_order_compute renamed to
pre_and_rev_post_order_compute and calls to
flow_reverse_top_sort_order_compute renamed to post_order_compute.
* basic-block.h: Updated the prototypes for the renamed functions.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@108874 138bc75d-0d04-0410-961f-82ee72b054a4
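For reference, the prototypes exported from basic-block.h after this change are
int post_order_compute (int *, bool) and
int pre_and_rev_post_order_compute (int *, int *, bool).
The fragment below is a minimal caller sketch, not taken from this commit; it
assumes the GCC-internal helpers already visible in the diff (xmalloc, free,
n_basic_blocks, NUM_FIXED_BLOCKS), and example_compute_orders is a made-up name
used only for illustration.  It mirrors the updated call sites in df.c,
global.c and var-tracking.c, which pass false so that the entry and exit
blocks are left out of the orderings.

/* Illustrative sketch only, not part of the patch.  */
static void
example_compute_orders (void)
{
  int n_post, n_rpo;

  /* With the new flag set to false the arrays need room for
     n_basic_blocks - NUM_FIXED_BLOCKS entries, as in df.c.  */
  int *post_order
    = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
  int *rev_post_order
    = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));

  /* Formerly flow_reverse_top_sort_order_compute; it now returns the
     number of blocks written into POST_ORDER.  */
  n_post = post_order_compute (post_order, false);

  /* Formerly flow_depth_first_order_compute; the preorder array may
     be NULL when only the reverse postorder is wanted, as in
     var-tracking.c and tree-ssa-reassoc.c.  Passing true instead of
     false would also number the ENTRY and EXIT blocks, and the arrays
     would then need n_basic_blocks entries.  */
  n_rpo = pre_and_rev_post_order_compute (NULL, rev_post_order, false);

  /* n_post and n_rpo report how many entries were written; this
     sketch does not use them further.  */
  (void) n_post;
  (void) n_rpo;

  free (post_order);
  free (rev_post_order);
}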
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog          | 25
-rw-r--r--  gcc/basic-block.h      |  4
-rw-r--r--  gcc/cfganal.c          | 83
-rw-r--r--  gcc/cfgloop.c          |  2
-rw-r--r--  gcc/df.c               |  8
-rw-r--r--  gcc/global.c           |  2
-rw-r--r--  gcc/tree-ssa-reassoc.c |  2
-rw-r--r--  gcc/var-tracking.c     |  4
8 files changed, 95 insertions, 35 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 300c0f748bf..0d8c853d3b6 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,28 @@
+2005-12-20  Kenneth Zadeck <zadeck@naturalbridge.com>
+
+        * cfganal.c (flow_reverse_top_sort_order_compute):
+        Renamed to post_order_compute and additional parameter added which
+        allows the inclusion of entry and exit blocks into list.
+        (mark_dfs_back_edges): Fixed comment.
+        (flow_depth_first_order_compute): Renamed to
+        pre_and_rev_post_order_compute additional parameter added which
+        allows the inclusion of entry and exit blocks into list.
+        * global.c (set_up_bb_rts_numbers): Call to
+        flow_reverse_top_sort_order_compute renamed to
+        post_order_compute.
+        * var-tracking.c (vt_stack_adjustments): Fixed comment.
+        (vt_find_locations): Call to
+        flow_depth_first_order_compute renamed to
+        pre_and_rev_post_order_compute.
+        * cfgloop.c (flow_find_loops): Ditto.
+        * tree-ssa-reassoc.c (init_reassoc): Ditto.
+        * df.c (df_analyze_1, df_analyze_subcfg): Calls to
+        flow_reverse_top_sort_order_compute renamed to post_order_compute
+        and calls to flow_reverse_top_sort_order_compute renamed to
+        post_order_compute.
+        * basic_block.h: Ditto.
+
+
 2005-12-20  Roger Sayle  <roger@eyesopen.com>
             Paolo Bonzini  <bonzini@gnu.org>
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index 8407f3a7e3c..69dba56dd81 100644
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -503,8 +503,8 @@ extern edge redirect_edge_succ_nodup (edge, basic_block);
 extern void redirect_edge_pred (edge, basic_block);
 extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
 extern void clear_bb_flags (void);
-extern void flow_reverse_top_sort_order_compute (int *);
-extern int flow_depth_first_order_compute (int *, int *);
+extern int post_order_compute (int *, bool);
+extern int pre_and_rev_post_order_compute (int *, int *, bool);
 extern int dfs_enumerate_from (basic_block, int, bool (*)(basic_block, void *),
                                basic_block *, int, void *);
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index e005388d4a3..efe5db7f6f5 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -152,7 +152,7 @@ could_fall_through (basic_block src, basic_block target)
      Steven Muchnick
      Morgan Kaufmann, 1997
 
-   and heavily borrowed from flow_depth_first_order_compute.  */
+   and heavily borrowed from pre_and_rev_post_order_compute.  */
 
 bool
 mark_dfs_back_edges (void)
@@ -645,16 +645,20 @@ connect_infinite_loops_to_exit (void)
   return;
 }
 
-/* Compute reverse top sort order.  */
+/* Compute reverse top sort order.
+   This is computing a post order numbering of the graph.  */
 
-void
-flow_reverse_top_sort_order_compute (int *rts_order)
+int
+post_order_compute (int *post_order, bool include_entry_exit)
 {
   edge_iterator *stack;
   int sp;
-  int postnum = 0;
+  int post_order_num = 0;
   sbitmap visited;
 
+  if (include_entry_exit)
+    post_order[post_order_num++] = EXIT_BLOCK;
+
   /* Allocate stack for back-tracking up CFG.  */
   stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
   sp = 0;
@@ -690,12 +694,12 @@ flow_reverse_top_sort_order_compute (int *rts_order)
                time, check its successors.  */
             stack[sp++] = ei_start (dest->succs);
           else
-            rts_order[postnum++] = dest->index;
+            post_order[post_order_num++] = dest->index;
         }
       else
         {
           if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
-            rts_order[postnum++] = src->index;
+            post_order[post_order_num++] = src->index;
 
           if (!ei_one_before_end_p (ei))
             ei_next (&stack[sp - 1]);
@@ -704,30 +708,50 @@ flow_reverse_top_sort_order_compute (int *rts_order)
         }
     }
 
+  if (include_entry_exit)
+    post_order[post_order_num++] = ENTRY_BLOCK;
+
   free (stack);
   sbitmap_free (visited);
+  return post_order_num;
 }
 
 /* Compute the depth first search order and store in the array
-  DFS_ORDER if nonzero, marking the nodes visited in VISITED.  If
-  RC_ORDER is nonzero, return the reverse completion number for each
+  PRE_ORDER if nonzero, marking the nodes visited in VISITED.  If
+  REV_POST_ORDER is nonzero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
-  possible.  */
+  possible.
+
+  pre_order is a really a preorder numbering of the graph.
+  rev_post_order is really a reverse postorder numbering of the graph.
+  */
 
 int
-flow_depth_first_order_compute (int *dfs_order, int *rc_order)
+pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
+                                bool include_entry_exit)
 {
   edge_iterator *stack;
   int sp;
-  int dfsnum = 0;
-  int rcnum = n_basic_blocks - 1 - NUM_FIXED_BLOCKS;
+  int pre_order_num = 0;
+  int rev_post_order_num = n_basic_blocks - 1;
   sbitmap visited;
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
   sp = 0;
 
+  if (include_entry_exit)
+    {
+      if (pre_order)
+        pre_order[pre_order_num] = ENTRY_BLOCK;
+      pre_order_num++;
+      if (rev_post_order)
+        rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
+    }
+  else
+    rev_post_order_num -= NUM_FIXED_BLOCKS;
+
   /* Allocate bitmap to track nodes that have been visited.  */
   visited = sbitmap_alloc (last_basic_block);
@@ -754,27 +778,27 @@ flow_depth_first_order_compute (int *dfs_order, int *rc_order)
           /* Mark that we have visited the destination.  */
           SET_BIT (visited, dest->index);
 
-          if (dfs_order)
-            dfs_order[dfsnum] = dest->index;
+          if (pre_order)
+            pre_order[pre_order_num] = dest->index;
 
-          dfsnum++;
+          pre_order_num++;
 
           if (EDGE_COUNT (dest->succs) > 0)
             /* Since the DEST node has been visited for the first
                time, check its successors.  */
             stack[sp++] = ei_start (dest->succs);
-          else if (rc_order)
+          else if (rev_post_order)
             /* There are no successors for the DEST node so assign
                its reverse completion number.  */
-            rc_order[rcnum--] = dest->index;
+            rev_post_order[rev_post_order_num--] = dest->index;
         }
       else
         {
           if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR
-              && rc_order)
+              && rev_post_order)
             /* There are no more successors for the SRC node so
                assign its reverse completion number.  */
-            rc_order[rcnum--] = src->index;
+            rev_post_order[rev_post_order_num--] = src->index;
 
           if (!ei_one_before_end_p (ei))
             ei_next (&stack[sp - 1]);
@@ -786,11 +810,22 @@ flow_depth_first_order_compute (int *dfs_order, int *rc_order)
   free (stack);
   sbitmap_free (visited);
 
-  /* The number of nodes visited should be the number of blocks minus
-     the entry and exit blocks which are not visited here.  */
-  gcc_assert (dfsnum == n_basic_blocks - NUM_FIXED_BLOCKS);
+  if (include_entry_exit)
+    {
+      if (pre_order)
+        pre_order[pre_order_num] = EXIT_BLOCK;
+      pre_order_num++;
+      if (rev_post_order)
+        rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
+      /* The number of nodes visited should be the number of blocks.  */
+      gcc_assert (pre_order_num == n_basic_blocks);
+    }
+  else
+    /* The number of nodes visited should be the number of blocks minus
+       the entry and exit blocks which are not visited here.  */
+    gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);
 
-  return dfsnum;
+  return pre_order_num;
 }
 
 /* Compute the depth first search order on the _reverse_ graph and
diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
index dabeacf214e..7629c917016 100644
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -696,7 +696,7 @@ flow_loops_find (struct loops *loops)
          natural loops will be found before inner natural loops.  */
       dfs_order = xmalloc (n_basic_blocks * sizeof (int));
       rc_order = xmalloc (n_basic_blocks * sizeof (int));
-      flow_depth_first_order_compute (dfs_order, rc_order);
+      pre_and_rev_post_order_compute (dfs_order, rc_order, false);
 
       /* Save CFG derived information to avoid recomputing it.  */
       loops->cfg.dfs_order = dfs_order;
diff --git a/gcc/df.c b/gcc/df.c
--- a/gcc/df.c
+++ b/gcc/df.c
@@ -1996,8 +1996,8 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
   df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
   df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
 
-  flow_depth_first_order_compute (df->dfs_order, df->rc_order);
-  flow_reverse_top_sort_order_compute (df->rts_order);
+  pre_and_rev_post_order_compute (df->dfs_order, df->rc_order, false);
+  post_order_compute (df->rts_order, false);
   if (aflags & DF_RD)
     {
       /* Compute the sets of gens and kills for the defs of each bb.  */
@@ -2424,8 +2424,8 @@ df_analyze_subcfg (struct df *df, bitmap blocks, int flags)
   df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
   df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
 
-  flow_depth_first_order_compute (df->dfs_order, df->rc_order);
-  flow_reverse_top_sort_order_compute (df->rts_order);
+  pre_and_rev_post_order_compute (df->dfs_order, df->rc_order, false);
+  post_order_compute (df->rts_order, false);
 
   n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks);
   prune_to_subcfg (df->rc_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks);
diff --git a/gcc/global.c b/gcc/global.c
index e5ecd3266ef..e92a4cb7a95 100644
--- a/gcc/global.c
+++ b/gcc/global.c
@@ -2282,7 +2282,7 @@ set_up_bb_rts_numbers (void)
   int *rts_order;
 
   rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
-  flow_reverse_top_sort_order_compute (rts_order);
+  post_order_compute (rts_order, false);
   for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
   free (rts_order);
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index b8dfbfc6946..97492b8fb7f 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -1424,7 +1424,7 @@ init_reassoc (void)
 
   /* Reverse RPO (Reverse Post Order) will give us something where
      deeper loops come later.  */
-  flow_depth_first_order_compute (NULL, bbs);
+  pre_and_rev_post_order_compute (NULL, bbs, false);
   bb_rank = xcalloc (last_basic_block + 1, sizeof (unsigned int));
   operand_rank = htab_create (511, operand_entry_hash,
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c
index 766f6ef3aa1..046cab46d31 100644
--- a/gcc/var-tracking.c
+++ b/gcc/var-tracking.c
@@ -485,7 +485,7 @@ bb_stack_adjust_offset (basic_block bb)
 
 /* Compute stack adjustments for all blocks by traversing DFS tree.
    Return true when the adjustments on all incoming edges are consistent.
-   Heavily borrowed from flow_depth_first_order_compute.  */
+   Heavily borrowed from pre_and_rev_post_order_compute.  */
 
 static bool
 vt_stack_adjustments (void)
@@ -1640,7 +1640,7 @@ vt_find_locations (void)
      so that the data-flow runs faster.  */
   rc_order = xmalloc ((n_basic_blocks - NUM_FIXED_BLOCKS) * sizeof (int));
   bb_order = xmalloc (last_basic_block * sizeof (int));
-  flow_depth_first_order_compute (NULL, rc_order);
+  pre_and_rev_post_order_compute (NULL, rc_order, false);
   for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
     bb_order[rc_order[i]] = i;
   free (rc_order);
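The renaming follows standard DFS terminology.  As an aside (not part of the
commit), the difference between a preorder, a postorder, and a reverse
postorder numbering can be seen on a toy diamond-shaped graph with the
self-contained program below; it uses plain recursion instead of the explicit
edge-iterator stack in cfganal.c, and every name in it is invented for the
example.

#include <stdio.h>

/* Toy graph: 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3 (a diamond).  */
#define N 4
static const int succ[N][2] = { {1, 2}, {3, -1}, {3, -1}, {-1, -1} };
static const int n_succ[N] = { 2, 1, 1, 0 };

static int visited[N];
static int pre_order[N], post_order[N], rev_post_order[N];
static int pre_num, post_num, rev_post_num;

/* Depth-first walk recording the three orderings.  */
static void
dfs (int v)
{
  int i;
  visited[v] = 1;
  pre_order[pre_num++] = v;            /* preorder: numbered on first visit */
  for (i = 0; i < n_succ[v]; i++)
    if (!visited[succ[v][i]])
      dfs (succ[v][i]);
  post_order[post_num++] = v;          /* postorder: numbered when finished */
  rev_post_order[rev_post_num--] = v;  /* reverse postorder: same event,
                                          but filled from the back */
}

int
main (void)
{
  int i;
  rev_post_num = N - 1;
  dfs (0);
  /* Prints pre: 0 1 3 2, post: 3 1 2 0, rpo: 0 2 1 3.  */
  printf ("pre:  ");
  for (i = 0; i < N; i++)
    printf ("%d ", pre_order[i]);
  printf ("\npost: ");
  for (i = 0; i < N; i++)
    printf ("%d ", post_order[i]);
  printf ("\nrpo:  ");
  for (i = 0; i < N; i++)
    printf ("%d ", rev_post_order[i]);
  printf ("\n");
  return 0;
}

Reverse postorder is what the renamed pre_and_rev_post_order_compute stores
through its second argument: ignoring back edges, every block appears before
its successors, which is why callers such as df.c and var-tracking.c use it to
order forward dataflow iteration.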