summary | refs | log | tree | commit | diff
path: root/gcc/bb-reorder.c
diff options
context:
space:
mode:
author    bernds <bernds@138bc75d-0d04-0410-961f-82ee72b054a4>    2011-09-07 17:38:29 +0000
committer bernds <bernds@138bc75d-0d04-0410-961f-82ee72b054a4>    2011-09-07 17:38:29 +0000
commit   4e018137d6241d9d1a8efae9bbe09b0a01e3db01 (patch)
tree     100018d65570ea82bfb6bb98f81863956490c2cd /gcc/bb-reorder.c
parent   bf42c975f0127222935364350a8c0ff092297602 (diff)
download gcc-4e018137d6241d9d1a8efae9bbe09b0a01e3db01.tar.gz
* bb-reorder.c (insert_section_boundary_note): Don't check
optimize_function_for_speed_p.
(gate_handle_partition_blocks): Do it here instead.
(gate_handle_reorder_blocks): Move preliminary checks here ...
(rest_of_handle_reorder_blocks): ... from here.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@178657 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/bb-reorder.c')
-rw-r--r--    gcc/bb-reorder.c    33
1 file changed, 17 insertions(+), 16 deletions(-)
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index d0ed8ea7bd3..874ece2a9ad 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -1965,8 +1965,7 @@ insert_section_boundary_note (void)
rtx new_note;
int first_partition = 0;
- if (!flag_reorder_blocks_and_partition
- || !optimize_function_for_speed_p (cfun))
+ if (!flag_reorder_blocks_and_partition)
return;
FOR_EACH_BB (bb)
@@ -2296,7 +2295,17 @@ gate_handle_reorder_blocks (void)
{
if (targetm.cannot_modify_jumps_p ())
return false;
- return (optimize > 0);
+ /* Don't reorder blocks when optimizing for size because extra jump insns may
+ be created; also barrier may create extra padding.
+
+ More correctly we should have a block reordering mode that tried to
+ minimize the combined size of all the jumps. This would more or less
+ automatically remove extra jumps, but would also try to use more short
+ jumps instead of long jumps. */
+ if (!optimize_function_for_speed_p (cfun))
+ return false;
+ return (optimize > 0
+ && (flag_reorder_blocks || flag_reorder_blocks_and_partition));
}
@@ -2310,19 +2319,8 @@ rest_of_handle_reorder_blocks (void)
splitting possibly introduced more crossjumping opportunities. */
cfg_layout_initialize (CLEANUP_EXPENSIVE);
- if ((flag_reorder_blocks || flag_reorder_blocks_and_partition)
- /* Don't reorder blocks when optimizing for size because extra jump insns may
- be created; also barrier may create extra padding.
-
- More correctly we should have a block reordering mode that tried to
- minimize the combined size of all the jumps. This would more or less
- automatically remove extra jumps, but would also try to use more short
- jumps instead of long jumps. */
- && optimize_function_for_speed_p (cfun))
- {
- reorder_basic_blocks ();
- cleanup_cfg (CLEANUP_EXPENSIVE);
- }
+ reorder_basic_blocks ();
+ cleanup_cfg (CLEANUP_EXPENSIVE);
FOR_EACH_BB (bb)
if (bb->next_bb != EXIT_BLOCK_PTR)
@@ -2362,6 +2360,9 @@ gate_handle_partition_blocks (void)
arises. */
return (flag_reorder_blocks_and_partition
&& optimize
+ /* See gate_handle_reorder_blocks. We should not partition if
+ we are going to omit the reordering. */
+ && optimize_function_for_speed_p (cfun)
&& !DECL_ONE_ONLY (current_function_decl)
&& !user_defined_section_attribute);
}