comparison gcc/tree-cfgcleanup.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
columns compare 111:04ced10e8804 (left: old line number and text) with 131:84e7813d76e9 (right: new line number and text)
1 /* CFG cleanup for trees. 1 /* CFG cleanup for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc. 2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify 6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by 7 it under the terms of the GNU General Public License as published by
82 { 82 {
83 if (gimple_switch_num_labels (swtch) != 2) 83 if (gimple_switch_num_labels (swtch) != 2)
84 return false; 84 return false;
85 85
86 tree index = gimple_switch_index (swtch); 86 tree index = gimple_switch_index (swtch);
87 tree default_label = CASE_LABEL (gimple_switch_default_label (swtch));
88 tree label = gimple_switch_label (swtch, 1); 87 tree label = gimple_switch_label (swtch, 1);
89 tree low = CASE_LOW (label); 88 tree low = CASE_LOW (label);
90 tree high = CASE_HIGH (label); 89 tree high = CASE_HIGH (label);
91 90
92 basic_block default_bb = label_to_block_fn (cfun, default_label); 91 basic_block default_bb = gimple_switch_default_bb (cfun, swtch);
93 basic_block case_bb = label_to_block_fn (cfun, CASE_LABEL (label)); 92 basic_block case_bb = label_to_block (cfun, CASE_LABEL (label));
94 93
95 basic_block bb = gimple_bb (swtch); 94 basic_block bb = gimple_bb (swtch);
96 gcond *cond; 95 gcond *cond;
97 96
98 /* Replace switch statement with condition statement. */ 97 /* Replace switch statement with condition statement. */
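The hunk above touches convert_single_case_switch, which only fires when the switch has exactly one case label besides the default and then rewrites the whole switch into a single condition. A rough source-level analogue of that rewrite, assuming a made-up 5..10 case range and made-up function names (the real code works on GIMPLE, not C source):

/* Hypothetical source-level analogue of convert_single_case_switch:
   a switch with one (range) case plus a default behaves like a single
   range test.  The 5..10 range and the function names are made up.  */
#include <stdio.h>

static int with_switch (int x)
{
  switch (x)
    {
    case 5 ... 10:        /* GNU case range: CASE_LOW = 5, CASE_HIGH = 10 */
      return 1;
    default:
      return 0;
    }
}

static int with_cond (int x)
{
  /* Conceptually what the cleanup emits: one condition, two edges.  */
  return (x >= 5 && x <= 10) ? 1 : 0;
}

int main (void)
{
  for (int x = 0; x < 16; x++)
    if (with_switch (x) != with_cond (x))
      printf ("mismatch at %d\n", x);
  return 0;
}

When the single case has no CASE_HIGH, the generated condition is a plain equality test instead of the range check.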
120 119
121 /* Disconnect an unreachable block in the control expression starting 120 /* Disconnect an unreachable block in the control expression starting
122 at block BB. */ 121 at block BB. */
123 122
124 static bool 123 static bool
125 cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi, 124 cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
126 bool first_p)
127 { 125 {
128 edge taken_edge; 126 edge taken_edge;
129 bool retval = false; 127 bool retval = false;
130 gimple *stmt = gsi_stmt (gsi); 128 gimple *stmt = gsi_stmt (gsi);
131 129
144 142
145 fold_defer_overflow_warnings (); 143 fold_defer_overflow_warnings ();
146 switch (gimple_code (stmt)) 144 switch (gimple_code (stmt))
147 { 145 {
148 case GIMPLE_COND: 146 case GIMPLE_COND:
149 /* During a first iteration on the CFG only remove trivially 147 {
150 dead edges but mark other conditions for re-evaluation. */ 148 gimple_match_op res_op;
151 if (first_p) 149 if (gimple_simplify (stmt, &res_op, NULL, no_follow_ssa_edges,
152 { 150 no_follow_ssa_edges)
153 val = const_binop (gimple_cond_code (stmt), boolean_type_node, 151 && res_op.code == INTEGER_CST)
154 gimple_cond_lhs (stmt), 152 val = res_op.ops[0];
155 gimple_cond_rhs (stmt)); 153 }
156 if (! val)
157 bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
158 }
159 else
160 {
161 code_helper rcode;
162 tree ops[3] = {};
163 if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges,
164 no_follow_ssa_edges)
165 && rcode == INTEGER_CST)
166 val = ops[0];
167 }
168 break; 154 break;
169 155
170 case GIMPLE_SWITCH: 156 case GIMPLE_SWITCH:
171 val = gimple_switch_index (as_a <gswitch *> (stmt)); 157 val = gimple_switch_index (as_a <gswitch *> (stmt));
172 break; 158 break;
233 219
234 /* Try to remove superfluous control structures in basic block BB. Returns 220 /* Try to remove superfluous control structures in basic block BB. Returns
235 true if anything changes. */ 221 true if anything changes. */
236 222
237 static bool 223 static bool
238 cleanup_control_flow_bb (basic_block bb, bool first_p) 224 cleanup_control_flow_bb (basic_block bb)
239 { 225 {
240 gimple_stmt_iterator gsi; 226 gimple_stmt_iterator gsi;
241 bool retval = false; 227 bool retval = false;
242 gimple *stmt; 228 gimple *stmt;
243 229
256 242
257 if (gimple_code (stmt) == GIMPLE_COND 243 if (gimple_code (stmt) == GIMPLE_COND
258 || gimple_code (stmt) == GIMPLE_SWITCH) 244 || gimple_code (stmt) == GIMPLE_SWITCH)
259 { 245 {
260 gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt); 246 gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
261 retval |= cleanup_control_expr_graph (bb, gsi, first_p); 247 retval |= cleanup_control_expr_graph (bb, gsi);
262 } 248 }
263 else if (gimple_code (stmt) == GIMPLE_GOTO 249 else if (gimple_code (stmt) == GIMPLE_GOTO
264 && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR 250 && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
265 && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0)) 251 && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
266 == LABEL_DECL)) 252 == LABEL_DECL))
277 edges which do not go to the right block. For the one 263 edges which do not go to the right block. For the one
278 edge which goes to the right block, fix up its flags. */ 264 edge which goes to the right block, fix up its flags. */
279 label = TREE_OPERAND (gimple_goto_dest (stmt), 0); 265 label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
280 if (DECL_CONTEXT (label) != cfun->decl) 266 if (DECL_CONTEXT (label) != cfun->decl)
281 return retval; 267 return retval;
282 target_block = label_to_block (label); 268 target_block = label_to_block (cfun, label);
283 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 269 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
284 { 270 {
285 if (e->dest != target_block) 271 if (e->dest != target_block)
286 remove_edge_and_dominated_blocks (e); 272 remove_edge_and_dominated_blocks (e);
287 else 273 else
357 343
358 FOR_EACH_EDGE (e, ei, bb->preds) 344 FOR_EACH_EDGE (e, ei, bb->preds)
359 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH)) 345 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
360 return false; 346 return false;
361 /* If goto_locus of any of the edges differs, prevent removing 347 /* If goto_locus of any of the edges differs, prevent removing
362 the forwarder block for -O0. */ 348 the forwarder block when not optimizing. */
363 else if (optimize == 0 && e->goto_locus != locus) 349 else if (!optimize
350 && (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
351 || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
352 && e->goto_locus != locus)
364 return false; 353 return false;
365 } 354 }
366 355
367 /* Now walk through the statements backward. We can ignore labels, 356 /* Now walk through the statements backward. We can ignore labels,
368 anything else means this is not a forwarder block. */ 357 anything else means this is not a forwarder block. */
373 switch (gimple_code (stmt)) 362 switch (gimple_code (stmt))
374 { 363 {
375 case GIMPLE_LABEL: 364 case GIMPLE_LABEL:
376 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt)))) 365 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
377 return false; 366 return false;
378 if (optimize == 0 && gimple_location (stmt) != locus) 367 if (!optimize
368 && (gimple_has_location (stmt)
369 || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
370 && gimple_location (stmt) != locus)
379 return false; 371 return false;
380 break; 372 break;
381 373
382 /* ??? For now, hope there's a corresponding debug 374 /* ??? For now, hope there's a corresponding debug
383 assignment at the destination. */ 375 assignment at the destination. */
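The !optimize hunks above refine when a forwarder block may be removed at -O0: a goto_locus or statement location mismatch now only blocks the removal if at least one of the two locations is actually known. A minimal standalone sketch of that predicate, with 0 standing in for UNKNOWN_LOCATION and an illustrative name:

/* Hypothetical sketch of the refined location check above; 0 stands in
   for UNKNOWN_LOCATION, and the function name is illustrative only.  */
#include <stdbool.h>
#include <stdio.h>

static bool location_mismatch_blocks_removal (unsigned a, unsigned b)
{
  /* A mismatch only counts when at least one side has a real location.  */
  return (a != 0 || b != 0) && a != b;
}

int main (void)
{
  printf ("%d\n", location_mismatch_blocks_removal (0, 0));  /* 0: both unknown */
  printf ("%d\n", location_mismatch_blocks_removal (7, 0));  /* 1: differ, one known */
  printf ("%d\n", location_mismatch_blocks_removal (7, 7));  /* 0: identical */
  return 0;
}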
454 static bool 446 static bool
455 remove_forwarder_block (basic_block bb) 447 remove_forwarder_block (basic_block bb)
456 { 448 {
457 edge succ = single_succ_edge (bb), e, s; 449 edge succ = single_succ_edge (bb), e, s;
458 basic_block dest = succ->dest; 450 basic_block dest = succ->dest;
459 gimple *label; 451 gimple *stmt;
460 edge_iterator ei; 452 edge_iterator ei;
461 gimple_stmt_iterator gsi, gsi_to; 453 gimple_stmt_iterator gsi, gsi_to;
462 bool can_move_debug_stmts; 454 bool can_move_debug_stmts;
463 455
464 /* We check for infinite loops already in tree_forwarder_block_p. 456 /* We check for infinite loops already in tree_forwarder_block_p.
467 if (dest == bb) 459 if (dest == bb)
468 return false; 460 return false;
469 461
470 /* If the destination block consists of a nonlocal label or is an 462 /* If the destination block consists of a nonlocal label or is an
471 EH landing pad, do not merge it. */ 463 EH landing pad, do not merge it. */
472 label = first_stmt (dest); 464 stmt = first_stmt (dest);
473 if (label) 465 if (stmt)
474 if (glabel *label_stmt = dyn_cast <glabel *> (label)) 466 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
475 if (DECL_NONLOCAL (gimple_label_label (label_stmt)) 467 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
476 || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0) 468 || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)
477 return false; 469 return false;
478 470
479 /* If there is an abnormal edge to basic block BB, but not into 471 /* If there is an abnormal edge to basic block BB, but not into
550 jump targets end up in a sane place and debug information for 542 jump targets end up in a sane place and debug information for
551 labels is retained. */ 543 labels is retained. */
552 gsi_to = gsi_start_bb (dest); 544 gsi_to = gsi_start_bb (dest);
553 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); ) 545 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
554 { 546 {
555 tree decl; 547 stmt = gsi_stmt (gsi);
556 label = gsi_stmt (gsi); 548 if (is_gimple_debug (stmt))
557 if (is_gimple_debug (label))
558 break; 549 break;
559 decl = gimple_label_label (as_a <glabel *> (label)); 550
551 /* Forwarder blocks can only contain labels and debug stmts, and
552 labels must come first, so if we get to this point, we know
553 we're looking at a label. */
554 tree decl = gimple_label_label (as_a <glabel *> (stmt));
560 if (EH_LANDING_PAD_NR (decl) != 0 555 if (EH_LANDING_PAD_NR (decl) != 0
561 || DECL_NONLOCAL (decl) 556 || DECL_NONLOCAL (decl)
562 || FORCED_LABEL (decl) 557 || FORCED_LABEL (decl)
563 || !DECL_ARTIFICIAL (decl)) 558 || !DECL_ARTIFICIAL (decl))
564 { 559 gsi_move_before (&gsi, &gsi_to);
565 gsi_remove (&gsi, false);
566 gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
567 }
568 else 560 else
569 gsi_next (&gsi); 561 gsi_next (&gsi);
570 } 562 }
571 563
572 /* Move debug statements if the destination has a single predecessor. */ 564 /* Move debug statements if the destination has a single predecessor. */
573 if (can_move_debug_stmts) 565 if (can_move_debug_stmts && !gsi_end_p (gsi))
574 { 566 {
575 gsi_to = gsi_after_labels (dest); 567 gsi_to = gsi_after_labels (dest);
576 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); ) 568 do
577 { 569 {
578 gimple *debug = gsi_stmt (gsi); 570 gimple *debug = gsi_stmt (gsi);
579 if (!is_gimple_debug (debug)) 571 gcc_assert (is_gimple_debug (debug));
580 break; 572 gsi_move_before (&gsi, &gsi_to);
581 gsi_remove (&gsi, false); 573 }
582 gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT); 574 while (!gsi_end_p (gsi));
583 }
584 } 575 }
585 576
586 bitmap_set_bit (cfgcleanup_altered_bbs, dest->index); 577 bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);
587 578
588 /* Update the dominators. */ 579 /* Update the dominators. */
696 return true; 687 return true;
697 return bb1->count.ok_for_merging (bb2->count); 688 return bb1->count.ok_for_merging (bb2->count);
698 } 689 }
699 690
700 691
701 /* Tries to clean up cfg in basic block BB. Returns true if anything 692 /* Tries to clean up cfg in basic block BB by merging blocks. Returns
702 changes. */ 693 true if anything changes. */
703 694
704 static bool 695 static bool
705 cleanup_tree_cfg_bb (basic_block bb) 696 cleanup_tree_cfg_bb (basic_block bb)
706 { 697 {
707 if (tree_forwarder_block_p (bb, false) 698 if (tree_forwarder_block_p (bb, false)
730 } 721 }
731 722
732 return false; 723 return false;
733 } 724 }
734 725
735 /* Iterate the cfg cleanups, while anything changes. */ 726 /* Do cleanup_control_flow_bb in PRE order. */
736 727
737 static bool 728 static bool
738 cleanup_tree_cfg_1 (void) 729 cleanup_control_flow_pre ()
739 { 730 {
740 bool retval = false; 731 bool retval = false;
741 basic_block bb; 732
742 unsigned i, n; 733 /* We want remove_edge_and_dominated_blocks to only remove edges,
743 734 not dominated blocks which it does when dom info isn't available.
744 /* Prepare the worklists of altered blocks. */ 735 Pretend so. */
745 cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL); 736 dom_state saved_state = dom_info_state (CDI_DOMINATORS);
746 737 set_dom_info_availability (CDI_DOMINATORS, DOM_NONE);
747 /* During forwarder block cleanup, we may redirect edges out of 738
748 SWITCH_EXPRs, which can get expensive. So we want to enable 739 auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
749 recording of edge to CASE_LABEL_EXPR. */ 740 auto_sbitmap visited (last_basic_block_for_fn (cfun));
750 start_recording_case_labels (); 741 bitmap_clear (visited);
751 742
752 /* We cannot use FOR_EACH_BB_FN for the BB iterations below 743 stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
753 since the basic blocks may get removed. */ 744
754 745 while (! stack.is_empty ())
755 /* Start by iterating over all basic blocks looking for edge removal 746 {
756 opportunities. Do this first because incoming SSA form may be 747 /* Look at the edge on the top of the stack. */
757 invalid and we want to avoid performing SSA related tasks such 748 edge_iterator ei = stack.last ();
758 as propagating out a PHI node during BB merging in that state. */ 749 basic_block dest = ei_edge (ei)->dest;
759 n = last_basic_block_for_fn (cfun); 750
760 for (i = NUM_FIXED_BLOCKS; i < n; i++) 751 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
761 { 752 && ! bitmap_bit_p (visited, dest->index))
762 bb = BASIC_BLOCK_FOR_FN (cfun, i); 753 {
763 if (bb) 754 bitmap_set_bit (visited, dest->index);
764 retval |= cleanup_control_flow_bb (bb, true); 755 /* We only possibly remove edges from DEST here, leaving
765 } 756 possibly unreachable code in the IL. */
766 757 retval |= cleanup_control_flow_bb (dest);
767 /* After doing the above SSA form should be valid (or an update SSA 758 if (EDGE_COUNT (dest->succs) > 0)
768 should be required). */ 759 stack.quick_push (ei_start (dest->succs));
769 760 }
770 /* Continue by iterating over all basic blocks looking for BB merging 761 else
771 opportunities. */ 762 {
772 n = last_basic_block_for_fn (cfun); 763 if (!ei_one_before_end_p (ei))
773 for (i = NUM_FIXED_BLOCKS; i < n; i++) 764 ei_next (&stack.last ());
774 { 765 else
775 bb = BASIC_BLOCK_FOR_FN (cfun, i); 766 stack.pop ();
776 if (bb) 767 }
777 retval |= cleanup_tree_cfg_bb (bb); 768 }
778 } 769
779 770 set_dom_info_availability (CDI_DOMINATORS, saved_state);
780 /* Now process the altered blocks, as long as any are available. */ 771
781 while (!bitmap_empty_p (cfgcleanup_altered_bbs)) 772 /* We are deleting BBs in non-reverse dominator order, make sure
782 { 773 insert_debug_temps_for_defs is prepared for that. */
783 i = bitmap_first_set_bit (cfgcleanup_altered_bbs); 774 if (retval)
784 bitmap_clear_bit (cfgcleanup_altered_bbs, i); 775 free_dominance_info (CDI_DOMINATORS);
785 if (i < NUM_FIXED_BLOCKS) 776
786 continue; 777 /* Remove all now (and previously) unreachable blocks. */
787 778 for (int i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); ++i)
788 bb = BASIC_BLOCK_FOR_FN (cfun, i); 779 {
789 if (!bb) 780 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
790 continue; 781 if (bb && !bitmap_bit_p (visited, bb->index))
791 782 {
792 retval |= cleanup_control_flow_bb (bb, false); 783 if (!retval)
793 retval |= cleanup_tree_cfg_bb (bb); 784 free_dominance_info (CDI_DOMINATORS);
794 } 785 delete_basic_block (bb);
795 786 retval = true;
796 end_recording_case_labels (); 787 }
797 BITMAP_FREE (cfgcleanup_altered_bbs); 788 }
789
798 return retval; 790 return retval;
799 } 791 }
800 792
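cleanup_control_flow_pre above walks the CFG in preorder with an explicit stack of edge iterators and a visited bitmap, so that each reachable block is processed exactly once, and everything left unvisited afterwards is deleted as unreachable. A minimal standalone sketch of that traversal scheme on a plain adjacency-list graph (plain arrays stand in for GCC's CFG types; the graph is made up):

/* Hypothetical illustration of the preorder walk used by
   cleanup_control_flow_pre: an explicit stack of (block, next-successor)
   pairs plus a visited set, then deletion of unvisited blocks.  */
#include <stdbool.h>
#include <stdio.h>

#define N 6

/* succ[b] lists the successors of block b, terminated by -1.  */
static const int succ[N][N] = {
  {1, 2, -1}, {3, -1}, {3, -1}, {4, -1}, {-1}, {1, -1} /* block 5 is unreachable */
};

struct frame { int block; int next; };

int main (void)
{
  bool visited[N] = { false };
  struct frame stack[N + 1];
  int sp = 0;

  visited[0] = true;                   /* entry block */
  stack[sp++] = (struct frame) { 0, 0 };

  while (sp > 0)
    {
      struct frame *f = &stack[sp - 1];
      int dest = succ[f->block][f->next];
      if (dest < 0)
        { sp--; continue; }            /* all successors seen, pop */
      f->next++;
      if (!visited[dest])
        {
          visited[dest] = true;        /* first visit = preorder processing */
          printf ("visit block %d\n", dest);
          stack[sp++] = (struct frame) { dest, 0 };
        }
    }

  for (int b = 0; b < N; b++)
    if (!visited[b])
      printf ("block %d is unreachable, would be deleted\n", b);
  return 0;
}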
801 static bool 793 static bool
802 mfb_keep_latches (edge e) 794 mfb_keep_latches (edge e)
803 { 795 {
804 return ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest); 796 return !((dom_info_available_p (CDI_DOMINATORS)
797 && dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
798 || (e->flags & EDGE_DFS_BACK));
805 } 799 }
806 800
807 /* Remove unreachable blocks and other miscellaneous cleanup work. 801 /* Remove unreachable blocks and other miscellaneous cleanup work.
808 Return true if the flowgraph was modified, false otherwise. */ 802 Return true if the flowgraph was modified, false otherwise. */
809 803
810 static bool 804 static bool
811 cleanup_tree_cfg_noloop (void) 805 cleanup_tree_cfg_noloop (void)
812 { 806 {
813 bool changed;
814
815 timevar_push (TV_TREE_CLEANUP_CFG); 807 timevar_push (TV_TREE_CLEANUP_CFG);
816
817 /* Iterate until there are no more cleanups left to do. If any
818 iteration changed the flowgraph, set CHANGED to true.
819
820 If dominance information is available, there cannot be any unreachable
821 blocks. */
822 if (!dom_info_available_p (CDI_DOMINATORS))
823 {
824 changed = delete_unreachable_blocks ();
825 calculate_dominance_info (CDI_DOMINATORS);
826 }
827 else
828 {
829 checking_verify_dominators (CDI_DOMINATORS);
830 changed = false;
831 }
832 808
833 /* Ensure that we have single entries into loop headers. Otherwise 809 /* Ensure that we have single entries into loop headers. Otherwise
834 if one of the entries is becoming a latch due to CFG cleanup 810 if one of the entries is becoming a latch due to CFG cleanup
835 (from formerly being part of an irreducible region) then we mess 811 (from formerly being part of an irreducible region) then we mess
836 up loop fixup and associate the old loop with a different region 812 up loop fixup and associate the old loop with a different region
837 which makes niter upper bounds invalid. See for example PR80549. 813 which makes niter upper bounds invalid. See for example PR80549.
838 This needs to be done before we remove trivially dead edges as 814 This needs to be done before we remove trivially dead edges as
839 we need to capture the dominance state before the pending transform. */ 815 we need to capture the dominance state before the pending transform. */
840 if (current_loops) 816 if (current_loops)
841 { 817 {
818 /* This needs backedges or dominators. */
819 if (!dom_info_available_p (CDI_DOMINATORS))
820 mark_dfs_back_edges ();
821
842 loop_p loop; 822 loop_p loop;
843 unsigned i; 823 unsigned i;
844 FOR_EACH_VEC_ELT (*get_loops (cfun), i, loop) 824 FOR_EACH_VEC_ELT (*get_loops (cfun), i, loop)
845 if (loop && loop->header) 825 if (loop && loop->header)
846 { 826 {
858 if (e->flags & EDGE_ABNORMAL) 838 if (e->flags & EDGE_ABNORMAL)
859 { 839 {
860 any_abnormal = true; 840 any_abnormal = true;
861 break; 841 break;
862 } 842 }
863 if (dominated_by_p (CDI_DOMINATORS, e->src, bb)) 843 if ((dom_info_available_p (CDI_DOMINATORS)
844 && dominated_by_p (CDI_DOMINATORS, e->src, bb))
845 || (e->flags & EDGE_DFS_BACK))
864 { 846 {
865 found_latch = true; 847 found_latch = true;
866 continue; 848 continue;
867 } 849 }
868 n++; 850 n++;
886 } 868 }
887 } 869 }
888 } 870 }
889 } 871 }
890 872
891 changed |= cleanup_tree_cfg_1 (); 873 /* Prepare the worklists of altered blocks. */
874 cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);
875
876 /* Start by iterating over all basic blocks in PRE order looking for
877 edge removal opportunities. Do this first because incoming SSA form
878 may be invalid and we want to avoid performing SSA related tasks such
879 as propagating out a PHI node during BB merging in that state. This
880 also gets rid of unreachable blocks. */
881 bool changed = cleanup_control_flow_pre ();
882
883 /* After doing the above SSA form should be valid (or an update SSA
884 should be required). */
885
886 /* Compute dominator info which we need for the iterative process below. */
887 if (!dom_info_available_p (CDI_DOMINATORS))
888 calculate_dominance_info (CDI_DOMINATORS);
889 else
890 checking_verify_dominators (CDI_DOMINATORS);
891
892 /* During forwarder block cleanup, we may redirect edges out of
893 SWITCH_EXPRs, which can get expensive. So we want to enable
894 recording of edge to CASE_LABEL_EXPR. */
895 start_recording_case_labels ();
896
897 /* Continue by iterating over all basic blocks looking for BB merging
898 opportunities. We cannot use FOR_EACH_BB_FN for the BB iteration
899 since the basic blocks may get removed. */
900 unsigned n = last_basic_block_for_fn (cfun);
901 for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
902 {
903 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
904 if (bb)
905 changed |= cleanup_tree_cfg_bb (bb);
906 }
907
908 /* Now process the altered blocks, as long as any are available. */
909 while (!bitmap_empty_p (cfgcleanup_altered_bbs))
910 {
911 unsigned i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
912 bitmap_clear_bit (cfgcleanup_altered_bbs, i);
913 if (i < NUM_FIXED_BLOCKS)
914 continue;
915
916 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
917 if (!bb)
918 continue;
919
920 /* BB merging done by cleanup_tree_cfg_bb can end up propagating
921 out single-argument PHIs which in turn can expose
922 cleanup_control_flow_bb opportunities so we have to repeat
923 that here. */
924 changed |= cleanup_control_flow_bb (bb);
925 changed |= cleanup_tree_cfg_bb (bb);
926 }
927
928 end_recording_case_labels ();
929 BITMAP_FREE (cfgcleanup_altered_bbs);
892 930
893 gcc_assert (dom_info_available_p (CDI_DOMINATORS)); 931 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
894 932
895 /* Do not renumber blocks if the SCEV cache is active, it is indexed by 933 /* Do not renumber blocks if the SCEV cache is active, it is indexed by
896 basic-block numbers. */ 934 basic-block numbers. */
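The tail of cleanup_tree_cfg_noloop above is a classic worklist: blocks touched by earlier transforms are recorded in cfgcleanup_altered_bbs and re-examined until the bitmap drains, skipping the fixed entry/exit blocks. A small standalone sketch of that pattern, with a plain bit set in place of GCC's bitmap and a stubbed-out processing step (all names below are made up):

/* Hypothetical sketch of the altered-blocks worklist pattern above.
   A simple bool array stands in for cfgcleanup_altered_bbs, and
   process() stands in for cleanup_control_flow_bb / cleanup_tree_cfg_bb.  */
#include <stdbool.h>
#include <stdio.h>

#define NBLOCKS 8
#define NUM_FIXED 2           /* mirrors NUM_FIXED_BLOCKS: entry and exit */

static bool altered[NBLOCKS];

static int first_set (void)
{
  for (int i = 0; i < NBLOCKS; i++)
    if (altered[i])
      return i;
  return -1;
}

/* Pretend that processing block 3 dirties block 5 once.  */
static bool process (int bb)
{
  static bool done = false;
  printf ("re-examining block %d\n", bb);
  if (bb == 3 && !done)
    {
      done = true;
      altered[5] = true;      /* a transform touched block 5, queue it */
      return true;
    }
  return false;
}

int main (void)
{
  bool changed = false;
  altered[3] = true;          /* seeded by earlier cleanups */

  int i;
  while ((i = first_set ()) >= 0)
    {
      altered[i] = false;     /* bitmap_clear_bit */
      if (i < NUM_FIXED)
        continue;             /* never touch the fixed entry/exit blocks */
      changed |= process (i);
    }

  printf ("changed = %d\n", changed);
  return 0;
}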
1286 int save_unnumbered = flag_dump_unnumbered; 1324 int save_unnumbered = flag_dump_unnumbered;
1287 int save_noaddr = flag_dump_noaddr; 1325 int save_noaddr = flag_dump_noaddr;
1288 1326
1289 flag_dump_noaddr = flag_dump_unnumbered = 1; 1327 flag_dump_noaddr = flag_dump_unnumbered = 1;
1290 fprintf (final_output, "\n"); 1328 fprintf (final_output, "\n");
1291 dump_enumerated_decls (final_output, dump_flags | TDF_NOUID); 1329 dump_enumerated_decls (final_output,
1330 dump_flags | TDF_SLIM | TDF_NOUID);
1292 flag_dump_noaddr = save_noaddr; 1331 flag_dump_noaddr = save_noaddr;
1293 flag_dump_unnumbered = save_unnumbered; 1332 flag_dump_unnumbered = save_unnumbered;
1294 if (fclose (final_output)) 1333 if (fclose (final_output))
1295 { 1334 {
1296 error ("could not close final insn dump file %qs: %m", 1335 error ("could not close final insn dump file %qs: %m",