comparison gcc/sel-sched-ir.c @ 63:b7f97abdc517 gcc-4.6-20100522

update gcc from gcc-4.5.0 to gcc-4.6
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 24 May 2010 12:47:05 +0900
parents 77e2b8dfacca
children f6334be47118
comparison of revisions 56:3c8a44c06a95 and 63:b7f97abdc517
1 /* Instruction scheduling pass. Selective scheduler and pipeliner. 1 /* Instruction scheduling pass. Selective scheduler and pipeliner.
2 Copyright (C) 2006, 2007, 2008, 2009 Free Software Foundation, Inc. 2 Copyright (C) 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
260 /* Add new fence consisting of INSN and STATE to the list pointed to by LP. */ 260 /* Add new fence consisting of INSN and STATE to the list pointed to by LP. */
261 static void 261 static void
262 flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc, 262 flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
263 insn_t last_scheduled_insn, VEC(rtx,gc) *executing_insns, 263 insn_t last_scheduled_insn, VEC(rtx,gc) *executing_insns,
264 int *ready_ticks, int ready_ticks_size, insn_t sched_next, 264 int *ready_ticks, int ready_ticks_size, insn_t sched_next,
265 int cycle, int cycle_issued_insns, 265 int cycle, int cycle_issued_insns, int issue_more,
266 bool starts_cycle_p, bool after_stall_p) 266 bool starts_cycle_p, bool after_stall_p)
267 { 267 {
268 fence_t f; 268 fence_t f;
269 269
270 _list_add (lp); 270 _list_add (lp);
285 285
286 gcc_assert (tc != NULL || targetm.sched.alloc_sched_context == NULL); 286 gcc_assert (tc != NULL || targetm.sched.alloc_sched_context == NULL);
287 FENCE_TC (f) = tc; 287 FENCE_TC (f) = tc;
288 288
289 FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn; 289 FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn;
290 FENCE_ISSUE_MORE (f) = issue_more;
290 FENCE_EXECUTING_INSNS (f) = executing_insns; 291 FENCE_EXECUTING_INSNS (f) = executing_insns;
291 FENCE_READY_TICKS (f) = ready_ticks; 292 FENCE_READY_TICKS (f) = ready_ticks;
292 FENCE_READY_TICKS_SIZE (f) = ready_ticks_size; 293 FENCE_READY_TICKS_SIZE (f) = ready_ticks_size;
293 FENCE_SCHED_NEXT (f) = sched_next; 294 FENCE_SCHED_NEXT (f) = sched_next;
294 295
616 NULL, /* executing_insns */ 617 NULL, /* executing_insns */
617 XCNEWVEC (int, ready_ticks_size), /* ready_ticks */ 618 XCNEWVEC (int, ready_ticks_size), /* ready_ticks */
618 ready_ticks_size, 619 ready_ticks_size,
619 NULL_RTX /* sched_next */, 620 NULL_RTX /* sched_next */,
620 1 /* cycle */, 0 /* cycle_issued_insns */, 621 1 /* cycle */, 0 /* cycle_issued_insns */,
622 issue_rate, /* issue_more */
621 1 /* starts_cycle_p */, 0 /* after_stall_p */); 623 1 /* starts_cycle_p */, 0 /* after_stall_p */);
622 } 624 }
623 } 625 }
624 626
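The new FENCE_ISSUE_MORE bookkeeping is threaded through every place a fence is created, with a fresh fence starting at the full issue budget (issue_rate). As a rough illustration of that pattern, here is a minimal, self-contained sketch in plain C; struct fence, fence_list_add and the integer fields are simplified stand-ins, not GCC's fence_t/flist_t:

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for GCC's fence_t: only the fields relevant
   to the issue_more change are modeled.  */
struct fence
{
  int insn_uid;             /* stands in for FENCE_INSN */
  int cycle;                /* FENCE_CYCLE */
  int cycle_issued_insns;   /* FENCE_ISSUED_INSNS */
  int issue_more;           /* FENCE_ISSUE_MORE, the field added here */
  struct fence *next;
};

/* Prepend a fence to *LP and fill its fields, mirroring how flist_add
   fills the element that _list_add just linked in.  */
static void
fence_list_add (struct fence **lp, int insn_uid, int cycle,
                int cycle_issued_insns, int issue_more)
{
  struct fence *f = calloc (1, sizeof *f);
  if (!f)
    abort ();
  f->insn_uid = insn_uid;
  f->cycle = cycle;
  f->cycle_issued_insns = cycle_issued_insns;
  f->issue_more = issue_more;
  f->next = *lp;
  *lp = f;
}

int
main (void)
{
  struct fence *fences = NULL;
  int issue_rate = 2;   /* assumption: a 2-issue target */

  /* A fresh fence starts with the full issue budget, matching the
     issue_rate argument passed for issue_more in the fence
     initialization hunk above.  */
  fence_list_add (&fences, 42, 1, 0, issue_rate);
  printf ("insn %d: issue_more = %d\n", fences->insn_uid,
          fences->issue_more);
  free (fences);
  return 0;
}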
625 /* Merges two fences (filling fields of fence F with resulting values) by 627 /* Merges two fences (filling fields of fence F with resulting values) by
627 propagated from fallthrough edge if it is available; 629 propagated from fallthrough edge if it is available;
628 2) deps context and cycle are propagated from more probable edge; 630 2) deps context and cycle are propagated from more probable edge;
629 3) all other fields are set to corresponding constant values. 631 3) all other fields are set to corresponding constant values.
630 632
631 INSN, STATE, DC, TC, LAST_SCHEDULED_INSN, EXECUTING_INSNS, 633 INSN, STATE, DC, TC, LAST_SCHEDULED_INSN, EXECUTING_INSNS,
632 READY_TICKS, READY_TICKS_SIZE, SCHED_NEXT, CYCLE and AFTER_STALL_P 634 READY_TICKS, READY_TICKS_SIZE, SCHED_NEXT, CYCLE, ISSUE_MORE
633 are the corresponding fields of the second fence. */ 635 and AFTER_STALL_P are the corresponding fields of the second fence. */
634 static void 636 static void
635 merge_fences (fence_t f, insn_t insn, 637 merge_fences (fence_t f, insn_t insn,
636 state_t state, deps_t dc, void *tc, 638 state_t state, deps_t dc, void *tc,
637 rtx last_scheduled_insn, VEC(rtx, gc) *executing_insns, 639 rtx last_scheduled_insn, VEC(rtx, gc) *executing_insns,
638 int *ready_ticks, int ready_ticks_size, 640 int *ready_ticks, int ready_ticks_size,
639 rtx sched_next, int cycle, bool after_stall_p) 641 rtx sched_next, int cycle, int issue_more, bool after_stall_p)
640 { 642 {
641 insn_t last_scheduled_insn_old = FENCE_LAST_SCHEDULED_INSN (f); 643 insn_t last_scheduled_insn_old = FENCE_LAST_SCHEDULED_INSN (f);
642 644
643 gcc_assert (sel_bb_head_p (FENCE_INSN (f)) 645 gcc_assert (sel_bb_head_p (FENCE_INSN (f))
644 && !sched_next && !FENCE_SCHED_NEXT (f)); 646 && !sched_next && !FENCE_SCHED_NEXT (f));
664 666
665 if (cycle > FENCE_CYCLE (f)) 667 if (cycle > FENCE_CYCLE (f))
666 FENCE_CYCLE (f) = cycle; 668 FENCE_CYCLE (f) = cycle;
667 669
668 FENCE_LAST_SCHEDULED_INSN (f) = NULL; 670 FENCE_LAST_SCHEDULED_INSN (f) = NULL;
671 FENCE_ISSUE_MORE (f) = issue_rate;
669 VEC_free (rtx, gc, executing_insns); 672 VEC_free (rtx, gc, executing_insns);
670 free (ready_ticks); 673 free (ready_ticks);
671 if (FENCE_EXECUTING_INSNS (f)) 674 if (FENCE_EXECUTING_INSNS (f))
672 VEC_block_remove (rtx, FENCE_EXECUTING_INSNS (f), 0, 675 VEC_block_remove (rtx, FENCE_EXECUTING_INSNS (f), 0,
673 VEC_length (rtx, FENCE_EXECUTING_INSNS (f))); 676 VEC_length (rtx, FENCE_EXECUTING_INSNS (f)));
695 698
696 reset_target_context (FENCE_TC (f), true); 699 reset_target_context (FENCE_TC (f), true);
697 delete_target_context (tc); 700 delete_target_context (tc);
698 701
699 FENCE_LAST_SCHEDULED_INSN (f) = NULL; 702 FENCE_LAST_SCHEDULED_INSN (f) = NULL;
703 FENCE_ISSUE_MORE (f) = issue_rate;
700 } 704 }
701 else 705 else
702 if (candidate->src == BLOCK_FOR_INSN (last_scheduled_insn)) 706 if (candidate->src == BLOCK_FOR_INSN (last_scheduled_insn))
703 { 707 {
704 /* Would be weird if same insn is successor of several fallthrough 708 /* Would be weird if same insn is successor of several fallthrough
711 715
712 delete_target_context (FENCE_TC (f)); 716 delete_target_context (FENCE_TC (f));
713 FENCE_TC (f) = tc; 717 FENCE_TC (f) = tc;
714 718
715 FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn; 719 FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn;
720 FENCE_ISSUE_MORE (f) = issue_more;
716 } 721 }
717 else 722 else
718 { 723 {
719 /* Leave STATE, TC and LAST_SCHEDULED_INSN fields untouched. */ 724 /* Leave STATE, TC and LAST_SCHEDULED_INSN fields untouched. */
720 state_free (state); 725 state_free (state);
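The three numbered rules in the merge_fences comment amount to a small priority scheme over the two candidate fences. A hypothetical, heavily compressed restatement (GCC's real merge_fences also merges dependence and target contexts, which this sketch ignores):

#include <stdio.h>

/* Toy fence carrying only the fields the merge policy mentions.  */
struct toy_fence
{
  int last_scheduled_insn;   /* 0 stands for NULL */
  int cycle;
  int issue_more;
};

/* Fold candidate CAND into F: the cycle keeps the later of the two,
   as the "if (cycle > FENCE_CYCLE (f))" test above does, while the
   issue bookkeeping either follows the fallthrough edge or is reset
   to constants, like the FENCE_ISSUE_MORE (f) = issue_rate branches.  */
static void
toy_merge_fences (struct toy_fence *f, const struct toy_fence *cand,
                  int cand_on_fallthru_path, int issue_rate)
{
  if (cand->cycle > f->cycle)
    f->cycle = cand->cycle;

  if (cand_on_fallthru_path)
    {
      f->last_scheduled_insn = cand->last_scheduled_insn;
      f->issue_more = cand->issue_more;
    }
  else
    {
      f->last_scheduled_insn = 0;
      f->issue_more = issue_rate;
    }
}

int
main (void)
{
  struct toy_fence f = { 7, 3, 1 };
  struct toy_fence cand = { 9, 5, 2 };

  toy_merge_fences (&f, &cand, 0, 2);
  printf ("cycle=%d issue_more=%d last=%d\n", f.cycle, f.issue_more,
          f.last_scheduled_insn);   /* cycle=5 issue_more=2 last=0 */
  return 0;
}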
797 static void 802 static void
798 add_to_fences (flist_tail_t new_fences, insn_t insn, 803 add_to_fences (flist_tail_t new_fences, insn_t insn,
799 state_t state, deps_t dc, void *tc, rtx last_scheduled_insn, 804 state_t state, deps_t dc, void *tc, rtx last_scheduled_insn,
800 VEC(rtx, gc) *executing_insns, int *ready_ticks, 805 VEC(rtx, gc) *executing_insns, int *ready_ticks,
801 int ready_ticks_size, rtx sched_next, int cycle, 806 int ready_ticks_size, rtx sched_next, int cycle,
802 int cycle_issued_insns, bool starts_cycle_p, bool after_stall_p) 807 int cycle_issued_insns, int issue_rate,
808 bool starts_cycle_p, bool after_stall_p)
803 { 809 {
804 fence_t f = flist_lookup (FLIST_TAIL_HEAD (new_fences), insn); 810 fence_t f = flist_lookup (FLIST_TAIL_HEAD (new_fences), insn);
805 811
806 if (! f) 812 if (! f)
807 { 813 {
808 flist_add (FLIST_TAIL_TAILP (new_fences), insn, state, dc, tc, 814 flist_add (FLIST_TAIL_TAILP (new_fences), insn, state, dc, tc,
809 last_scheduled_insn, executing_insns, ready_ticks, 815 last_scheduled_insn, executing_insns, ready_ticks,
810 ready_ticks_size, sched_next, cycle, cycle_issued_insns, 816 ready_ticks_size, sched_next, cycle, cycle_issued_insns,
811 starts_cycle_p, after_stall_p); 817 issue_rate, starts_cycle_p, after_stall_p);
812 818
813 FLIST_TAIL_TAILP (new_fences) 819 FLIST_TAIL_TAILP (new_fences)
814 = &FLIST_NEXT (*FLIST_TAIL_TAILP (new_fences)); 820 = &FLIST_NEXT (*FLIST_TAIL_TAILP (new_fences));
815 } 821 }
816 else 822 else
817 { 823 {
818 merge_fences (f, insn, state, dc, tc, last_scheduled_insn, 824 merge_fences (f, insn, state, dc, tc, last_scheduled_insn,
819 executing_insns, ready_ticks, ready_ticks_size, 825 executing_insns, ready_ticks, ready_ticks_size,
820 sched_next, cycle, after_stall_p); 826 sched_next, cycle, issue_rate, after_stall_p);
821 } 827 }
822 } 828 }
823 829
824 /* Move the first fence in the OLD_FENCES list to NEW_FENCES. */ 830 /* Move the first fence in the OLD_FENCES list to NEW_FENCES. */
825 void 831 void
834 if (f) 840 if (f)
835 { 841 {
836 merge_fences (f, old->insn, old->state, old->dc, old->tc, 842 merge_fences (f, old->insn, old->state, old->dc, old->tc,
837 old->last_scheduled_insn, old->executing_insns, 843 old->last_scheduled_insn, old->executing_insns,
838 old->ready_ticks, old->ready_ticks_size, 844 old->ready_ticks, old->ready_ticks_size,
839 old->sched_next, old->cycle, 845 old->sched_next, old->cycle, old->issue_more,
840 old->after_stall_p); 846 old->after_stall_p);
841 } 847 }
842 else 848 else
843 { 849 {
844 _list_add (tailp); 850 _list_add (tailp);
860 succ, state_create (), create_deps_context (), 866 succ, state_create (), create_deps_context (),
861 create_target_context (true), 867 create_target_context (true),
862 NULL_RTX, NULL, 868 NULL_RTX, NULL,
863 XCNEWVEC (int, ready_ticks_size), ready_ticks_size, 869 XCNEWVEC (int, ready_ticks_size), ready_ticks_size,
864 NULL_RTX, FENCE_CYCLE (fence) + 1, 870 NULL_RTX, FENCE_CYCLE (fence) + 1,
865 0, 1, FENCE_AFTER_STALL_P (fence)); 871 0, issue_rate, 1, FENCE_AFTER_STALL_P (fence));
866 } 872 }
867 873
868 /* Add a new fence to NEW_FENCES list and initialize all of its data 874 /* Add a new fence to NEW_FENCES list and initialize all of its data
869 from FENCE and SUCC. */ 875 from FENCE and SUCC. */
870 void 876 void
884 new_ready_ticks, 890 new_ready_ticks,
885 FENCE_READY_TICKS_SIZE (fence), 891 FENCE_READY_TICKS_SIZE (fence),
886 FENCE_SCHED_NEXT (fence), 892 FENCE_SCHED_NEXT (fence),
887 FENCE_CYCLE (fence), 893 FENCE_CYCLE (fence),
888 FENCE_ISSUED_INSNS (fence), 894 FENCE_ISSUED_INSNS (fence),
895 FENCE_ISSUE_MORE (fence),
889 FENCE_STARTS_CYCLE_P (fence), 896 FENCE_STARTS_CYCLE_P (fence),
890 FENCE_AFTER_STALL_P (fence)); 897 FENCE_AFTER_STALL_P (fence));
891 } 898 }
892 899
893 900
3501 #endif 3508 #endif
3502 3509
3503 3510
3504 /* Functions to work with control flow. */ 3511 /* Functions to work with control flow. */
3505 3512
3513 /* Recompute BLOCK_TO_BB and BB_TO_BLOCK for current region so that blocks
3514 are sorted in topological order (it might have been invalidated by
3515 redirecting an edge). */
3516 static void
3517 sel_recompute_toporder (void)
3518 {
3519 int i, n, rgn;
3520 int *postorder, n_blocks;
3521
3522 postorder = XALLOCAVEC (int, n_basic_blocks);
3523 n_blocks = post_order_compute (postorder, false, false);
3524
3525 rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
3526 for (n = 0, i = n_blocks - 1; i >= 0; i--)
3527 if (CONTAINING_RGN (postorder[i]) == rgn)
3528 {
3529 BLOCK_TO_BB (postorder[i]) = n;
3530 BB_TO_BLOCK (n) = postorder[i];
3531 n++;
3532 }
3533
3534 /* Assert that we updated info for all blocks. We may miss some blocks
3535 if this function is called after redirecting an edge has made a block
3536 unreachable, but that block has not been deleted yet. */
3537 gcc_assert (n == RGN_NR_BLOCKS (rgn));
3538 }
3539
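sel_recompute_toporder leans on a standard fact: on an acyclic graph, reading a DFS postorder backwards yields a topological order, so walking postorder[n_blocks - 1 .. 0] and handing out increasing indices re-sorts the region. A self-contained sketch of the same renumbering on a toy DAG (post_order_compute, BLOCK_TO_BB and BB_TO_BLOCK are GCC internals; the arrays below are stand-ins):

#include <stdio.h>

#define N 4

/* Toy DAG: 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3.  */
static const int edges[][2] = { {0, 1}, {0, 2}, {1, 3}, {2, 3} };
static const int n_edges = 4;

static int visited[N];
static int postorder[N];
static int post_n;

static void
dfs (int u)
{
  visited[u] = 1;
  for (int i = 0; i < n_edges; i++)
    if (edges[i][0] == u && !visited[edges[i][1]])
      dfs (edges[i][1]);
  postorder[post_n++] = u;   /* record u only after all successors */
}

int
main (void)
{
  int block_to_bb[N];
  int n = 0;

  dfs (0);

  /* Walk the postorder backwards, exactly as the i = n_blocks - 1
     loop above does, assigning increasing topological indices.  */
  for (int i = post_n - 1; i >= 0; i--)
    block_to_bb[postorder[i]] = n++;

  for (int b = 0; b < N; b++)
    printf ("block %d -> bb index %d\n", b, block_to_bb[b]);
  return 0;
}

On this toy DAG the loop produces one valid topological order (0, 2, 1, 3): block 0 precedes 1 and 2, and both precede 3.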
3506 /* Tidy the possibly empty block BB. */ 3540 /* Tidy the possibly empty block BB. */
3507 bool 3541 static bool
3508 maybe_tidy_empty_bb (basic_block bb) 3542 maybe_tidy_empty_bb (basic_block bb, bool recompute_toporder_p)
3509 { 3543 {
3510 basic_block succ_bb, pred_bb; 3544 basic_block succ_bb, pred_bb;
3511 edge e; 3545 edge e;
3512 edge_iterator ei; 3546 edge_iterator ei;
3513 bool rescan_p; 3547 bool rescan_p;
3514 3548
3515 /* Keep empty bb only if this block immediately precedes EXIT and 3549 /* Keep empty bb only if this block immediately precedes EXIT and
3516 has incoming non-fallthrough edge. Otherwise remove it. */ 3550 has incoming non-fallthrough edge, or it has no predecessors or
3551 successors. Otherwise remove it. */
3517 if (!sel_bb_empty_p (bb) 3552 if (!sel_bb_empty_p (bb)
3518 || (single_succ_p (bb) 3553 || (single_succ_p (bb)
3519 && single_succ (bb) == EXIT_BLOCK_PTR 3554 && single_succ (bb) == EXIT_BLOCK_PTR
3520 && (!single_pred_p (bb) 3555 && (!single_pred_p (bb)
3521 || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))) 3556 || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))
3557 || EDGE_COUNT (bb->preds) == 0
3558 || EDGE_COUNT (bb->succs) == 0)
3522 return false; 3559 return false;
3523 3560
3524 /* Do not attempt to redirect complex edges. */ 3561 /* Do not attempt to redirect complex edges. */
3525 FOR_EACH_EDGE (e, ei, bb->preds) 3562 FOR_EACH_EDGE (e, ei, bb->preds)
3526 if (e->flags & EDGE_COMPLEX) 3563 if (e->flags & EDGE_COMPLEX)
3550 { 3587 {
3551 pred_bb = e->src; 3588 pred_bb = e->src;
3552 3589
3553 if (!(e->flags & EDGE_FALLTHRU)) 3590 if (!(e->flags & EDGE_FALLTHRU))
3554 { 3591 {
3555 sel_redirect_edge_and_branch (e, succ_bb); 3592 recompute_toporder_p |= sel_redirect_edge_and_branch (e, succ_bb);
3556 rescan_p = true; 3593 rescan_p = true;
3557 break; 3594 break;
3558 } 3595 }
3559 } 3596 }
3560 } 3597 }
3566 /* Otherwise this is a block without fallthru predecessor. 3603 /* Otherwise this is a block without fallthru predecessor.
3567 Just delete it. */ 3604 Just delete it. */
3568 { 3605 {
3569 gcc_assert (pred_bb != NULL); 3606 gcc_assert (pred_bb != NULL);
3570 3607
3571 move_bb_info (pred_bb, bb); 3608 if (in_current_region_p (pred_bb))
3609 move_bb_info (pred_bb, bb);
3572 remove_empty_bb (bb, true); 3610 remove_empty_bb (bb, true);
3573 } 3611 }
3612
3613 if (recompute_toporder_p)
3614 sel_recompute_toporder ();
3574 3615
3575 #ifdef ENABLE_CHECKING 3616 #ifdef ENABLE_CHECKING
3576 verify_backedges (); 3617 verify_backedges ();
3577 #endif 3618 #endif
3578 3619
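The early-return guard at the top of maybe_tidy_empty_bb decides which empty blocks must survive. A hypothetical restatement of just that predicate, with the CFG details reduced to integers (sel_bb_empty_p, EDGE_FALLTHRU and EXIT_BLOCK_PTR are GCC internals):

#include <assert.h>

/* Nonzero when an empty block must be kept, restating the early-return
   guard of maybe_tidy_empty_bb with edge data as plain integers.  */
static int
must_keep_empty_bb (int n_preds, int n_succs,
                    int single_succ_is_exit, int single_pred_fallthru)
{
  if (n_preds == 0 || n_succs == 0)
    return 1;   /* detached block: not this function's job */
  if (n_succs == 1 && single_succ_is_exit
      && (n_preds != 1 || !single_pred_fallthru))
    return 1;   /* non-fallthrough jump to EXIT stays */
  return 0;     /* otherwise the empty block can be tidied away */
}

int
main (void)
{
  assert (must_keep_empty_bb (1, 1, 1, 0));   /* jump-only path to EXIT */
  assert (!must_keep_empty_bb (1, 1, 1, 1));  /* falls through into EXIT */
  assert (must_keep_empty_bb (0, 1, 0, 0));   /* unreachable, left alone */
  assert (!must_keep_empty_bb (2, 1, 0, 0));  /* ordinary empty block */
  return 0;
}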
3587 { 3628 {
3588 bool changed = true; 3629 bool changed = true;
3589 insn_t first, last; 3630 insn_t first, last;
3590 3631
3591 /* First check whether XBB is empty. */ 3632 /* First check whether XBB is empty. */
3592 changed = maybe_tidy_empty_bb (xbb); 3633 changed = maybe_tidy_empty_bb (xbb, false);
3593 if (changed || !full_tidying) 3634 if (changed || !full_tidying)
3594 return changed; 3635 return changed;
3595 3636
3596 /* Check if there is an unnecessary jump after insn left. */ 3637 /* Check if there is an unnecessary jump after insn left. */
3597 if (jump_leads_only_to_bb_p (BB_END (xbb), xbb->next_bb) 3638 if (jump_leads_only_to_bb_p (BB_END (xbb), xbb->next_bb)
3638 && jump_leads_only_to_bb_p (BB_END (xbb->prev_bb), xbb->next_bb) 3679 && jump_leads_only_to_bb_p (BB_END (xbb->prev_bb), xbb->next_bb)
3639 && INSN_SCHED_TIMES (BB_END (xbb->prev_bb)) == 0 3680 && INSN_SCHED_TIMES (BB_END (xbb->prev_bb)) == 0
3640 /* Also this jump is not at the scheduling boundary. */ 3681 /* Also this jump is not at the scheduling boundary. */
3641 && !IN_CURRENT_FENCE_P (BB_END (xbb->prev_bb))) 3682 && !IN_CURRENT_FENCE_P (BB_END (xbb->prev_bb)))
3642 { 3683 {
3684 bool recompute_toporder_p;
3643 /* Clear data structures of jump - jump itself will be removed 3685 /* Clear data structures of jump - jump itself will be removed
3644 by sel_redirect_edge_and_branch. */ 3686 by sel_redirect_edge_and_branch. */
3645 clear_expr (INSN_EXPR (BB_END (xbb->prev_bb))); 3687 clear_expr (INSN_EXPR (BB_END (xbb->prev_bb)));
3646 sel_redirect_edge_and_branch (EDGE_SUCC (xbb->prev_bb, 0), xbb); 3688 recompute_toporder_p
3689 = sel_redirect_edge_and_branch (EDGE_SUCC (xbb->prev_bb, 0), xbb);
3690
3647 gcc_assert (EDGE_SUCC (xbb->prev_bb, 0)->flags & EDGE_FALLTHRU); 3691 gcc_assert (EDGE_SUCC (xbb->prev_bb, 0)->flags & EDGE_FALLTHRU);
3648 3692
3649 /* It can turn out that after removing unused jump, basic block 3693 /* It can turn out that after removing unused jump, basic block
3650 that contained that jump becomes empty too. In such case 3694 that contained that jump becomes empty too. In such case
3651 remove it too. */ 3695 remove it too. */
3652 if (sel_bb_empty_p (xbb->prev_bb)) 3696 if (sel_bb_empty_p (xbb->prev_bb))
3653 changed = maybe_tidy_empty_bb (xbb->prev_bb); 3697 changed = maybe_tidy_empty_bb (xbb->prev_bb, recompute_toporder_p);
3698 else if (recompute_toporder_p)
3699 sel_recompute_toporder ();
3654 } 3700 }
3655 3701
3656 return changed; 3702 return changed;
3703 }
3704
3705 /* Purge meaningless empty blocks in the middle of a region. */
3706 void
3707 purge_empty_blocks (void)
3708 {
3709 /* Do not attempt to delete preheader. */
3710 int i = sel_is_loop_preheader_p (BASIC_BLOCK (BB_TO_BLOCK (0))) ? 1 : 0;
3711
3712 while (i < current_nr_blocks)
3713 {
3714 basic_block b = BASIC_BLOCK (BB_TO_BLOCK (i));
3715
3716 if (maybe_tidy_empty_bb (b, false))
3717 continue;
3718
3719 i++;
3720 }
3657 } 3721 }
3658 3722
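Note how purge_empty_blocks only advances i when maybe_tidy_empty_bb did nothing, so after a removal it re-tests whatever block slid into slot i. The same sweep pattern on a plain array, as a sketch:

#include <stdio.h>

/* Remove zeros from A in place, re-testing the current slot after
   every removal, like the continue without i++ in purge_empty_blocks.  */
static int
purge_zeros (int *a, int n)
{
  int i = 0;
  while (i < n)
    {
      if (a[i] == 0)
        {
          /* Shift the tail down, like removing a basic block
             renumbers the region's remaining blocks.  */
          for (int j = i; j < n - 1; j++)
            a[j] = a[j + 1];
          n--;
          continue;   /* same index, new occupant */
        }
      i++;
    }
  return n;
}

int
main (void)
{
  int a[] = { 1, 0, 0, 2, 0, 3 };
  int n = purge_zeros (a, 6);

  for (int i = 0; i < n; i++)
    printf ("%d ", a[i]);
  printf ("\n");   /* prints: 1 2 3 */
  return 0;
}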
3659 /* Rip-off INSN from the insn stream. When ONLY_DISCONNECT is true, 3723 /* Rip-off INSN from the insn stream. When ONLY_DISCONNECT is true,
3660 do not delete insn's data, because it will be later re-emitted. 3724 do not delete insn's data, because it will be later re-emitted.
3661 Return true if we have removed some blocks afterwards. */ 3725 Return true if we have removed some blocks afterwards. */
4351 }; 4415 };
4352 4416
4353 sched_scan (&ssi, bbs, bb, new_insns, NULL); 4417 sched_scan (&ssi, bbs, bb, new_insns, NULL);
4354 } 4418 }
4355 4419
4356 /* Restore other notes for the whole region. */ 4420 /* Restore notes for the whole region. */
4357 static void 4421 static void
4358 sel_restore_other_notes (void) 4422 sel_restore_notes (void)
4359 { 4423 {
4360 int bb; 4424 int bb;
4425 insn_t insn;
4361 4426
4362 for (bb = 0; bb < current_nr_blocks; bb++) 4427 for (bb = 0; bb < current_nr_blocks; bb++)
4363 { 4428 {
4364 basic_block first, last; 4429 basic_block first, last;
4365 4430
4370 { 4435 {
4371 note_list = BB_NOTE_LIST (first); 4436 note_list = BB_NOTE_LIST (first);
4372 restore_other_notes (NULL, first); 4437 restore_other_notes (NULL, first);
4373 BB_NOTE_LIST (first) = NULL_RTX; 4438 BB_NOTE_LIST (first) = NULL_RTX;
4374 4439
4440 FOR_BB_INSNS (first, insn)
4441 if (NONDEBUG_INSN_P (insn))
4442 reemit_notes (insn);
4443
4375 first = first->next_bb; 4444 first = first->next_bb;
4376 } 4445 }
4377 while (first != last); 4446 while (first != last);
4378 } 4447 }
4379 } 4448 }
4380 4449
4381 /* Free per-bb data structures. */ 4450 /* Free per-bb data structures. */
4382 void 4451 void
4383 sel_finish_bbs (void) 4452 sel_finish_bbs (void)
4384 { 4453 {
4385 sel_restore_other_notes (); 4454 sel_restore_notes ();
4386 4455
4387 /* Remove current loop preheader from this loop. */ 4456 /* Remove current loop preheader from this loop. */
4388 if (current_loop_nest) 4457 if (current_loop_nest)
4389 sel_remove_loop_preheader (); 4458 sel_remove_loop_preheader ();
4390 4459
5353 jump = find_new_jump (src, jump_bb, prev_max_uid); 5422 jump = find_new_jump (src, jump_bb, prev_max_uid);
5354 if (jump) 5423 if (jump)
5355 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP); 5424 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP);
5356 } 5425 }
5357 5426
5358 /* A wrapper for redirect_edge_and_branch. */ 5427 /* A wrapper for redirect_edge_and_branch. Return TRUE if blocks connected by
5359 void 5428 redirected edge are in reverse topological order. */
5429 bool
5360 sel_redirect_edge_and_branch (edge e, basic_block to) 5430 sel_redirect_edge_and_branch (edge e, basic_block to)
5361 { 5431 {
5362 bool latch_edge_p; 5432 bool latch_edge_p;
5363 basic_block src; 5433 basic_block src;
5364 int prev_max_uid; 5434 int prev_max_uid;
5365 rtx jump; 5435 rtx jump;
5366 edge redirected; 5436 edge redirected;
5437 bool recompute_toporder_p = false;
5367 5438
5368 latch_edge_p = (pipelining_p 5439 latch_edge_p = (pipelining_p
5369 && current_loop_nest 5440 && current_loop_nest
5370 && e == loop_latch_edge (current_loop_nest)); 5441 && e == loop_latch_edge (current_loop_nest));
5371 5442
5381 { 5452 {
5382 current_loop_nest->header = to; 5453 current_loop_nest->header = to;
5383 gcc_assert (loop_latch_edge (current_loop_nest)); 5454 gcc_assert (loop_latch_edge (current_loop_nest));
5384 } 5455 }
5385 5456
5457 /* In rare situations, the topological relation between the blocks connected
5458 by the redirected edge can change (see PR42245 for an example). Update
5459 block_to_bb/bb_to_block. */
5460 if (CONTAINING_RGN (e->src->index) == CONTAINING_RGN (to->index)
5461 && BLOCK_TO_BB (e->src->index) > BLOCK_TO_BB (to->index))
5462 recompute_toporder_p = true;
5463
5386 jump = find_new_jump (src, NULL, prev_max_uid); 5464 jump = find_new_jump (src, NULL, prev_max_uid);
5387 if (jump) 5465 if (jump)
5388 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP); 5466 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP);
5467
5468 return recompute_toporder_p;
5389 } 5469 }
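The PR42245 detection above is cheap: after redirecting an edge, if source and target share a region but the source's topological index exceeds the target's, the ordering is stale and the caller must run sel_recompute_toporder. A distilled, hypothetical version of that test (CONTAINING_RGN and BLOCK_TO_BB are GCC's region arrays; plain arrays stand in here):

#include <assert.h>

/* Given per-block region ids and topological indices, decide whether
   redirecting an edge SRC -> TO invalidates the topological order,
   mirroring the CONTAINING_RGN/BLOCK_TO_BB test above.  */
static int
edge_breaks_toporder (const int *containing_rgn, const int *block_to_bb,
                      int src, int to)
{
  return containing_rgn[src] == containing_rgn[to]
         && block_to_bb[src] > block_to_bb[to];
}

int
main (void)
{
  int rgn[] = { 0, 0, 0 };
  int bb[]  = { 0, 1, 2 };

  /* A forward edge 0 -> 2 keeps the order; a new edge 2 -> 1 between
     blocks of the same region reverses it and forces a recompute.  */
  assert (!edge_breaks_toporder (rgn, bb, 0, 2));
  assert (edge_breaks_toporder (rgn, bb, 2, 1));
  return 0;
}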
5390 5470
5391 /* This variable holds the cfg hooks used by the selective scheduler. */ 5471 /* This variable holds the cfg hooks used by the selective scheduler. */
5392 static struct cfg_hooks sel_cfg_hooks; 5472 static struct cfg_hooks sel_cfg_hooks;
5393 5473
5817 region is in LOOP_NESTS. 5897 region is in LOOP_NESTS.
5818 We determine the region number of LOOP as the region number of its 5898 We determine the region number of LOOP as the region number of its
5819 latch. We can't use header here, because this header could be 5899 latch. We can't use header here, because this header could be
5820 just removed preheader and it will give us the wrong region number. 5900 just removed preheader and it will give us the wrong region number.
5821 Latch can't be used because it could be in the inner loop too. */ 5901 Latch can't be used because it could be in the inner loop too. */
5822 if (LOOP_MARKED_FOR_PIPELINING_P (loop) && pipelining_p) 5902 if (LOOP_MARKED_FOR_PIPELINING_P (loop))
5823 { 5903 {
5824 int rgn = CONTAINING_RGN (loop->latch->index); 5904 int rgn = CONTAINING_RGN (loop->latch->index);
5825 5905
5826 gcc_assert ((unsigned) rgn < VEC_length (loop_p, loop_nests)); 5906 gcc_assert ((unsigned) rgn < VEC_length (loop_p, loop_nests));
5827 return true; 5907 return true;
5966 = LOOP_PREHEADER_BLOCKS (current_loop_nest); 6046 = LOOP_PREHEADER_BLOCKS (current_loop_nest);
5967 6047
5968 for (i = 0; 6048 for (i = 0;
5969 VEC_iterate (basic_block, preheader_blocks, i, bb); 6049 VEC_iterate (basic_block, preheader_blocks, i, bb);
5970 i++) 6050 i++)
6051 {
6052 VEC_safe_push (basic_block, heap, last_added_blocks, bb);
5971 sel_add_bb (bb); 6053 sel_add_bb (bb);
6054 }
5972 6055
5973 VEC_free (basic_block, heap, preheader_blocks); 6056 VEC_free (basic_block, heap, preheader_blocks);
5974 } 6057 }
5975 6058
5976 /* While pipelining outer loops, returns TRUE if BB is a loop preheader. 6059 /* While pipelining outer loops, returns TRUE if BB is a loop preheader.