Mercurial > hg > CbC > CbC_gcc
comparison gcc/sel-sched.c @ 67:f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author | nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 22 Mar 2011 17:18:12 +0900 |
parents | b7f97abdc517 |
children | 04ced10e8804 |
comparison
equal
deleted
inserted
replaced
65:65488c3d617d | 67:f6334be47118 |
---|---|
1 /* Instruction scheduling pass. Selective scheduler and pipeliner. | 1 /* Instruction scheduling pass. Selective scheduler and pipeliner. |
2 Copyright (C) 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc. | 2 Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 |
3 Free Software Foundation, Inc. | |
3 | 4 |
4 This file is part of GCC. | 5 This file is part of GCC. |
5 | 6 |
6 GCC is free software; you can redistribute it and/or modify it under | 7 GCC is free software; you can redistribute it and/or modify it under |
7 the terms of the GNU General Public License as published by the Free | 8 the terms of the GNU General Public License as published by the Free |
19 | 20 |
20 #include "config.h" | 21 #include "config.h" |
21 #include "system.h" | 22 #include "system.h" |
22 #include "coretypes.h" | 23 #include "coretypes.h" |
23 #include "tm.h" | 24 #include "tm.h" |
24 #include "toplev.h" | 25 #include "rtl-error.h" |
25 #include "rtl.h" | |
26 #include "tm_p.h" | 26 #include "tm_p.h" |
27 #include "hard-reg-set.h" | 27 #include "hard-reg-set.h" |
28 #include "regs.h" | 28 #include "regs.h" |
29 #include "function.h" | 29 #include "function.h" |
30 #include "flags.h" | 30 #include "flags.h" |
31 #include "insn-config.h" | 31 #include "insn-config.h" |
32 #include "insn-attr.h" | 32 #include "insn-attr.h" |
33 #include "except.h" | 33 #include "except.h" |
34 #include "toplev.h" | |
35 #include "recog.h" | 34 #include "recog.h" |
36 #include "params.h" | 35 #include "params.h" |
37 #include "target.h" | 36 #include "target.h" |
38 #include "output.h" | 37 #include "output.h" |
39 #include "timevar.h" | 38 #include "timevar.h" |
43 #include "tree.h" | 42 #include "tree.h" |
44 #include "vec.h" | 43 #include "vec.h" |
45 #include "langhooks.h" | 44 #include "langhooks.h" |
46 #include "rtlhooks-def.h" | 45 #include "rtlhooks-def.h" |
47 #include "output.h" | 46 #include "output.h" |
47 #include "emit-rtl.h" | |
48 | 48 |
49 #ifdef INSN_SCHEDULING | 49 #ifdef INSN_SCHEDULING |
50 #include "sel-sched-ir.h" | 50 #include "sel-sched-ir.h" |
51 #include "sel-sched-dump.h" | 51 #include "sel-sched-dump.h" |
52 #include "sel-sched.h" | 52 #include "sel-sched.h" |
611 skipping empty basic blocks. */ | 611 skipping empty basic blocks. */ |
612 static bool | 612 static bool |
613 in_fallthru_bb_p (rtx insn, rtx succ) | 613 in_fallthru_bb_p (rtx insn, rtx succ) |
614 { | 614 { |
615 basic_block bb = BLOCK_FOR_INSN (insn); | 615 basic_block bb = BLOCK_FOR_INSN (insn); |
616 edge e; | |
616 | 617 |
617 if (bb == BLOCK_FOR_INSN (succ)) | 618 if (bb == BLOCK_FOR_INSN (succ)) |
618 return true; | 619 return true; |
619 | 620 |
620 if (find_fallthru_edge (bb)) | 621 e = find_fallthru_edge_from (bb); |
621 bb = find_fallthru_edge (bb)->dest; | 622 if (e) |
623 bb = e->dest; | |
622 else | 624 else |
623 return false; | 625 return false; |
624 | 626 |
625 while (sel_bb_empty_p (bb)) | 627 while (sel_bb_empty_p (bb)) |
626 bb = bb->next_bb; | 628 bb = bb->next_bb; |
834 return -1; | 836 return -1; |
835 } | 837 } |
836 | 838 |
837 if (GET_CODE (*cur_rtx) == SUBREG | 839 if (GET_CODE (*cur_rtx) == SUBREG |
838 && REG_P (p->x) | 840 && REG_P (p->x) |
839 && REGNO (SUBREG_REG (*cur_rtx)) == REGNO (p->x)) | 841 && (!REG_P (SUBREG_REG (*cur_rtx)) |
842 || REGNO (SUBREG_REG (*cur_rtx)) == REGNO (p->x))) | |
840 { | 843 { |
841 /* ??? Do not support substituting regs inside subregs. In that case, | 844 /* ??? Do not support substituting regs inside subregs. In that case, |
842 simplify_subreg will be called by validate_replace_rtx, and | 845 simplify_subreg will be called by validate_replace_rtx, and |
843 unsubstitution will fail later. */ | 846 unsubstitution will fail later. */ |
844 p->n = 0; | 847 p->n = 0; |
1135 if (fixed_regs[cur_reg + i] | 1138 if (fixed_regs[cur_reg + i] |
1136 || global_regs[cur_reg + i] | 1139 || global_regs[cur_reg + i] |
1137 /* Can't use regs which aren't saved by | 1140 /* Can't use regs which aren't saved by |
1138 the prologue. */ | 1141 the prologue. */ |
1139 || !TEST_HARD_REG_BIT (sel_hrd.regs_ever_used, cur_reg + i) | 1142 || !TEST_HARD_REG_BIT (sel_hrd.regs_ever_used, cur_reg + i) |
1143 /* Can't use regs with non-null REG_BASE_VALUE, because adjusting | |
1144 it affects aliasing globally and invalidates all AV sets. */ | |
1145 || get_reg_base_value (cur_reg + i) | |
1140 #ifdef LEAF_REGISTERS | 1146 #ifdef LEAF_REGISTERS |
1141 /* We can't use a non-leaf register if we're in a | 1147 /* We can't use a non-leaf register if we're in a |
1142 leaf function. */ | 1148 leaf function. */ |
1143 || (current_function_is_leaf | 1149 || (current_function_is_leaf |
1144 && !LEAF_REGISTERS[cur_reg + i]) | 1150 && !LEAF_REGISTERS[cur_reg + i]) |
1234 | 1240 |
1235 /* Stop if the original register is one of the fixed_regs, global_regs or | 1241 /* Stop if the original register is one of the fixed_regs, global_regs or |
1236 frame pointer, or we could not discover its class. */ | 1242 frame pointer, or we could not discover its class. */ |
1237 if (fixed_regs[regno] | 1243 if (fixed_regs[regno] |
1238 || global_regs[regno] | 1244 || global_regs[regno] |
1239 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | 1245 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER |
1240 || (frame_pointer_needed && regno == HARD_FRAME_POINTER_REGNUM) | 1246 || (frame_pointer_needed && regno == HARD_FRAME_POINTER_REGNUM) |
1241 #else | 1247 #else |
1242 || (frame_pointer_needed && regno == FRAME_POINTER_REGNUM) | 1248 || (frame_pointer_needed && regno == FRAME_POINTER_REGNUM) |
1243 #endif | 1249 #endif |
1244 || (reload_completed && cl == NO_REGS)) | 1250 || (reload_completed && cl == NO_REGS)) |
1261 | 1267 |
1262 for (i = hard_regno_nregs[FRAME_POINTER_REGNUM][Pmode]; i--;) | 1268 for (i = hard_regno_nregs[FRAME_POINTER_REGNUM][Pmode]; i--;) |
1263 SET_HARD_REG_BIT (reg_rename_p->unavailable_hard_regs, | 1269 SET_HARD_REG_BIT (reg_rename_p->unavailable_hard_regs, |
1264 FRAME_POINTER_REGNUM + i); | 1270 FRAME_POINTER_REGNUM + i); |
1265 | 1271 |
1266 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | 1272 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER |
1267 for (i = hard_regno_nregs[HARD_FRAME_POINTER_REGNUM][Pmode]; i--;) | 1273 for (i = hard_regno_nregs[HARD_FRAME_POINTER_REGNUM][Pmode]; i--;) |
1268 SET_HARD_REG_BIT (reg_rename_p->unavailable_hard_regs, | 1274 SET_HARD_REG_BIT (reg_rename_p->unavailable_hard_regs, |
1269 HARD_FRAME_POINTER_REGNUM + i); | 1275 HARD_FRAME_POINTER_REGNUM + i); |
1270 #endif | 1276 #endif |
1271 } | 1277 } |
2134 bool was_changed = false; | 2140 bool was_changed = false; |
2135 bool as_rhs = false; | 2141 bool as_rhs = false; |
2136 ds_t *has_dep_p; | 2142 ds_t *has_dep_p; |
2137 ds_t full_ds; | 2143 ds_t full_ds; |
2138 | 2144 |
2145 /* ??? We use dependencies of non-debug insns on debug insns to | |
2146 indicate that the debug insns need to be reset if the non-debug | |
2147 insn is pulled ahead of it. It's hard to figure out how to | |
2148 introduce such a notion in sel-sched, but it already fails to | |
2149 support debug insns in other ways, so we just go ahead and | |
2150 let the debug insns go corrupt for now. */ |
2151 if (DEBUG_INSN_P (through_insn) && !DEBUG_INSN_P (insn)) | |
2152 return MOVEUP_EXPR_SAME; | |
2153 | |
2139 /* When inside_insn_group, delegate to the helper. */ | 2154 /* When inside_insn_group, delegate to the helper. */ |
2140 if (inside_insn_group) | 2155 if (inside_insn_group) |
2141 return moveup_expr_inside_insn_group (expr, through_insn); | 2156 return moveup_expr_inside_insn_group (expr, through_insn); |
2142 | 2157 |
2143 /* Deal with unique insns and control dependencies. */ | 2158 /* Deal with unique insns and control dependencies. */ |
2164 this block should be in the current region. */ | 2179 this block should be in the current region. */ |
2165 if ((fallthru_bb = fallthru_bb_of_jump (insn)) == NULL | 2180 if ((fallthru_bb = fallthru_bb_of_jump (insn)) == NULL |
2166 || ! in_current_region_p (fallthru_bb)) | 2181 || ! in_current_region_p (fallthru_bb)) |
2167 return MOVEUP_EXPR_NULL; | 2182 return MOVEUP_EXPR_NULL; |
2168 | 2183 |
2169 /* And it should be mutually exclusive with through_insn, or | 2184 /* And it should be mutually exclusive with through_insn. */ |
2170 be an unconditional jump. */ | 2185 if (! sched_insns_conditions_mutex_p (insn, through_insn) |
2171 if (! any_uncondjump_p (insn) | |
2172 && ! sched_insns_conditions_mutex_p (insn, through_insn) | |
2173 && ! DEBUG_INSN_P (through_insn)) | 2186 && ! DEBUG_INSN_P (through_insn)) |
2174 return MOVEUP_EXPR_NULL; | 2187 return MOVEUP_EXPR_NULL; |
2175 } | 2188 } |
2176 | 2189 |
2177 /* Don't move what we can't move. */ | 2190 /* Don't move what we can't move. */ |
2734 } | 2747 } |
2735 | 2748 |
2736 /* Add insn to the tail of current path. */ | 2749 /* Add insn to the tail of current path. */ |
2737 ilist_add (&p, insn); | 2750 ilist_add (&p, insn); |
2738 | 2751 |
2739 for (is = 0; VEC_iterate (rtx, sinfo->succs_ok, is, succ); is++) | 2752 FOR_EACH_VEC_ELT (rtx, sinfo->succs_ok, is, succ) |
2740 { | 2753 { |
2741 av_set_t succ_set; | 2754 av_set_t succ_set; |
2742 | 2755 |
2743 /* We will edit SUCC_SET and EXPR_SPEC field of its elements. */ | 2756 /* We will edit SUCC_SET and EXPR_SPEC field of its elements. */ |
2744 succ_set = compute_av_set_inside_bb (succ, p, ws, true); | 2757 succ_set = compute_av_set_inside_bb (succ, p, ws, true); |
2788 } | 2801 } |
2789 | 2802 |
2790 /* Check liveness restrictions via hard way when there are more than | 2803 /* Check liveness restrictions via hard way when there are more than |
2791 two successors. */ | 2804 two successors. */ |
2792 if (sinfo->succs_ok_n > 2) | 2805 if (sinfo->succs_ok_n > 2) |
2793 for (is = 0; VEC_iterate (rtx, sinfo->succs_ok, is, succ); is++) | 2806 FOR_EACH_VEC_ELT (rtx, sinfo->succs_ok, is, succ) |
2794 { | 2807 { |
2795 basic_block succ_bb = BLOCK_FOR_INSN (succ); | 2808 basic_block succ_bb = BLOCK_FOR_INSN (succ); |
2796 | 2809 |
2797 gcc_assert (BB_LV_SET_VALID_P (succ_bb)); | 2810 gcc_assert (BB_LV_SET_VALID_P (succ_bb)); |
2798 mark_unavailable_targets (av1, BB_AV_SET (succ_bb), | 2811 mark_unavailable_targets (av1, BB_AV_SET (succ_bb), |
2799 BB_LV_SET (succ_bb)); | 2812 BB_LV_SET (succ_bb)); |
2800 } | 2813 } |
2801 | 2814 |
2802 /* Finally, check liveness restrictions on paths leaving the region. */ | 2815 /* Finally, check liveness restrictions on paths leaving the region. */ |
2803 if (sinfo->all_succs_n > sinfo->succs_ok_n) | 2816 if (sinfo->all_succs_n > sinfo->succs_ok_n) |
2804 for (is = 0; VEC_iterate (rtx, sinfo->succs_other, is, succ); is++) | 2817 FOR_EACH_VEC_ELT (rtx, sinfo->succs_other, is, succ) |
2805 mark_unavailable_targets | 2818 mark_unavailable_targets |
2806 (av1, NULL, BB_LV_SET (BLOCK_FOR_INSN (succ))); | 2819 (av1, NULL, BB_LV_SET (BLOCK_FOR_INSN (succ))); |
2807 | 2820 |
2808 if (sinfo->all_succs_n > 1) | 2821 if (sinfo->all_succs_n > 1) |
2809 { | 2822 { |
3570 vinsn_vec_has_expr_p (vinsn_vec_t vinsn_vec, expr_t expr) | 3583 vinsn_vec_has_expr_p (vinsn_vec_t vinsn_vec, expr_t expr) |
3571 { | 3584 { |
3572 vinsn_t vinsn; | 3585 vinsn_t vinsn; |
3573 int n; | 3586 int n; |
3574 | 3587 |
3575 for (n = 0; VEC_iterate (vinsn_t, vinsn_vec, n, vinsn); n++) | 3588 FOR_EACH_VEC_ELT (vinsn_t, vinsn_vec, n, vinsn) |
3576 if (VINSN_SEPARABLE_P (vinsn)) | 3589 if (VINSN_SEPARABLE_P (vinsn)) |
3577 { | 3590 { |
3578 if (vinsn_equal_p (vinsn, EXPR_VINSN (expr))) | 3591 if (vinsn_equal_p (vinsn, EXPR_VINSN (expr))) |
3579 return true; | 3592 return true; |
3580 } | 3593 } |
3644 if (len > 0) | 3657 if (len > 0) |
3645 { | 3658 { |
3646 vinsn_t vinsn; | 3659 vinsn_t vinsn; |
3647 int n; | 3660 int n; |
3648 | 3661 |
3649 for (n = 0; VEC_iterate (vinsn_t, *vinsn_vec, n, vinsn); n++) | 3662 FOR_EACH_VEC_ELT (vinsn_t, *vinsn_vec, n, vinsn) |
3650 vinsn_detach (vinsn); | 3663 vinsn_detach (vinsn); |
3651 VEC_block_remove (vinsn_t, *vinsn_vec, 0, len); | 3664 VEC_block_remove (vinsn_t, *vinsn_vec, 0, len); |
3652 } | 3665 } |
3653 } | 3666 } |
3654 | 3667 |
3717 /* Adjust priority using target backend hook. */ | 3730 /* Adjust priority using target backend hook. */ |
3718 sel_target_adjust_priority (expr); | 3731 sel_target_adjust_priority (expr); |
3719 } | 3732 } |
3720 | 3733 |
3721 /* Sort the vector. */ | 3734 /* Sort the vector. */ |
3722 qsort (VEC_address (expr_t, vec_av_set), VEC_length (expr_t, vec_av_set), | 3735 VEC_qsort (expr_t, vec_av_set, sel_rank_for_schedule); |
3723 sizeof (expr_t), sel_rank_for_schedule); | |
3724 | 3736 |
3725 /* We record maximal priority of insns in av set for current instruction | 3737 /* We record maximal priority of insns in av set for current instruction |
3726 group. */ | 3738 group. */ |
3727 if (FENCE_STARTS_CYCLE_P (fence)) | 3739 if (FENCE_STARTS_CYCLE_P (fence)) |
3728 av_max_prio = est_ticks_till_branch = INT_MIN; | 3740 av_max_prio = est_ticks_till_branch = INT_MIN; |
3732 moves last element in place of one being deleted. */ | 3744 moves last element in place of one being deleted. */ |
3733 for (n = VEC_length (expr_t, vec_av_set) - 1, stalled = 0; n >= 0; n--) | 3745 for (n = VEC_length (expr_t, vec_av_set) - 1, stalled = 0; n >= 0; n--) |
3734 { | 3746 { |
3735 expr_t expr = VEC_index (expr_t, vec_av_set, n); | 3747 expr_t expr = VEC_index (expr_t, vec_av_set, n); |
3736 insn_t insn = EXPR_INSN_RTX (expr); | 3748 insn_t insn = EXPR_INSN_RTX (expr); |
3737 char target_available; | 3749 signed char target_available; |
3738 bool is_orig_reg_p = true; | 3750 bool is_orig_reg_p = true; |
3739 int need_cycles, new_prio; | 3751 int need_cycles, new_prio; |
3740 | 3752 |
3741 /* Don't allow any insns other than from SCHED_GROUP if we have one. */ | 3753 /* Don't allow any insns other than from SCHED_GROUP if we have one. */ |
3742 if (FENCE_SCHED_NEXT (fence) && insn != FENCE_SCHED_NEXT (fence)) | 3754 if (FENCE_SCHED_NEXT (fence) && insn != FENCE_SCHED_NEXT (fence)) |
3932 } | 3944 } |
3933 else | 3945 else |
3934 gcc_assert (min_need_stall == 0); | 3946 gcc_assert (min_need_stall == 0); |
3935 | 3947 |
3936 /* Sort the vector. */ | 3948 /* Sort the vector. */ |
3937 qsort (VEC_address (expr_t, vec_av_set), VEC_length (expr_t, vec_av_set), | 3949 VEC_qsort (expr_t, vec_av_set, sel_rank_for_schedule); |
3938 sizeof (expr_t), sel_rank_for_schedule); | |
3939 | 3950 |
3940 if (sched_verbose >= 4) | 3951 if (sched_verbose >= 4) |
3941 { | 3952 { |
3942 sel_print ("Total ready exprs: %d, stalled: %d\n", | 3953 sel_print ("Total ready exprs: %d, stalled: %d\n", |
3943 VEC_length (expr_t, vec_av_set), stalled); | 3954 VEC_length (expr_t, vec_av_set), stalled); |
3944 sel_print ("Sorted av set (%d): ", VEC_length (expr_t, vec_av_set)); | 3955 sel_print ("Sorted av set (%d): ", VEC_length (expr_t, vec_av_set)); |
3945 for (n = 0; VEC_iterate (expr_t, vec_av_set, n, expr); n++) | 3956 FOR_EACH_VEC_ELT (expr_t, vec_av_set, n, expr) |
3946 dump_expr (expr); | 3957 dump_expr (expr); |
3947 sel_print ("\n"); | 3958 sel_print ("\n"); |
3948 } | 3959 } |
3949 | 3960 |
3950 *pneed_stall = 0; | 3961 *pneed_stall = 0; |
3969 { | 3980 { |
3970 max_issue_size = ready.n_ready; | 3981 max_issue_size = ready.n_ready; |
3971 sched_extend_ready_list (ready.n_ready); | 3982 sched_extend_ready_list (ready.n_ready); |
3972 } | 3983 } |
3973 | 3984 |
3974 for (n = 0; VEC_iterate (expr_t, vec_av_set, n, expr); n++) | 3985 FOR_EACH_VEC_ELT (expr_t, vec_av_set, n, expr) |
3975 { | 3986 { |
3976 vinsn_t vi = EXPR_VINSN (expr); | 3987 vinsn_t vi = EXPR_VINSN (expr); |
3977 insn_t insn = VINSN_INSN_RTX (vi); | 3988 insn_t insn = VINSN_INSN_RTX (vi); |
3978 | 3989 |
3979 ready_try[n] = 0; | 3990 ready_try[n] = 0; |
4318 int can_issue = 0; | 4329 int can_issue = 0; |
4319 | 4330 |
4320 if (dfa_lookahead > 0) | 4331 if (dfa_lookahead > 0) |
4321 { | 4332 { |
4322 cycle_issued_insns = FENCE_ISSUED_INSNS (fence); | 4333 cycle_issued_insns = FENCE_ISSUED_INSNS (fence); |
4334 /* TODO: pass equivalent of first_cycle_insn_p to max_issue (). */ | |
4323 can_issue = max_issue (&ready, privileged_n, | 4335 can_issue = max_issue (&ready, privileged_n, |
4324 FENCE_STATE (fence), index); | 4336 FENCE_STATE (fence), true, index); |
4325 if (sched_verbose >= 2) | 4337 if (sched_verbose >= 2) |
4326 sel_print ("max_issue: we can issue %d insns, already did %d insns\n", | 4338 sel_print ("max_issue: we can issue %d insns, already did %d insns\n", |
4327 can_issue, FENCE_ISSUED_INSNS (fence)); | 4339 can_issue, FENCE_ISSUED_INSNS (fence)); |
4328 } | 4340 } |
4329 else | 4341 else |
4400 | 4412 |
4401 if (best != NULL) | 4413 if (best != NULL) |
4402 { | 4414 { |
4403 can_issue_more = invoke_aftermath_hooks (fence, EXPR_INSN_RTX (best), | 4415 can_issue_more = invoke_aftermath_hooks (fence, EXPR_INSN_RTX (best), |
4404 can_issue_more); | 4416 can_issue_more); |
4405 if (can_issue_more == 0) | 4417 if (targetm.sched.variable_issue |
4418 && can_issue_more == 0) | |
4406 *pneed_stall = 1; | 4419 *pneed_stall = 1; |
4407 } | 4420 } |
4408 | 4421 |
4409 if (sched_verbose >= 2) | 4422 if (sched_verbose >= 2) |
4410 { | 4423 { |
4630 | 4643 |
4631 FOR_BB_INSNS (succ, insn) | 4644 FOR_BB_INSNS (succ, insn) |
4632 if (INSN_P (insn)) | 4645 if (INSN_P (insn)) |
4633 EXPR_ORIG_BB_INDEX (INSN_EXPR (insn)) = succ->index; | 4646 EXPR_ORIG_BB_INDEX (INSN_EXPR (insn)) = succ->index; |
4634 | 4647 |
4635 if (bitmap_bit_p (code_motion_visited_blocks, new_bb->index)) | 4648 if (bitmap_clear_bit (code_motion_visited_blocks, new_bb->index)) |
4636 { | 4649 bitmap_set_bit (code_motion_visited_blocks, succ->index); |
4637 bitmap_set_bit (code_motion_visited_blocks, succ->index); | |
4638 bitmap_clear_bit (code_motion_visited_blocks, new_bb->index); | |
4639 } | |
4640 | 4650 |
4641 gcc_assert (LABEL_P (BB_HEAD (new_bb)) | 4651 gcc_assert (LABEL_P (BB_HEAD (new_bb)) |
4642 && LABEL_P (BB_HEAD (succ))); | 4652 && LABEL_P (BB_HEAD (succ))); |
4643 | 4653 |
4644 if (sched_verbose >= 4) | 4654 if (sched_verbose >= 4) |
4876 */ | 4886 */ |
4877 static void | 4887 static void |
4878 move_cond_jump (rtx insn, bnd_t bnd) | 4888 move_cond_jump (rtx insn, bnd_t bnd) |
4879 { | 4889 { |
4880 edge ft_edge; | 4890 edge ft_edge; |
4881 basic_block block_from, block_next, block_new; | 4891 basic_block block_from, block_next, block_new, block_bnd, bb; |
4882 rtx next, prev, link; | 4892 rtx next, prev, link, head; |
4883 | 4893 |
4884 /* BLOCK_FROM holds basic block of the jump. */ | |
4885 block_from = BLOCK_FOR_INSN (insn); | 4894 block_from = BLOCK_FOR_INSN (insn); |
4886 | 4895 block_bnd = BLOCK_FOR_INSN (BND_TO (bnd)); |
4887 /* Moving of jump should not cross any other jumps or | 4896 prev = BND_TO (bnd); |
4888 beginnings of new basic blocks. */ | 4897 |
4889 gcc_assert (block_from == BLOCK_FOR_INSN (BND_TO (bnd))); | 4898 #ifdef ENABLE_CHECKING |
4899 /* Moving of jump should not cross any other jumps or beginnings of new | |
4900 basic blocks. The only exception is when we move a jump through | |
4901 mutually exclusive insns along fallthru edges. */ | |
4902 if (block_from != block_bnd) | |
4903 { | |
4904 bb = block_from; | |
4905 for (link = PREV_INSN (insn); link != PREV_INSN (prev); | |
4906 link = PREV_INSN (link)) | |
4907 { | |
4908 if (INSN_P (link)) | |
4909 gcc_assert (sched_insns_conditions_mutex_p (insn, link)); | |
4910 if (BLOCK_FOR_INSN (link) && BLOCK_FOR_INSN (link) != bb) | |
4911 { | |
4912 gcc_assert (single_pred (bb) == BLOCK_FOR_INSN (link)); | |
4913 bb = BLOCK_FOR_INSN (link); | |
4914 } | |
4915 } | |
4916 } | |
4917 #endif | |
4890 | 4918 |
4891 /* Jump is moved to the boundary. */ | 4919 /* Jump is moved to the boundary. */ |
4892 prev = BND_TO (bnd); | |
4893 next = PREV_INSN (insn); | 4920 next = PREV_INSN (insn); |
4894 BND_TO (bnd) = insn; | 4921 BND_TO (bnd) = insn; |
4895 | 4922 |
4896 ft_edge = find_fallthru_edge (block_from); | 4923 ft_edge = find_fallthru_edge_from (block_from); |
4897 block_next = ft_edge->dest; | 4924 block_next = ft_edge->dest; |
4898 /* There must be a fallthrough block (or where should go | 4925 /* There must be a fallthrough block (or where should go |
4899 control flow in case of false jump predicate otherwise?). */ | 4926 control flow in case of false jump predicate otherwise?). */ |
4900 gcc_assert (block_next); | 4927 gcc_assert (block_next); |
4901 | 4928 |
4902 /* Create new empty basic block after source block. */ | 4929 /* Create new empty basic block after source block. */ |
4903 block_new = sel_split_edge (ft_edge); | 4930 block_new = sel_split_edge (ft_edge); |
4904 gcc_assert (block_new->next_bb == block_next | 4931 gcc_assert (block_new->next_bb == block_next |
4905 && block_from->next_bb == block_new); | 4932 && block_from->next_bb == block_new); |
4906 | 4933 |
4907 gcc_assert (BB_END (block_from) == insn); | 4934 /* Move all instructions except INSN to BLOCK_NEW. */ |
4908 | 4935 bb = block_bnd; |
4909 /* Move all instructions except INSN from BLOCK_FROM to | 4936 head = BB_HEAD (block_new); |
4910 BLOCK_NEW. */ | 4937 while (bb != block_from->next_bb) |
4911 for (link = prev; link != insn; link = NEXT_INSN (link)) | 4938 { |
4912 { | 4939 rtx from, to; |
4913 EXPR_ORIG_BB_INDEX (INSN_EXPR (link)) = block_new->index; | 4940 from = bb == block_bnd ? prev : sel_bb_head (bb); |
4914 df_insn_change_bb (link, block_new); | 4941 to = bb == block_from ? next : sel_bb_end (bb); |
4915 } | 4942 |
4916 | 4943 /* The jump being moved can be the first insn in the block. |
4917 /* Set correct basic block and instructions properties. */ | 4944 In this case we don't have to move anything in this block. */ |
4918 BB_END (block_new) = PREV_INSN (insn); | 4945 if (NEXT_INSN (to) != from) |
4919 | 4946 { |
4920 NEXT_INSN (PREV_INSN (prev)) = insn; | 4947 reorder_insns (from, to, head); |
4921 PREV_INSN (insn) = PREV_INSN (prev); | 4948 |
4949 for (link = to; link != head; link = PREV_INSN (link)) | |
4950 EXPR_ORIG_BB_INDEX (INSN_EXPR (link)) = block_new->index; | |
4951 head = to; | |
4952 } | |
4953 | |
4954 /* Cleanup possibly empty blocks left. */ | |
4955 block_next = bb->next_bb; | |
4956 if (bb != block_from) | |
4957 tidy_control_flow (bb, false); | |
4958 bb = block_next; | |
4959 } | |
4922 | 4960 |
4923 /* Assert there is no jump to BLOCK_NEW, only fallthrough edge. */ | 4961 /* Assert there is no jump to BLOCK_NEW, only fallthrough edge. */ |
4924 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (BB_HEAD (block_new))); | 4962 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (BB_HEAD (block_new))); |
4925 PREV_INSN (prev) = BB_HEAD (block_new); | |
4926 NEXT_INSN (next) = NEXT_INSN (BB_HEAD (block_new)); | |
4927 NEXT_INSN (BB_HEAD (block_new)) = prev; | |
4928 PREV_INSN (NEXT_INSN (next)) = next; | |
4929 | 4963 |
4930 gcc_assert (!sel_bb_empty_p (block_from) | 4964 gcc_assert (!sel_bb_empty_p (block_from) |
4931 && !sel_bb_empty_p (block_new)); | 4965 && !sel_bb_empty_p (block_new)); |
4932 | 4966 |
4933 /* Update data sets for BLOCK_NEW to represent that INSN and | 4967 /* Update data sets for BLOCK_NEW to represent that INSN and |
4952 remove_temp_moveop_nops (bool full_tidying) | 4986 remove_temp_moveop_nops (bool full_tidying) |
4953 { | 4987 { |
4954 int i; | 4988 int i; |
4955 insn_t insn; | 4989 insn_t insn; |
4956 | 4990 |
4957 for (i = 0; VEC_iterate (insn_t, vec_temp_moveop_nops, i, insn); i++) | 4991 FOR_EACH_VEC_ELT (insn_t, vec_temp_moveop_nops, i, insn) |
4958 { | 4992 { |
4959 gcc_assert (INSN_NOP_P (insn)); | 4993 gcc_assert (INSN_NOP_P (insn)); |
4960 return_nop_to_pool (insn, full_tidying); | 4994 return_nop_to_pool (insn, full_tidying); |
4961 } | 4995 } |
4962 | 4996 |
5487 do | 5521 do |
5488 { | 5522 { |
5489 blist_t *bnds_tailp1, *bndsp; | 5523 blist_t *bnds_tailp1, *bndsp; |
5490 expr_t expr_vliw; | 5524 expr_t expr_vliw; |
5491 int need_stall; | 5525 int need_stall; |
5492 int was_stall = 0, scheduled_insns = 0, stall_iterations = 0; | 5526 int was_stall = 0, scheduled_insns = 0; |
5493 int max_insns = pipelining_p ? issue_rate : 2 * issue_rate; | 5527 int max_insns = pipelining_p ? issue_rate : 2 * issue_rate; |
5494 int max_stall = pipelining_p ? 1 : 3; | 5528 int max_stall = pipelining_p ? 1 : 3; |
5495 bool last_insn_was_debug = false; | 5529 bool last_insn_was_debug = false; |
5496 bool was_debug_bb_end_p = false; | 5530 bool was_debug_bb_end_p = false; |
5497 | 5531 |
5506 /* Choose the best expression and, if needed, destination register | 5540 /* Choose the best expression and, if needed, destination register |
5507 for it. */ | 5541 for it. */ |
5508 do | 5542 do |
5509 { | 5543 { |
5510 expr_vliw = find_best_expr (&av_vliw, bnds, fence, &need_stall); | 5544 expr_vliw = find_best_expr (&av_vliw, bnds, fence, &need_stall); |
5511 if (!expr_vliw && need_stall) | 5545 if (! expr_vliw && need_stall) |
5512 { | 5546 { |
5513 /* All expressions required a stall. Do not recompute av sets | 5547 /* All expressions required a stall. Do not recompute av sets |
5514 as we'll get the same answer (modulo the insns between | 5548 as we'll get the same answer (modulo the insns between |
5515 the fence and its boundary, which will not be available for | 5549 the fence and its boundary, which will not be available for |
5516 pipelining). */ | 5550 pipelining). |
5517 gcc_assert (! expr_vliw && stall_iterations < 2); | 5551 If we are going to stall for too long, break to recompute av |
5552 sets and bring more insns for pipelining. */ | |
5518 was_stall++; | 5553 was_stall++; |
5519 /* If we are going to stall for too long, break to recompute av | |
5520 sets and bring more insns for pipelining. */ | |
5521 if (need_stall <= 3) | 5554 if (need_stall <= 3) |
5522 stall_for_cycles (fence, need_stall); | 5555 stall_for_cycles (fence, need_stall); |
5523 else | 5556 else |
5524 { | 5557 { |
5525 stall_for_cycles (fence, 1); | 5558 stall_for_cycles (fence, 1); |
5783 { | 5816 { |
5784 /* Even if this insn can be a copy that will be removed during current move_op, | 5817 /* Even if this insn can be a copy that will be removed during current move_op, |
5785 we still need to count it as an originator. */ | 5818 we still need to count it as an originator. */ |
5786 bitmap_set_bit (current_originators, INSN_UID (insn)); | 5819 bitmap_set_bit (current_originators, INSN_UID (insn)); |
5787 | 5820 |
5788 if (!bitmap_bit_p (current_copies, INSN_UID (insn))) | 5821 if (!bitmap_clear_bit (current_copies, INSN_UID (insn))) |
5789 { | 5822 { |
5790 /* Note that original block needs to be rescheduled, as we pulled an | 5823 /* Note that original block needs to be rescheduled, as we pulled an |
5791 instruction out of it. */ | 5824 instruction out of it. */ |
5792 if (INSN_SCHED_TIMES (insn) > 0) | 5825 if (INSN_SCHED_TIMES (insn) > 0) |
5793 bitmap_set_bit (blocks_to_reschedule, BLOCK_FOR_INSN (insn)->index); | 5826 bitmap_set_bit (blocks_to_reschedule, BLOCK_FOR_INSN (insn)->index); |
5794 else if (INSN_UID (insn) < first_emitted_uid && !DEBUG_INSN_P (insn)) | 5827 else if (INSN_UID (insn) < first_emitted_uid && !DEBUG_INSN_P (insn)) |
5795 num_insns_scheduled++; | 5828 num_insns_scheduled++; |
5796 } | 5829 } |
5797 else | |
5798 bitmap_clear_bit (current_copies, INSN_UID (insn)); | |
5799 | 5830 |
5800 /* For instructions we must immediately remove insn from the | 5831 /* For instructions we must immediately remove insn from the |
5801 stream, so subsequent update_data_sets () won't include this | 5832 stream, so subsequent update_data_sets () won't include this |
5802 insn into av_set. | 5833 insn into av_set. |
5803 For expr we must make insn look like "INSN_REG (insn) := c_expr". */ | 5834 For expr we must make insn look like "INSN_REG (insn) := c_expr". */ |
6348 not found below. In most cases, this situation is an error. | 6379 not found below. In most cases, this situation is an error. |
6349 The exception is when the original operation is blocked by | 6380 The exception is when the original operation is blocked by |
6350 bookkeeping generated for another fence or for another path in current | 6381 bookkeeping generated for another fence or for another path in current |
6351 move_op. */ | 6382 move_op. */ |
6352 gcc_assert (res == 1 | 6383 gcc_assert (res == 1 |
6353 || (res == 0 | 6384 || (res == 0 |
6354 && av_set_could_be_blocked_by_bookkeeping_p (orig_ops, | 6385 && av_set_could_be_blocked_by_bookkeeping_p (orig_ops, |
6355 static_params)) | 6386 static_params)) |
6356 || res == -1); | 6387 || res == -1); |
6357 #endif | 6388 #endif |
6358 | 6389 |
6359 /* Merge data, clean up, etc. */ | 6390 /* Merge data, clean up, etc. */ |
6360 if (res != -1 && code_motion_path_driver_info->after_merge_succs) | 6391 if (res != -1 && code_motion_path_driver_info->after_merge_succs) |
6361 code_motion_path_driver_info->after_merge_succs (&lparams, static_params); | 6392 code_motion_path_driver_info->after_merge_succs (&lparams, static_params); |
6690 { | 6721 { |
6691 gcc_assert (succ_bbi > bbi); | 6722 gcc_assert (succ_bbi > bbi); |
6692 | 6723 |
6693 init_seqno_1 (succ, visited_bbs, blocks_to_reschedule); | 6724 init_seqno_1 (succ, visited_bbs, blocks_to_reschedule); |
6694 } | 6725 } |
6726 else if (blocks_to_reschedule) | |
6727 bitmap_set_bit (forced_ebb_heads, succ->index); | |
6695 } | 6728 } |
6696 | 6729 |
6697 for (insn = BB_END (bb); insn != note; insn = PREV_INSN (insn)) | 6730 for (insn = BB_END (bb); insn != note; insn = PREV_INSN (insn)) |
6698 INSN_SEQNO (insn) = cur_seqno--; | 6731 INSN_SEQNO (insn) = cur_seqno--; |
6699 } | 6732 } |
6860 } | 6893 } |
6861 | 6894 |
6862 /* Set hooks so that no newly generated insn will go out unnoticed. */ | 6895 /* Set hooks so that no newly generated insn will go out unnoticed. */ |
6863 sel_register_cfg_hooks (); | 6896 sel_register_cfg_hooks (); |
6864 | 6897 |
6865 /* !!! We call target.sched.md_init () for the whole region, but we invoke | 6898 /* !!! We call target.sched.init () for the whole region, but we invoke |
6866 targetm.sched.md_finish () for every ebb. */ | 6899 targetm.sched.finish () for every ebb. */ |
6867 if (targetm.sched.md_init) | 6900 if (targetm.sched.init) |
6868 /* None of the arguments are actually used in any target. */ | 6901 /* None of the arguments are actually used in any target. */ |
6869 targetm.sched.md_init (sched_dump, sched_verbose, -1); | 6902 targetm.sched.init (sched_dump, sched_verbose, -1); |
6870 | 6903 |
6871 first_emitted_uid = get_max_uid () + 1; | 6904 first_emitted_uid = get_max_uid () + 1; |
6872 preheader_removed = false; | 6905 preheader_removed = false; |
6873 | 6906 |
6874 /* Reset register allocation ticks array. */ | 6907 /* Reset register allocation ticks array. */ |
6944 reset_sched_cycles_in_current_ebb (void) | 6977 reset_sched_cycles_in_current_ebb (void) |
6945 { | 6978 { |
6946 int last_clock = 0; | 6979 int last_clock = 0; |
6947 int haifa_last_clock = -1; | 6980 int haifa_last_clock = -1; |
6948 int haifa_clock = 0; | 6981 int haifa_clock = 0; |
6982 int issued_insns = 0; | |
6949 insn_t insn; | 6983 insn_t insn; |
6950 | 6984 |
6951 if (targetm.sched.md_init) | 6985 if (targetm.sched.init) |
6952 { | 6986 { |
6953 /* None of the arguments are actually used in any target. | 6987 /* None of the arguments are actually used in any target. |
6954 NB: We should have md_reset () hook for cases like this. */ | 6988 NB: We should have md_reset () hook for cases like this. */ |
6955 targetm.sched.md_init (sched_dump, sched_verbose, -1); | 6989 targetm.sched.init (sched_dump, sched_verbose, -1); |
6956 } | 6990 } |
6957 | 6991 |
6958 state_reset (curr_state); | 6992 state_reset (curr_state); |
6959 advance_state (curr_state); | 6993 advance_state (curr_state); |
6960 | 6994 |
6962 insn != current_sched_info->next_tail; | 6996 insn != current_sched_info->next_tail; |
6963 insn = NEXT_INSN (insn)) | 6997 insn = NEXT_INSN (insn)) |
6964 { | 6998 { |
6965 int cost, haifa_cost; | 6999 int cost, haifa_cost; |
6966 int sort_p; | 7000 int sort_p; |
6967 bool asm_p, real_insn, after_stall; | 7001 bool asm_p, real_insn, after_stall, all_issued; |
6968 int clock; | 7002 int clock; |
6969 | 7003 |
6970 if (!INSN_P (insn)) | 7004 if (!INSN_P (insn)) |
6971 continue; | 7005 continue; |
6972 | 7006 |
6998 if (INSN_AFTER_STALL_P (insn) && cost > haifa_cost) | 7032 if (INSN_AFTER_STALL_P (insn) && cost > haifa_cost) |
6999 { | 7033 { |
7000 haifa_cost = cost; | 7034 haifa_cost = cost; |
7001 after_stall = 1; | 7035 after_stall = 1; |
7002 } | 7036 } |
7003 | 7037 all_issued = issued_insns == issue_rate; |
7038 if (haifa_cost == 0 && all_issued) | |
7039 haifa_cost = 1; | |
7004 if (haifa_cost > 0) | 7040 if (haifa_cost > 0) |
7005 { | 7041 { |
7006 int i = 0; | 7042 int i = 0; |
7007 | 7043 |
7008 while (haifa_cost--) | 7044 while (haifa_cost--) |
7009 { | 7045 { |
7010 advance_state (curr_state); | 7046 advance_state (curr_state); |
7047 issued_insns = 0; | |
7011 i++; | 7048 i++; |
7012 | 7049 |
7013 if (sched_verbose >= 2) | 7050 if (sched_verbose >= 2) |
7014 { | 7051 { |
7015 sel_print ("advance_state (state_transition)\n"); | 7052 sel_print ("advance_state (state_transition)\n"); |
7022 if (!after_stall | 7059 if (!after_stall |
7023 && real_insn | 7060 && real_insn |
7024 && haifa_cost > 0 | 7061 && haifa_cost > 0 |
7025 && estimate_insn_cost (insn, curr_state) == 0) | 7062 && estimate_insn_cost (insn, curr_state) == 0) |
7026 break; | 7063 break; |
7027 } | 7064 |
7065 /* When the data dependency stall is longer than the DFA stall, | |
7066 and when we have issued exactly issue_rate insns and stalled, | |
7067 it could be that after this longer stall the insn will again | |
7068 become unavailable to the DFA restrictions. Looks strange | |
7069 but happens e.g. on x86-64. So recheck DFA on the last | |
7070 iteration. */ | |
7071 if ((after_stall || all_issued) | |
7072 && real_insn | |
7073 && haifa_cost == 0) | |
7074 haifa_cost = estimate_insn_cost (insn, curr_state); | |
7075 } | |
7028 | 7076 |
7029 haifa_clock += i; | 7077 haifa_clock += i; |
7078 if (sched_verbose >= 2) | |
7079 sel_print ("haifa clock: %d\n", haifa_clock); | |
7030 } | 7080 } |
7031 else | 7081 else |
7032 gcc_assert (haifa_cost == 0); | 7082 gcc_assert (haifa_cost == 0); |
7033 | 7083 |
7034 if (sched_verbose >= 2) | 7084 if (sched_verbose >= 2) |
7038 while (targetm.sched.dfa_new_cycle (sched_dump, sched_verbose, insn, | 7088 while (targetm.sched.dfa_new_cycle (sched_dump, sched_verbose, insn, |
7039 haifa_last_clock, haifa_clock, | 7089 haifa_last_clock, haifa_clock, |
7040 &sort_p)) | 7090 &sort_p)) |
7041 { | 7091 { |
7042 advance_state (curr_state); | 7092 advance_state (curr_state); |
7093 issued_insns = 0; | |
7043 haifa_clock++; | 7094 haifa_clock++; |
7044 if (sched_verbose >= 2) | 7095 if (sched_verbose >= 2) |
7045 { | 7096 { |
7046 sel_print ("advance_state (dfa_new_cycle)\n"); | 7097 sel_print ("advance_state (dfa_new_cycle)\n"); |
7047 debug_state (curr_state); | 7098 debug_state (curr_state); |
7099 sel_print ("haifa clock: %d\n", haifa_clock + 1); | |
7048 } | 7100 } |
7049 } | 7101 } |
7050 | 7102 |
7051 if (real_insn) | 7103 if (real_insn) |
7052 { | 7104 { |
7053 cost = state_transition (curr_state, insn); | 7105 cost = state_transition (curr_state, insn); |
7106 issued_insns++; | |
7054 | 7107 |
7055 if (sched_verbose >= 2) | 7108 if (sched_verbose >= 2) |
7056 debug_state (curr_state); | 7109 { |
7057 | 7110 sel_print ("scheduled insn %d, clock %d\n", INSN_UID (insn), |
7111 haifa_clock + 1); | |
7112 debug_state (curr_state); | |
7113 } | |
7058 gcc_assert (cost < 0); | 7114 gcc_assert (cost < 0); |
7059 } | 7115 } |
7060 | 7116 |
7061 if (targetm.sched.variable_issue) | 7117 if (targetm.sched.variable_issue) |
7062 targetm.sched.variable_issue (sched_dump, sched_verbose, insn, 0); | 7118 targetm.sched.variable_issue (sched_dump, sched_verbose, insn, 0); |
7128 continue; | 7184 continue; |
7129 | 7185 |
7130 if (reset_sched_cycles_p) | 7186 if (reset_sched_cycles_p) |
7131 reset_sched_cycles_in_current_ebb (); | 7187 reset_sched_cycles_in_current_ebb (); |
7132 | 7188 |
7133 if (targetm.sched.md_init) | 7189 if (targetm.sched.init) |
7134 targetm.sched.md_init (sched_dump, sched_verbose, -1); | 7190 targetm.sched.init (sched_dump, sched_verbose, -1); |
7135 | 7191 |
7136 put_TImodes (); | 7192 put_TImodes (); |
7137 | 7193 |
7138 if (targetm.sched.md_finish) | 7194 if (targetm.sched.finish) |
7139 { | 7195 { |
7140 targetm.sched.md_finish (sched_dump, sched_verbose); | 7196 targetm.sched.finish (sched_dump, sched_verbose); |
7141 | 7197 |
7142 /* Extend luids so that insns generated by the target will | 7198 /* Extend luids so that insns generated by the target will |
7143 get zero luid. */ | 7199 get zero luid. */ |
7144 sched_init_luids (NULL, NULL, NULL, NULL); | 7200 sched_init_luids (NULL, NULL, NULL, NULL); |
7145 } | 7201 } |
7465 | 7521 |
7466 for (i = 0; i < current_nr_blocks; i++) | 7522 for (i = 0; i < current_nr_blocks; i++) |
7467 { | 7523 { |
7468 basic_block bb = EBB_FIRST_BB (i); | 7524 basic_block bb = EBB_FIRST_BB (i); |
7469 | 7525 |
7470 if (sel_bb_empty_p (bb)) | |
7471 { | |
7472 bitmap_clear_bit (blocks_to_reschedule, bb->index); | |
7473 continue; | |
7474 } | |
7475 | |
7476 if (bitmap_bit_p (blocks_to_reschedule, bb->index)) | 7526 if (bitmap_bit_p (blocks_to_reschedule, bb->index)) |
7477 { | 7527 { |
7528 if (! bb_ends_ebb_p (bb)) | |
7529 bitmap_set_bit (blocks_to_reschedule, bb_next_bb (bb)->index); | |
7530 if (sel_bb_empty_p (bb)) | |
7531 { | |
7532 bitmap_clear_bit (blocks_to_reschedule, bb->index); | |
7533 continue; | |
7534 } | |
7478 clear_outdated_rtx_info (bb); | 7535 clear_outdated_rtx_info (bb); |
7479 if (sel_insn_is_speculation_check (BB_END (bb)) | 7536 if (sel_insn_is_speculation_check (BB_END (bb)) |
7480 && JUMP_P (BB_END (bb))) | 7537 && JUMP_P (BB_END (bb))) |
7481 bitmap_set_bit (blocks_to_reschedule, | 7538 bitmap_set_bit (blocks_to_reschedule, |
7482 BRANCH_EDGE (bb)->dest->index); | 7539 BRANCH_EDGE (bb)->dest->index); |
7483 } | 7540 } |
7484 else if (INSN_SCHED_TIMES (sel_bb_head (bb)) <= 0) | 7541 else if (! sel_bb_empty_p (bb) |
7542 && INSN_SCHED_TIMES (sel_bb_head (bb)) <= 0) | |
7485 bitmap_set_bit (blocks_to_reschedule, bb->index); | 7543 bitmap_set_bit (blocks_to_reschedule, bb->index); |
7486 } | 7544 } |
7487 | 7545 |
7488 for (i = 0; i < current_nr_blocks; i++) | 7546 for (i = 0; i < current_nr_blocks; i++) |
7489 { | 7547 { |
7505 orig_max_seqno = init_seqno (0, blocks_to_reschedule, bb); | 7563 orig_max_seqno = init_seqno (0, blocks_to_reschedule, bb); |
7506 | 7564 |
7507 /* Mark BB as head of the new ebb. */ | 7565 /* Mark BB as head of the new ebb. */ |
7508 bitmap_set_bit (forced_ebb_heads, bb->index); | 7566 bitmap_set_bit (forced_ebb_heads, bb->index); |
7509 | 7567 |
7510 bitmap_clear_bit (blocks_to_reschedule, bb->index); | |
7511 | |
7512 gcc_assert (fences == NULL); | 7568 gcc_assert (fences == NULL); |
7513 | 7569 |
7514 init_fences (bb_note (bb)); | 7570 init_fences (bb_note (bb)); |
7515 | 7571 |
7516 sel_sched_region_2 (orig_max_seqno); | 7572 sel_sched_region_2 (orig_max_seqno); |