comparison: gcc/sel-sched-ir.c @ 16:04ced10e8804

gcc 7
author    kono
date      Fri, 27 Oct 2017 22:46:09 +0900
parents   f6334be47118
children  84e7813d76e9
--- gcc/sel-sched-ir.c	15:561a7518be6b
+++ gcc/sel-sched-ir.c	16:04ced10e8804
@@ -1,7 +1,7 @@
 /* Instruction scheduling pass. Selective scheduler and pipeliner.
-   Copyright (C) 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+   Copyright (C) 2006-2017 Free Software Foundation, Inc.

    This file is part of GCC.

    GCC is free software; you can redistribute it and/or modify it under
    the terms of the GNU General Public License as published by the Free
@@ -18,47 +18,43 @@
 <http://www.gnu.org/licenses/>. */

 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
-#include "diagnostic-core.h"
+#include "backend.h"
+#include "cfghooks.h"
+#include "tree.h"
 #include "rtl.h"
+#include "df.h"
+#include "memmodel.h"
 #include "tm_p.h"
-#include "hard-reg-set.h"
-#include "regs.h"
-#include "function.h"
-#include "flags.h"
+#include "cfgrtl.h"
+#include "cfganal.h"
+#include "cfgbuild.h"
 #include "insn-config.h"
 #include "insn-attr.h"
-#include "except.h"
 #include "recog.h"
 #include "params.h"
 #include "target.h"
-#include "timevar.h"
-#include "tree-pass.h"
 #include "sched-int.h"
-#include "ggc.h"
-#include "tree.h"
-#include "vec.h"
-#include "langhooks.h"
-#include "rtlhooks-def.h"
 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */

 #ifdef INSN_SCHEDULING
+#include "regset.h"
+#include "cfgloop.h"
 #include "sel-sched-ir.h"
 /* We don't have to use it except for sel_print_insn. */
 #include "sel-sched-dump.h"

 /* A vector holding bb info for whole scheduling pass. */
-VEC(sel_global_bb_info_def, heap) *sel_global_bb_info = NULL;
+vec<sel_global_bb_info_def> sel_global_bb_info;

 /* A vector holding bb info. */
-VEC(sel_region_bb_info_def, heap) *sel_region_bb_info = NULL;
+vec<sel_region_bb_info_def> sel_region_bb_info;

 /* A pool for allocating all lists. */
-alloc_pool sched_lists_pool;
+object_allocator<_list_node> sched_lists_pool ("sel-sched-lists");

 /* This contains information about successors for compute_av_set. */
 struct succs_info current_succs;

 /* Data structure to describe interaction with the generic scheduler utils. */
@@ -67,11 +63,11 @@
 /* The loop nest being pipelined. */
 struct loop *current_loop_nest;

 /* LOOP_NESTS is a vector containing the corresponding loop nest for
    each region. */
-static VEC(loop_p, heap) *loop_nests = NULL;
+static vec<loop_p> loop_nests;

 /* Saves blocks already in loop regions, indexed by bb->index. */
 static sbitmap bbs_in_loop_rgns = NULL;

 /* CFG hooks that are saved before changing create_basic_block hook. */
@@ -124,17 +120,17 @@
   /* Its size. */
   int s;
 } nop_pool = { NULL, 0, 0 };

 /* The pool for basic block notes. */
-static rtx_vec_t bb_note_pool;
+static vec<rtx_note *> bb_note_pool;

 /* A NOP pattern used to emit placeholder insns. */
 rtx nop_pattern = NULL_RTX;
 /* A special instruction that resides in EXIT_BLOCK.
    EXIT_INSN is successor of the insns that lead to EXIT_BLOCK. */
-rtx exit_insn = NULL_RTX;
+rtx_insn *exit_insn = NULL;

 /* TRUE if while scheduling current region, which is loop, its preheader
    was removed. */
 bool preheader_removed = false;

@@ -146,11 +142,11 @@
 static void init_id_from_df (idata_t, insn_t, bool);
 static expr_t set_insn_init (expr_t, vinsn_t, int);

 static void cfg_preds (basic_block, insn_t **, int *);
 static void prepare_insn_expr (insn_t, int);
-static void free_history_vect (VEC (expr_history_def, heap) **);
+static void free_history_vect (vec<expr_history_def> &);

 static void move_bb_info (basic_block, basic_block);
 static void remove_empty_bb (basic_block, bool);
 static void sel_merge_blocks (basic_block, basic_block);
 static void sel_remove_loop_preheader (void);
@@ -160,11 +156,11 @@
 static void create_initial_data_sets (basic_block);

 static void free_av_set (basic_block);
 static void invalidate_av_set (basic_block);
 static void extend_insn_data (void);
-static void sel_init_new_insn (insn_t, int);
+static void sel_init_new_insn (insn_t, int, int = -1);
 static void finish_insns (void);

 /* Various list functions. */

 /* Copy an instruction list L. */
@@ -260,11 +256,11 @@
 }

 /* Add new fence consisting of INSN and STATE to the list pointed to by LP. */
 static void
 flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
-           insn_t last_scheduled_insn, VEC(rtx,gc) *executing_insns,
+           insn_t last_scheduled_insn, vec<rtx_insn *, va_gc> *executing_insns,
            int *ready_ticks, int ready_ticks_size, insn_t sched_next,
            int cycle, int cycle_issued_insns, int issue_more,
            bool starts_cycle_p, bool after_stall_p)
 {
   fence_t f;
@@ -578,19 +574,18 @@
   ilist_clear (&FENCE_BNDS (f));

   gcc_assert ((s != NULL && dc != NULL && tc != NULL)
               || (s == NULL && dc == NULL && tc == NULL));

-  if (s != NULL)
-    free (s);
+  free (s);

   if (dc != NULL)
     delete_deps_context (dc);

   if (tc != NULL)
     delete_target_context (tc);
-  VEC_free (rtx, gc, FENCE_EXECUTING_INSNS (f));
+  vec_free (FENCE_EXECUTING_INSNS (f));
   free (FENCE_READY_TICKS (f));
   FENCE_READY_TICKS (f) = NULL;
 }

 /* Init a list of fences with successors of OLD_FENCE. */
@@ -613,15 +608,15 @@

       flist_add (&fences, succ,
                  state_create (),
                  create_deps_context () /* dc */,
                  create_target_context (true) /* tc */,
-                 NULL_RTX /* last_scheduled_insn */,
+                 NULL /* last_scheduled_insn */,
                  NULL, /* executing_insns */
                  XCNEWVEC (int, ready_ticks_size), /* ready_ticks */
                  ready_ticks_size,
-                 NULL_RTX /* sched_next */,
+                 NULL /* sched_next */,
                  1 /* cycle */, 0 /* cycle_issued_insns */,
                  issue_rate, /* issue_more */
                  1 /* starts_cycle_p */, 0 /* after_stall_p */);
     }
 }
@@ -636,11 +631,12 @@
    READY_TICKS, READY_TICKS_SIZE, SCHED_NEXT, CYCLE, ISSUE_MORE
    and AFTER_STALL_P are the corresponding fields of the second fence. */
 static void
 merge_fences (fence_t f, insn_t insn,
               state_t state, deps_t dc, void *tc,
-              rtx last_scheduled_insn, VEC(rtx, gc) *executing_insns,
+              rtx_insn *last_scheduled_insn,
+              vec<rtx_insn *, va_gc> *executing_insns,
               int *ready_ticks, int ready_ticks_size,
               rtx sched_next, int cycle, int issue_more, bool after_stall_p)
 {
   insn_t last_scheduled_insn_old = FENCE_LAST_SCHEDULED_INSN (f);

@@ -669,15 +665,15 @@
       if (cycle > FENCE_CYCLE (f))
         FENCE_CYCLE (f) = cycle;

       FENCE_LAST_SCHEDULED_INSN (f) = NULL;
       FENCE_ISSUE_MORE (f) = issue_rate;
-      VEC_free (rtx, gc, executing_insns);
+      vec_free (executing_insns);
       free (ready_ticks);
       if (FENCE_EXECUTING_INSNS (f))
-        VEC_block_remove (rtx, FENCE_EXECUTING_INSNS (f), 0,
-                          VEC_length (rtx, FENCE_EXECUTING_INSNS (f)));
+        FENCE_EXECUTING_INSNS (f)->block_remove (0,
+                                                 FENCE_EXECUTING_INSNS (f)->length ());
       if (FENCE_READY_TICKS (f))
         memset (FENCE_READY_TICKS (f), 0, FENCE_READY_TICKS_SIZE (f));
     }
   else
     {
@@ -757,37 +753,37 @@
       /* Check if we can choose most probable predecessor. */
       if (edge_old == NULL || edge_new == NULL)
         {
           reset_deps_context (FENCE_DC (f));
           delete_deps_context (dc);
-          VEC_free (rtx, gc, executing_insns);
+          vec_free (executing_insns);
           free (ready_ticks);

           FENCE_CYCLE (f) = MAX (FENCE_CYCLE (f), cycle);
           if (FENCE_EXECUTING_INSNS (f))
-            VEC_block_remove (rtx, FENCE_EXECUTING_INSNS (f), 0,
-                              VEC_length (rtx, FENCE_EXECUTING_INSNS (f)));
+            FENCE_EXECUTING_INSNS (f)->block_remove (0,
+                                                     FENCE_EXECUTING_INSNS (f)->length ());
           if (FENCE_READY_TICKS (f))
             memset (FENCE_READY_TICKS (f), 0, FENCE_READY_TICKS_SIZE (f));
         }
       else
         if (edge_new->probability > edge_old->probability)
           {
             delete_deps_context (FENCE_DC (f));
             FENCE_DC (f) = dc;
-            VEC_free (rtx, gc, FENCE_EXECUTING_INSNS (f));
+            vec_free (FENCE_EXECUTING_INSNS (f));
             FENCE_EXECUTING_INSNS (f) = executing_insns;
             free (FENCE_READY_TICKS (f));
             FENCE_READY_TICKS (f) = ready_ticks;
             FENCE_READY_TICKS_SIZE (f) = ready_ticks_size;
             FENCE_CYCLE (f) = cycle;
           }
         else
           {
             /* Leave DC and CYCLE untouched. */
             delete_deps_context (dc);
-            VEC_free (rtx, gc, executing_insns);
+            vec_free (executing_insns);
             free (ready_ticks);
           }
     }

   /* Fill remaining invariant fields. */
@@ -801,13 +797,14 @@

 /* Add a new fence to NEW_FENCES list, initializing it from all
    other parameters. */
 static void
 add_to_fences (flist_tail_t new_fences, insn_t insn,
-               state_t state, deps_t dc, void *tc, rtx last_scheduled_insn,
-               VEC(rtx, gc) *executing_insns, int *ready_ticks,
-               int ready_ticks_size, rtx sched_next, int cycle,
+               state_t state, deps_t dc, void *tc,
+               rtx_insn *last_scheduled_insn,
+               vec<rtx_insn *, va_gc> *executing_insns, int *ready_ticks,
+               int ready_ticks_size, rtx_insn *sched_next, int cycle,
                int cycle_issued_insns, int issue_rate,
                bool starts_cycle_p, bool after_stall_p)
 {
   fence_t f = flist_lookup (FLIST_TAIL_HEAD (new_fences), insn);

@@ -865,13 +862,13 @@
   int ready_ticks_size = get_max_uid () + 1;

   add_to_fences (new_fences,
                  succ, state_create (), create_deps_context (),
                  create_target_context (true),
-                 NULL_RTX, NULL,
+                 NULL, NULL,
                  XCNEWVEC (int, ready_ticks_size), ready_ticks_size,
-                 NULL_RTX, FENCE_CYCLE (fence) + 1,
+                 NULL, FENCE_CYCLE (fence) + 1,
                  0, issue_rate, 1, FENCE_AFTER_STALL_P (fence));
 }

 /* Add a new fence to NEW_FENCES list and initialize all of its data
    from FENCE and SUCC. */
@@ -886,11 +883,11 @@
   add_to_fences (new_fences,
                  succ, state_create_copy (FENCE_STATE (fence)),
                  create_copy_of_deps_context (FENCE_DC (fence)),
                  create_copy_of_target_context (FENCE_TC (fence)),
                  FENCE_LAST_SCHEDULED_INSN (fence),
-                 VEC_copy (rtx, gc, FENCE_EXECUTING_INSNS (fence)),
+                 vec_safe_copy (FENCE_EXECUTING_INSNS (fence)),
                  new_ready_ticks,
                  FENCE_READY_TICKS_SIZE (fence),
                  FENCE_SCHED_NEXT (fence),
                  FENCE_CYCLE (fence),
                  FENCE_ISSUED_INSNS (fence),
@@ -948,56 +945,59 @@
       regset_pool.v = XRESIZEVEC (regset, regset_pool.v,
                                   (regset_pool.s = 2 * regset_pool.s + 1));
       regset_pool.v[regset_pool.n++] = rs;
     }

-#ifdef ENABLE_CHECKING
 /* This is used as a qsort callback for sorting regset pool stacks.
    X and XX are addresses of two regsets. They are never equal. */
 static int
 cmp_v_in_regset_pool (const void *x, const void *xx)
 {
-  return *((const regset *) x) - *((const regset *) xx);
-}
-#endif
-
-/* Free the regset pool possibly checking for memory leaks. */
+  uintptr_t r1 = (uintptr_t) *((const regset *) x);
+  uintptr_t r2 = (uintptr_t) *((const regset *) xx);
+  if (r1 > r2)
+    return 1;
+  else if (r1 < r2)
+    return -1;
+  gcc_unreachable ();
+}
+
+/* Free the regset pool possibly checking for memory leaks. */
 void
 free_regset_pool (void)
 {
-#ifdef ENABLE_CHECKING
+  if (flag_checking)
     {
       regset *v = regset_pool.v;
       int i = 0;
       int n = regset_pool.n;

       regset *vv = regset_pool.vv;
       int ii = 0;
       int nn = regset_pool.nn;

       int diff = 0;

       gcc_assert (n <= nn);

       /* Sort both vectors so it will be possible to compare them. */
       qsort (v, n, sizeof (*v), cmp_v_in_regset_pool);
       qsort (vv, nn, sizeof (*vv), cmp_v_in_regset_pool);

       while (ii < nn)
         {
           if (v[i] == vv[ii])
             i++;
           else
             /* VV[II] was lost. */
             diff++;

           ii++;
         }

       gcc_assert (diff == regset_pool.diff);
     }
-#endif

   /* If not true - we have a memory leak. */
   gcc_assert (regset_pool.diff == 0);

   while (regset_pool.n)
@@ -1029,20 +1029,21 @@

 /* Emit a nop before INSN, taking it from pool. */
 insn_t
 get_nop_from_pool (insn_t insn)
 {
+  rtx nop_pat;
   insn_t nop;
   bool old_p = nop_pool.n != 0;
   int flags;

   if (old_p)
-    nop = nop_pool.v[--nop_pool.n];
+    nop_pat = nop_pool.v[--nop_pool.n];
   else
-    nop = nop_pattern;
+    nop_pat = nop_pattern;

-  nop = emit_insn_before (nop, insn);
+  nop = emit_insn_before (nop_pat, insn);

   if (old_p)
     flags = INSN_INIT_TODO_SSID;
   else
     flags = INSN_INIT_TODO_LUID | INSN_INIT_TODO_SSID;
@@ -1058,12 +1059,15 @@
 return_nop_to_pool (insn_t nop, bool full_tidying)
 {
   gcc_assert (INSN_IN_STREAM_P (nop));
   sel_remove_insn (nop, false, full_tidying);

+  /* We'll recycle this nop. */
+  nop->set_undeleted ();
+
   if (nop_pool.n == nop_pool.s)
-    nop_pool.v = XRESIZEVEC (rtx, nop_pool.v,
+    nop_pool.v = XRESIZEVEC (rtx_insn *, nop_pool.v,
                              (nop_pool.s = 2 * nop_pool.s + 1));
   nop_pool.v[nop_pool.n++] = nop;
 }

 /* Free the nop pool. */
@@ -1109,12 +1113,12 @@

 /* Callback, called from hash_rtx_cb. Helps to hash UNSPEC rtx X in a correct way
    to support ia64 speculation. When changes are needed, new rtx X and new mode
    NMODE are written, and the callback returns true. */
 static int
-hash_with_unspec_callback (const_rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
-                           rtx *nx, enum machine_mode* nmode)
+hash_with_unspec_callback (const_rtx x, machine_mode mode ATTRIBUTE_UNUSED,
+                           rtx *nx, machine_mode* nmode)
 {
   if (GET_CODE (x) == UNSPEC
       && targetm.sched.skip_rtx_p
       && targetm.sched.skip_rtx_p (x))
     {
@@ -1131,14 +1135,14 @@
 lhs_and_rhs_separable_p (rtx lhs, rtx rhs)
 {
   if (lhs == NULL || rhs == NULL)
     return false;

-  /* Do not schedule CONST, CONST_INT and CONST_DOUBLE etc as rhs: no point
-     to use reg, if const can be used. Moreover, scheduling const as rhs may
-     lead to mode mismatch cause consts don't have modes but they could be
-     merged from branches where the same const used in different modes. */
+  /* Do not schedule constants as rhs: no point to use reg, if const
+     can be used. Moreover, scheduling const as rhs may lead to mode
+     mismatch cause consts don't have modes but they could be merged
+     from branches where the same const used in different modes. */
   if (CONSTANT_P (rhs))
     return false;

   /* ??? Do not rename predicate registers to avoid ICEs in bundling. */
   if (COMPARISON_P (rhs))
@@ -1236,11 +1240,11 @@
 /* Return a copy of VI. When REATTACH_P is true, detach VI and attach
    the copy. */
 vinsn_t
 vinsn_copy (vinsn_t vi, bool reattach_p)
 {
-  rtx copy;
+  rtx_insn *copy;
   bool unique = VINSN_UNIQUE_P (vi);
   vinsn_t new_vi;

   copy = create_copy_of_insn_rtx (VINSN_INSN_RTX (vi));
   new_vi = create_vinsn_from_insn_rtx (copy, unique);
@@ -1296,11 +1300,11 @@
   return control_flow_insn_p (insn);
 }

 /* Return latency of INSN. */
 static int
-sel_insn_rtx_cost (rtx insn)
+sel_insn_rtx_cost (rtx_insn *insn)
 {
   int cost;

   /* A USE insn, or something else we don't need to
      understand. We can't pass these directly to
@@ -1318,11 +1322,11 @@

   return cost;
 }

 /* Return the cost of the VI.
-   !!! FIXME: Unify with haifa-sched.c: insn_cost (). */
+   !!! FIXME: Unify with haifa-sched.c: insn_sched_cost (). */
 int
 sel_vinsn_cost (vinsn_t vi)
 {
   int cost = vi->cost;

@@ -1388,10 +1392,15 @@
   int flags;

   emit_expr = set_insn_init (expr, vinsn ? vinsn : EXPR_VINSN (expr),
                              seqno);
   insn = EXPR_INSN_RTX (emit_expr);
+
+  /* The insn may come from the transformation cache, which may hold already
+     deleted insns, so mark it as not deleted. */
+  insn->set_undeleted ();
+
   add_insn_after (insn, after, BLOCK_FOR_INSN (insn));

   flags = INSN_INIT_TODO_SSID;
   if (INSN_LUID (insn) == 0)
     flags |= INSN_INIT_TODO_LUID;
@@ -1408,15 +1417,15 @@
   basic_block bb = BLOCK_FOR_INSN (after);
   insn_t next = NEXT_INSN (after);

   /* Assert that in move_op we disconnected this insn properly. */
   gcc_assert (EXPR_VINSN (INSN_EXPR (insn)) != NULL);
-  PREV_INSN (insn) = after;
-  NEXT_INSN (insn) = next;
+  SET_PREV_INSN (insn) = after;
+  SET_NEXT_INSN (insn) = next;

-  NEXT_INSN (after) = insn;
-  PREV_INSN (next) = insn;
+  SET_NEXT_INSN (after) = insn;
+  SET_PREV_INSN (next) = insn;

   /* Update links from insn to bb and vice versa. */
   df_insn_change_bb (insn, bb);
   if (BB_END (bb) == after)
     BB_END (bb) = insn;
@@ -1432,24 +1441,24 @@
    VECT and return true when found. Use NEW_VINSN for comparison only when
    COMPARE_VINSNS is true. Write to INDP the index on which
    the search has stopped, such that inserting the new element at INDP will
    retain VECT's sort order. */
 static bool
-find_in_history_vect_1 (VEC(expr_history_def, heap) *vect,
+find_in_history_vect_1 (vec<expr_history_def> vect,
                         unsigned uid, vinsn_t new_vinsn,
                         bool compare_vinsns, int *indp)
 {
   expr_history_def *arr;
-  int i, j, len = VEC_length (expr_history_def, vect);
+  int i, j, len = vect.length ();

   if (len == 0)
     {
       *indp = 0;
       return false;
     }

-  arr = VEC_address (expr_history_def, vect);
+  arr = vect.address ();
   i = 0, j = len - 1;

   while (i <= j)
     {
       unsigned auid = arr[i].uid;
@@ -1477,11 +1486,11 @@

 /* Search for a uid of INSN and NEW_VINSN in a sorted vector VECT. Return
    the position found or -1, if no such value is in vector.
    Search also for UIDs of insn's originators, if ORIGINATORS_P is true. */
 int
-find_in_history_vect (VEC(expr_history_def, heap) *vect, rtx insn,
+find_in_history_vect (vec<expr_history_def> vect, rtx insn,
                       vinsn_t new_vinsn, bool originators_p)
 {
   int ind;

   if (find_in_history_vect_1 (vect, INSN_UID (insn), new_vinsn,
@@ -1504,25 +1513,25 @@
 /* Insert new element in a sorted history vector pointed to by PVECT,
    if it is not there already. The element is searched using
    UID/NEW_EXPR_VINSN pair. TYPE, OLD_EXPR_VINSN and SPEC_DS save
    the history of a transformation. */
 void
-insert_in_history_vect (VEC (expr_history_def, heap) **pvect,
+insert_in_history_vect (vec<expr_history_def> *pvect,
                         unsigned uid, enum local_trans_type type,
                         vinsn_t old_expr_vinsn, vinsn_t new_expr_vinsn,
                         ds_t spec_ds)
 {
-  VEC(expr_history_def, heap) *vect = *pvect;
+  vec<expr_history_def> vect = *pvect;
   expr_history_def temp;
   bool res;
   int ind;

   res = find_in_history_vect_1 (vect, uid, new_expr_vinsn, true, &ind);

   if (res)
     {
-      expr_history_def *phist = VEC_index (expr_history_def, vect, ind);
+      expr_history_def *phist = &vect[ind];

       /* It is possible that speculation types of expressions that were
          propagated through different paths will be different here. In this
          case, merge the status to get the correct check later. */
       if (phist->spec_ds != spec_ds)
@@ -1536,36 +1545,47 @@
   temp.spec_ds = spec_ds;
   temp.type = type;

   vinsn_attach (old_expr_vinsn);
   vinsn_attach (new_expr_vinsn);
-  VEC_safe_insert (expr_history_def, heap, vect, ind, &temp);
+  vect.safe_insert (ind, temp);
   *pvect = vect;
 }

 /* Free history vector PVECT. */
 static void
-free_history_vect (VEC (expr_history_def, heap) **pvect)
+free_history_vect (vec<expr_history_def> &pvect)
 {
   unsigned i;
   expr_history_def *phist;

-  if (! *pvect)
+  if (! pvect.exists ())
     return;

-  for (i = 0;
-       VEC_iterate (expr_history_def, *pvect, i, phist);
-       i++)
+  for (i = 0; pvect.iterate (i, &phist); i++)
     {
       vinsn_detach (phist->old_expr_vinsn);
       vinsn_detach (phist->new_expr_vinsn);
     }

-  VEC_free (expr_history_def, heap, *pvect);
-  *pvect = NULL;
-}
-
+  pvect.release ();
+}
+
+/* Merge vector FROM to PVECT. */
+static void
+merge_history_vect (vec<expr_history_def> *pvect,
+                    vec<expr_history_def> from)
+{
+  expr_history_def *phist;
+  int i;
+
+  /* We keep this vector sorted. */
+  for (i = 0; from.iterate (i, &phist); i++)
+    insert_in_history_vect (pvect, phist->uid, phist->type,
+                            phist->old_expr_vinsn, phist->new_expr_vinsn,
+                            phist->spec_ds);
+}

 /* Compare two vinsns as rhses if possible and as vinsns otherwise. */
 bool
 vinsn_equal_p (vinsn_t x, vinsn_t y)
 {
@@ -1599,11 +1619,12 @@
 /* Initialize EXPR. */
 static void
 init_expr (expr_t expr, vinsn_t vi, int spec, int use, int priority,
            int sched_times, int orig_bb_index, ds_t spec_done_ds,
            ds_t spec_to_check_ds, int orig_sched_cycle,
-           VEC(expr_history_def, heap) *history, signed char target_available,
+           vec<expr_history_def> history,
+           signed char target_available,
            bool was_substituted, bool was_renamed, bool needs_spec_check_p,
            bool cant_move)
 {
   vinsn_attach (vi);

@@ -1616,14 +1637,14 @@
   EXPR_ORIG_BB_INDEX (expr) = orig_bb_index;
   EXPR_ORIG_SCHED_CYCLE (expr) = orig_sched_cycle;
   EXPR_SPEC_DONE_DS (expr) = spec_done_ds;
   EXPR_SPEC_TO_CHECK_DS (expr) = spec_to_check_ds;

-  if (history)
+  if (history.exists ())
     EXPR_HISTORY_OF_CHANGES (expr) = history;
   else
-    EXPR_HISTORY_OF_CHANGES (expr) = NULL;
+    EXPR_HISTORY_OF_CHANGES (expr).create (0);

   EXPR_TARGET_AVAILABLE (expr) = target_available;
   EXPR_WAS_SUBSTITUTED (expr) = was_substituted;
   EXPR_WAS_RENAMED (expr) = was_renamed;
   EXPR_NEEDS_SPEC_CHECK_P (expr) = needs_spec_check_p;
@@ -1632,20 +1653,20 @@

 /* Make a copy of the expr FROM into the expr TO. */
 void
 copy_expr (expr_t to, expr_t from)
 {
-  VEC(expr_history_def, heap) *temp = NULL;
+  vec<expr_history_def> temp = vNULL;

-  if (EXPR_HISTORY_OF_CHANGES (from))
+  if (EXPR_HISTORY_OF_CHANGES (from).exists ())
     {
       unsigned i;
       expr_history_def *phist;

-      temp = VEC_copy (expr_history_def, heap, EXPR_HISTORY_OF_CHANGES (from));
+      temp = EXPR_HISTORY_OF_CHANGES (from).copy ();
       for (i = 0;
-           VEC_iterate (expr_history_def, temp, i, phist);
+           temp.iterate (i, &phist);
            i++)
         {
           vinsn_attach (phist->old_expr_vinsn);
           vinsn_attach (phist->new_expr_vinsn);
         }
@@ -1666,11 +1687,12 @@
 void
 copy_expr_onside (expr_t to, expr_t from)
 {
   init_expr (to, EXPR_VINSN (from), EXPR_SPEC (from), EXPR_USEFULNESS (from),
              EXPR_PRIORITY (from), EXPR_SCHED_TIMES (from), 0,
-             EXPR_SPEC_DONE_DS (from), EXPR_SPEC_TO_CHECK_DS (from), 0, NULL,
+             EXPR_SPEC_DONE_DS (from), EXPR_SPEC_TO_CHECK_DS (from), 0,
+             vNULL,
              EXPR_TARGET_AVAILABLE (from), EXPR_WAS_SUBSTITUTED (from),
              EXPR_WAS_RENAMED (from), EXPR_NEEDS_SPEC_CHECK_P (from),
              EXPR_CANT_MOVE (from));
 }

@@ -1698,11 +1720,11 @@
      existing status. To be fixed. */
   ds = EXPR_SPEC_DONE_DS (expr);
   if (ds)
     EXPR_SPEC_DONE_DS (expr) = ds_get_max_dep_weak (ds);

-  free_history_vect (&EXPR_HISTORY_OF_CHANGES (expr));
+  free_history_vect (EXPR_HISTORY_OF_CHANGES (expr));
 }

 /* Update target_available bits when merging exprs TO and FROM. SPLIT_POINT
    is non-null when expressions are merged from different successors at
    a split point. */
@@ -1729,10 +1751,15 @@
              merge_with_other_exprs. */
           ;
           else
             EXPR_TARGET_AVAILABLE (to) = -1;
         }
+      else if (EXPR_TARGET_AVAILABLE (from) == 0
+               && EXPR_LHS (from)
+               && REG_P (EXPR_LHS (from))
+               && REGNO (EXPR_LHS (to)) != REGNO (EXPR_LHS (from)))
+        EXPR_TARGET_AVAILABLE (to) = -1;
       else
         EXPR_TARGET_AVAILABLE (to) &= EXPR_TARGET_AVAILABLE (from);
     }
 }

@@ -1794,16 +1821,13 @@
 /* Merge bits of FROM expr to TO expr. When SPLIT_POINT is not NULL,
    this is done along different paths. */
 void
 merge_expr_data (expr_t to, expr_t from, insn_t split_point)
 {
-  int i;
-  expr_history_def *phist;
-
-  /* For now, we just set the spec of resulting expr to be minimum of the specs
-     of merged exprs. */
-  if (EXPR_SPEC (to) > EXPR_SPEC (from))
+  /* Choose the maximum of the specs of merged exprs. This is required
+     for correctness of bookkeeping. */
+  if (EXPR_SPEC (to) < EXPR_SPEC (from))
     EXPR_SPEC (to) = EXPR_SPEC (from);

   if (split_point)
     EXPR_USEFULNESS (to) += EXPR_USEFULNESS (from);
   else
@@ -1820,24 +1844,16 @@
     EXPR_ORIG_BB_INDEX (to) = 0;

   EXPR_ORIG_SCHED_CYCLE (to) = MIN (EXPR_ORIG_SCHED_CYCLE (to),
                                     EXPR_ORIG_SCHED_CYCLE (from));

-  /* We keep this vector sorted. */
-  for (i = 0;
-       VEC_iterate (expr_history_def, EXPR_HISTORY_OF_CHANGES (from),
-                    i, phist);
-       i++)
-    insert_in_history_vect (&EXPR_HISTORY_OF_CHANGES (to),
-                            phist->uid, phist->type,
-                            phist->old_expr_vinsn, phist->new_expr_vinsn,
-                            phist->spec_ds);
-
   EXPR_WAS_SUBSTITUTED (to) |= EXPR_WAS_SUBSTITUTED (from);
   EXPR_WAS_RENAMED (to) |= EXPR_WAS_RENAMED (from);
   EXPR_CANT_MOVE (to) |= EXPR_CANT_MOVE (from);

+  merge_history_vect (&EXPR_HISTORY_OF_CHANGES (to),
+                      EXPR_HISTORY_OF_CHANGES (from));
   update_target_availability (to, from, split_point);
   update_speculative_bits (to, from, split_point);
 }

 /* Merge bits of FROM expr to TO expr. Vinsns in the exprs should be equal
@@ -1853,11 +1869,19 @@

   /* Make sure that speculative pattern is propagated into exprs that
      have non-speculative one. This will provide us with consistent
      speculative bits and speculative patterns inside expr. */
   if (EXPR_SPEC_DONE_DS (to) == 0
-      && EXPR_SPEC_DONE_DS (from) != 0)
+      && (EXPR_SPEC_DONE_DS (from) != 0
+          /* Do likewise for volatile insns, so that we always retain
+             the may_trap_p bit on the resulting expression. However,
+             avoid propagating the trapping bit into the instructions
+             already speculated. This would result in replacing the
+             speculative pattern with the non-speculative one and breaking
+             the speculation support. */
+          || (!VINSN_MAY_TRAP_P (EXPR_VINSN (to))
+              && VINSN_MAY_TRAP_P (EXPR_VINSN (from)))))
     change_vinsn_in_expr (to, EXPR_VINSN (from));

   merge_expr_data (to, from, split_point);
   gcc_assert (EXPR_USEFULNESS (to) <= REG_BR_PROB_BASE);
 }
@@ -1868,21 +1892,21 @@
 {

   vinsn_detach (EXPR_VINSN (expr));
   EXPR_VINSN (expr) = NULL;

-  free_history_vect (&EXPR_HISTORY_OF_CHANGES (expr));
+  free_history_vect (EXPR_HISTORY_OF_CHANGES (expr));
 }

 /* For a given LV_SET, mark EXPR having unavailable target register. */
 static void
 set_unavailable_target_for_expr (expr_t expr, regset lv_set)
 {
   if (EXPR_SEPARABLE_P (expr))
     {
       if (REG_P (EXPR_LHS (expr))
-          && bitmap_bit_p (lv_set, REGNO (EXPR_LHS (expr))))
+          && register_unavailable_p (lv_set, EXPR_LHS (expr)))
         {
           /* If it's an insn like r1 = use (r1, ...), and it exists in
              different forms in each of the av_sets being merged, we can't say
              whether original destination register is available or not.
              However, this still works if destination register is not used
@@ -1899,12 +1923,12 @@
              It still doesn't cover the case when register is defined and used
              somewhere within the code motion path, and in this case we could
              miss a unifying code motion along both branches using a renamed
              register, but it won't affect a code correctness since upon
              an actual code motion a bookkeeping code would be generated. */
-          if (bitmap_bit_p (VINSN_REG_USES (EXPR_VINSN (expr)),
-                            REGNO (EXPR_LHS (expr))))
+          if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
+                                      EXPR_LHS (expr)))
             EXPR_TARGET_AVAILABLE (expr) = -1;
           else
             EXPR_TARGET_AVAILABLE (expr) = false;
         }
     }
@@ -1936,11 +1960,11 @@
    became unavailable, 0 if nothing had to be changed. */
 int
 speculate_expr (expr_t expr, ds_t ds)
 {
   int res;
-  rtx orig_insn_rtx;
+  rtx_insn *orig_insn_rtx;
   rtx spec_pat;
   ds_t target_ds, current_ds;

   /* Obtain the status we need to put on EXPR. */
   target_ds = (ds & SPECULATIVE);
@@ -1957,21 +1981,22 @@
       EXPR_SPEC_DONE_DS (expr) = ds;
       return current_ds != ds ? 1 : 0;

     case 1:
       {
-        rtx spec_insn_rtx = create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
+        rtx_insn *spec_insn_rtx =
+          create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
         vinsn_t spec_vinsn = create_vinsn_from_insn_rtx (spec_insn_rtx, false);

         change_vinsn_in_expr (expr, spec_vinsn);
         EXPR_SPEC_DONE_DS (expr) = ds;
         EXPR_NEEDS_SPEC_CHECK_P (expr) = true;

         /* Do not allow clobbering the address register of speculative
            insns. */
-        if (bitmap_bit_p (VINSN_REG_USES (EXPR_VINSN (expr)),
-                          expr_dest_regno (expr)))
+        if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
+                                    expr_dest_reg (expr)))
           {
             EXPR_TARGET_AVAILABLE (expr) = false;
             return 2;
           }

@@ -2020,10 +2045,29 @@
   FOR_EACH_EXPR (expr, avi, join_set)
     if (av_set_lookup (av_set, EXPR_VINSN (expr)) == NULL)
       set_unavailable_target_for_expr (expr, lv_set);
 }

+
+/* Returns true if REG (at least partially) is present in REGS. */
+bool
+register_unavailable_p (regset regs, rtx reg)
+{
+  unsigned regno, end_regno;
+
+  regno = REGNO (reg);
+  if (bitmap_bit_p (regs, regno))
+    return true;
+
+  end_regno = END_REGNO (reg);
+
+  while (++regno < end_regno)
+    if (bitmap_bit_p (regs, regno))
+      return true;
+
+  return false;
+}

 /* Av set functions. */

 /* Add a new element to av set SETP.
    Return the element added. */
@@ -2326,20 +2370,28 @@
           ? (EXPR_USEFULNESS (expr) * prob) / all_prob
           : 0);
 }

 /* Leave in AVP only those expressions, which are present in AV,
-   and return it. */
+   and return it, merging history expressions. */
 void
-av_set_intersect (av_set_t *avp, av_set_t av)
+av_set_code_motion_filter (av_set_t *avp, av_set_t av)
 {
   av_set_iterator i;
-  expr_t expr;
+  expr_t expr, expr2;

   FOR_EACH_EXPR_1 (expr, i, avp)
-    if (av_set_lookup (av, EXPR_VINSN (expr)) == NULL)
+    if ((expr2 = av_set_lookup (av, EXPR_VINSN (expr))) == NULL)
       av_set_iter_remove (&i);
+    else
+      /* When updating av sets in bookkeeping blocks, we can add more insns
+         there which will be transformed but the upper av sets will not
+         reflect those transformations. We then fail to undo those
+         when searching for such insns. So merge the history saved
+         in the av set of the block we are processing. */
+      merge_history_vect (&EXPR_HISTORY_OF_CHANGES (expr),
+                          EXPR_HISTORY_OF_CHANGES (expr2));
 }



 /* Dependence hooks to initialize insn data. */
@@ -2560,12 +2612,11 @@
 /* Possibly downgrade INSN to USE. */
 static void
 maybe_downgrade_id_to_use (idata_t id, insn_t insn)
 {
   bool must_be_use = false;
-  unsigned uid = INSN_UID (insn);
-  df_ref *rec;
+  df_ref def;
   rtx lhs = IDATA_LHS (id);
   rtx rhs = IDATA_RHS (id);

   /* We downgrade only SETs. */
   if (IDATA_TYPE (id) != SET)
@@ -2575,14 +2626,12 @@
     {
       IDATA_TYPE (id) = USE;
       return;
     }

-  for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+  FOR_EACH_INSN_DEF (def, insn)
     {
-      df_ref def = *rec;
-
       if (DF_REF_INSN (def)
           && DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)
           && loc_mentioned_in_p (DF_REF_LOC (def), IDATA_RHS (id)))
         {
           must_be_use = true;
@@ -2602,21 +2651,37 @@

   if (must_be_use)
     IDATA_TYPE (id) = USE;
 }

+/* Setup implicit register clobbers calculated by sched-deps for INSN
+   before reload and save them in ID. */
+static void
+setup_id_implicit_regs (idata_t id, insn_t insn)
+{
+  if (reload_completed)
+    return;
+
+  HARD_REG_SET temp;
+  unsigned regno;
+  hard_reg_set_iterator hrsi;
+
+  get_implicit_reg_pending_clobbers (&temp, insn);
+  EXECUTE_IF_SET_IN_HARD_REG_SET (temp, 0, regno, hrsi)
+    SET_REGNO_REG_SET (IDATA_REG_SETS (id), regno);
+}
+
 /* Setup register sets describing INSN in ID. */
 static void
 setup_id_reg_sets (idata_t id, insn_t insn)
 {
-  unsigned uid = INSN_UID (insn);
-  df_ref *rec;
+  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+  df_ref def, use;
   regset tmp = get_clear_regset_from_pool ();

-  for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+  FOR_EACH_INSN_INFO_DEF (def, insn_info)
     {
-      df_ref def = *rec;
       unsigned int regno = DF_REF_REGNO (def);

       /* Post modifies are treated like clobbers by sched-deps.c. */
       if (DF_REF_FLAGS_IS_SET (def, (DF_REF_MUST_CLOBBER
                                      | DF_REF_PRE_POST_MODIFY)))
@@ -2636,13 +2701,12 @@
       if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)
           || regno == STACK_POINTER_REGNUM)
         bitmap_set_bit (tmp, regno);
     }

-  for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
+  FOR_EACH_INSN_INFO_USE (use, insn_info)
     {
-      df_ref use = *rec;
       unsigned int regno = DF_REF_REGNO (use);

       /* When these refs are met for the first time, skip them, as
          these uses are just counterparts of some defs. */
       if (bitmap_bit_p (tmp, regno))
@@ -2658,10 +2722,13 @@
             SET_REGNO_REG_SET (IDATA_REG_USES (id), FIRST_STACK_REG);
 #endif
         }
     }

+  /* Also get implicit reg clobbers from sched-deps. */
+  setup_id_implicit_regs (id, insn);
+
   return_regset_to_pool (tmp);
 }

 /* Initialize instruction data for INSN in ID using DF's data. */
 static void
@@ -2689,28 +2756,74 @@
   deps_init_id_data.id = id;
   deps_init_id_data.force_unique_p = force_unique_p;
   deps_init_id_data.force_use_p = false;

   init_deps (dc, false);
-
   memcpy (&deps_init_id_sched_deps_info,
           &const_deps_init_id_sched_deps_info,
           sizeof (deps_init_id_sched_deps_info));
-
   if (spec_info != NULL)
     deps_init_id_sched_deps_info.generate_spec_deps = 1;
-
   sched_deps_info = &deps_init_id_sched_deps_info;

   deps_analyze_insn (dc, insn);
+  /* Implicit reg clobbers received from sched-deps separately. */
+  setup_id_implicit_regs (id, insn);

   free_deps (dc);
-
   deps_init_id_data.id = NULL;
 }


+struct sched_scan_info_def
+{
+  /* This hook notifies scheduler frontend to extend its internal per basic
+     block data structures. This hook should be called once before a series of
+     calls to bb_init (). */
+  void (*extend_bb) (void);
+
+  /* This hook makes scheduler frontend to initialize its internal data
+     structures for the passed basic block. */
+  void (*init_bb) (basic_block);
+
+  /* This hook notifies scheduler frontend to extend its internal per insn data
+     structures. This hook should be called once before a series of calls to
+     insn_init (). */
+  void (*extend_insn) (void);
+
+  /* This hook makes scheduler frontend to initialize its internal data
+     structures for the passed insn. */
+  void (*init_insn) (insn_t);
+};
+
+/* A driver function to add a set of basic blocks (BBS) to the
+   scheduling region. */
+static void
+sched_scan (const struct sched_scan_info_def *ssi, bb_vec_t bbs)
+{
+  unsigned i;
+  basic_block bb;
+
+  if (ssi->extend_bb)
+    ssi->extend_bb ();
+
+  if (ssi->init_bb)
+    FOR_EACH_VEC_ELT (bbs, i, bb)
+      ssi->init_bb (bb);
+
+  if (ssi->extend_insn)
+    ssi->extend_insn ();
+
+  if (ssi->init_insn)
+    FOR_EACH_VEC_ELT (bbs, i, bb)
+      {
+        rtx_insn *insn;
+
+        FOR_BB_INSNS (bb, insn)
+          ssi->init_insn (insn);
+      }
+}

 /* Implement hooks for collecting fundamental insn properties like if insn is
    an ASM or is within a SCHED_GROUP. */

 /* True when a "one-time init" data for INSN was already inited. */
@@ -2729,12 +2842,14 @@

 /* Compare the entries in a transformed_insns hashtable. */
 static int
 eq_transformed_insns (const void *p, const void *q)
 {
-  rtx i1 = VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
-  rtx i2 = VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);
+  rtx_insn *i1 =
+    VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
+  rtx_insn *i2 =
+    VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);

   if (INSN_UID (i1) == INSN_UID (i2))
     return 1;
   return rtx_equal_p (PATTERN (i1), PATTERN (i2));
 }
2838 if (LABEL_P (insn)) 2953 if (LABEL_P (insn))
2839 return; 2954 return;
2840 2955
2841 if (NOTE_INSN_BASIC_BLOCK_P (insn)) 2956 if (NOTE_INSN_BASIC_BLOCK_P (insn))
2842 { 2957 {
2843 init_global_data.prev_insn = NULL_RTX; 2958 init_global_data.prev_insn = NULL;
2844 return; 2959 return;
2845 } 2960 }
2846 2961
2847 gcc_assert (INSN_P (insn)); 2962 gcc_assert (INSN_P (insn));
2848 2963
2855 INSN_SCHED_NEXT (prev_insn) = insn; 2970 INSN_SCHED_NEXT (prev_insn) = insn;
2856 2971
2857 init_global_data.prev_insn = insn; 2972 init_global_data.prev_insn = insn;
2858 } 2973 }
2859 else 2974 else
2860 init_global_data.prev_insn = NULL_RTX; 2975 init_global_data.prev_insn = NULL;
2861 2976
2862 if (GET_CODE (PATTERN (insn)) == ASM_INPUT 2977 if (GET_CODE (PATTERN (insn)) == ASM_INPUT
2863 || asm_noperands (PATTERN (insn)) >= 0) 2978 || asm_noperands (PATTERN (insn)) >= 0)
2864 /* Mark INSN as an asm. */ 2979 /* Mark INSN as an asm. */
2865 INSN_ASM_P (insn) = true; 2980 INSN_ASM_P (insn) = true;
2891 } 3006 }
2892 else 3007 else
2893 if (CANT_MOVE (insn) 3008 if (CANT_MOVE (insn)
2894 || INSN_ASM_P (insn) 3009 || INSN_ASM_P (insn)
2895 || SCHED_GROUP_P (insn) 3010 || SCHED_GROUP_P (insn)
3011 || CALL_P (insn)
2896 /* Exception handling insns are always unique. */ 3012 /* Exception handling insns are always unique. */
2897 || (cfun->can_throw_non_call_exceptions && can_throw_internal (insn)) 3013 || (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
2898 /* TRAP_IF, though carrying an INSN code, is control_flow_insn_p (). */ 3014
2899 || control_flow_insn_p (insn)) 3015 || control_flow_insn_p (insn)
3016 || volatile_insn_p (PATTERN (insn))
3017 || (targetm.cannot_copy_insn_p
3018 && targetm.cannot_copy_insn_p (insn)))
2900 force_unique_p = true; 3019 force_unique_p = true;
2901 else 3020 else
2902 force_unique_p = false; 3021 force_unique_p = false;
2903 3022
2904 if (targetm.sched.get_insn_spec_ds) 3023 if (targetm.sched.get_insn_spec_ds)
2910 spec_done_ds = 0; 3029 spec_done_ds = 0;
2911 3030
2912 /* Initialize INSN's expr. */ 3031 /* Initialize INSN's expr. */
2913 init_expr (INSN_EXPR (insn), vinsn_create (insn, force_unique_p), 0, 3032 init_expr (INSN_EXPR (insn), vinsn_create (insn, force_unique_p), 0,
2914 REG_BR_PROB_BASE, INSN_PRIORITY (insn), 0, BLOCK_NUM (insn), 3033 REG_BR_PROB_BASE, INSN_PRIORITY (insn), 0, BLOCK_NUM (insn),
2915 spec_done_ds, 0, 0, NULL, true, false, false, false, 3034 spec_done_ds, 0, 0, vNULL, true,
2916 CANT_MOVE (insn)); 3035 false, false, false, CANT_MOVE (insn));
2917 } 3036 }
2918 3037
2919 init_first_time_insn_data (insn); 3038 init_first_time_insn_data (insn);
2920 } 3039 }
2921 3040
2930 init_global_and_expr_for_bb, /* init_bb */ 3049 init_global_and_expr_for_bb, /* init_bb */
2931 extend_insn_data, /* extend_insn */ 3050 extend_insn_data, /* extend_insn */
2932 init_global_and_expr_for_insn /* init_insn */ 3051 init_global_and_expr_for_insn /* init_insn */
2933 }; 3052 };
2934 3053
2935 sched_scan (&ssi, bbs, NULL, NULL, NULL); 3054 sched_scan (&ssi, bbs);
2936 } 3055 }
2937 3056
2938 /* Finalize region-scope data structures for basic blocks. */ 3057 /* Finalize region-scope data structures for basic blocks. */
2939 static void 3058 static void
2940 finish_global_and_expr_for_bb (basic_block bb) 3059 finish_global_and_expr_for_bb (basic_block bb)
2972 { 3091 {
2973 { 3092 {
2974 bb_vec_t bbs; 3093 bb_vec_t bbs;
2975 int i; 3094 int i;
2976 3095
2977 bbs = VEC_alloc (basic_block, heap, current_nr_blocks); 3096 bbs.create (current_nr_blocks);
2978 3097
2979 for (i = 0; i < current_nr_blocks; i++) 3098 for (i = 0; i < current_nr_blocks; i++)
2980 VEC_quick_push (basic_block, bbs, BASIC_BLOCK (BB_TO_BLOCK (i))); 3099 bbs.quick_push (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i)));
2981 3100
2982 /* Clear AV_SETs and INSN_EXPRs. */ 3101 /* Clear AV_SETs and INSN_EXPRs. */
2983 { 3102 {
2984 const struct sched_scan_info_def ssi = 3103 const struct sched_scan_info_def ssi =
2985 { 3104 {
2987 finish_global_and_expr_for_bb, /* init_bb */ 3106 finish_global_and_expr_for_bb, /* init_bb */
2988 NULL, /* extend_insn */ 3107 NULL, /* extend_insn */
2989 finish_global_and_expr_insn /* init_insn */ 3108 finish_global_and_expr_insn /* init_insn */
2990 }; 3109 };
2991 3110
2992 sched_scan (&ssi, bbs, NULL, NULL, NULL); 3111 sched_scan (&ssi, bbs);
2993 } 3112 }
2994 3113
2995 VEC_free (basic_block, heap, bbs); 3114 bbs.release ();
2996 } 3115 }
2997 3116
2998 finish_insns (); 3117 finish_insns ();
2999 } 3118 }
3000 3119
3092 3211
3093 if (reg_last->sets != NULL 3212 if (reg_last->sets != NULL
3094 || reg_last->clobbers != NULL) 3213 || reg_last->clobbers != NULL)
3095 *dsp = (*dsp & ~SPECULATIVE) | DEP_OUTPUT; 3214 *dsp = (*dsp & ~SPECULATIVE) | DEP_OUTPUT;
3096 3215
3097 if (reg_last->uses) 3216 if (reg_last->uses || reg_last->implicit_sets)
3098 *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI; 3217 *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI;
3099 } 3218 }
3100 } 3219 }
3101 3220
3102 /* Note a clobber of REGNO. */ 3221 /* Note a clobber of REGNO. */
3112 ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where]; 3231 ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];
3113 3232
3114 if (reg_last->sets) 3233 if (reg_last->sets)
3115 *dsp = (*dsp & ~SPECULATIVE) | DEP_OUTPUT; 3234 *dsp = (*dsp & ~SPECULATIVE) | DEP_OUTPUT;
3116 3235
3117 if (reg_last->uses) 3236 if (reg_last->uses || reg_last->implicit_sets)
3118 *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI; 3237 *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI;
3119 } 3238 }
3120 } 3239 }
3121 3240
3122 /* Note a use of REGNO. */ 3241 /* Note a use of REGNO. */
3132 ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where]; 3251 ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];
3133 3252
3134 if (reg_last->sets) 3253 if (reg_last->sets)
3135 *dsp = (*dsp & ~SPECULATIVE) | DEP_TRUE; 3254 *dsp = (*dsp & ~SPECULATIVE) | DEP_TRUE;
3136 3255
3137 if (reg_last->clobbers) 3256 if (reg_last->clobbers || reg_last->implicit_sets)
3138 *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI; 3257 *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI;
3139 3258
3140 /* Handle BE_IN_SPEC. */ 3259 /* Merge BE_IN_SPEC bits into *DSP when the dependency producer
3260 is actually a check insn. We need to do this for any register
3261 read-read dependency with the check unless we track properly
3262 all registers written by BE_IN_SPEC-speculated insns, as
3263 we don't have explicit dependence lists. See PR 53975. */
3141 if (reg_last->uses) 3264 if (reg_last->uses)
3142 { 3265 {
3143 ds_t pro_spec_checked_ds; 3266 ds_t pro_spec_checked_ds;
3144 3267
3145 pro_spec_checked_ds = INSN_SPEC_CHECKED_DS (has_dependence_data.pro); 3268 pro_spec_checked_ds = INSN_SPEC_CHECKED_DS (has_dependence_data.pro);
3146 pro_spec_checked_ds = ds_get_max_dep_weak (pro_spec_checked_ds); 3269 pro_spec_checked_ds = ds_get_max_dep_weak (pro_spec_checked_ds);
3147 3270
3148 if (pro_spec_checked_ds != 0) 3271 if (pro_spec_checked_ds != 0)
3149 /* Merge BE_IN_SPEC bits into *DSP. */
3150 *dsp = ds_full_merge (*dsp, pro_spec_checked_ds, 3272 *dsp = ds_full_merge (*dsp, pro_spec_checked_ds,
3151 NULL_RTX, NULL_RTX); 3273 NULL_RTX, NULL_RTX);
3152 } 3274 }
3153 } 3275 }
3154 } 3276 }
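
To make the status-bit manipulation in these hooks concrete: each has_dependence_* callback ORs a dependence kind into a ds_t word while clearing any speculative bits, so once a hard dependence is recorded the insn pair can no longer be treated as speculatively movable. A worked sketch, assuming the DEP_* and SPECULATIVE masks from sched-int.h:

    /* Illustrative only: accumulating dependence status as above.  */
    ds_t ds = 0;
    ds = (ds & ~SPECULATIVE) | DEP_TRUE;  /* producer set the register     */
    ds = (ds & ~SPECULATIVE) | DEP_ANTI;  /* a prior use/implicit set too  */
    /* DS now carries DEP_TRUE | DEP_ANTI and no speculative weakness.     */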
3467 at the data structures of the selective scheduler, not by examining 3589 at the data structures of the selective scheduler, not by examining
3468 the pattern. */ 3590 the pattern. */
3469 bool 3591 bool
3470 sel_insn_is_speculation_check (rtx insn) 3592 sel_insn_is_speculation_check (rtx insn)
3471 { 3593 {
3472 return s_i_d && !! INSN_SPEC_CHECKED_DS (insn); 3594 return s_i_d.exists () && !! INSN_SPEC_CHECKED_DS (insn);
3473 } 3595 }
3474 3596
3475 /* Extracts machine mode MODE and destination location DST_LOC 3597 /* Extracts machine mode MODE and destination location DST_LOC
3476 for given INSN. */ 3598 for given INSN. */
3477 void 3599 void
3478 get_dest_and_mode (rtx insn, rtx *dst_loc, enum machine_mode *mode) 3600 get_dest_and_mode (rtx insn, rtx *dst_loc, machine_mode *mode)
3479 { 3601 {
3480 rtx pat = PATTERN (insn); 3602 rtx pat = PATTERN (insn);
3481 3603
3482 gcc_assert (dst_loc); 3604 gcc_assert (dst_loc);
3483 gcc_assert (GET_CODE (pat) == SET); 3605 gcc_assert (GET_CODE (pat) == SET);
3511 insn_is_the_only_one_in_bb_p (insn_t insn) 3633 insn_is_the_only_one_in_bb_p (insn_t insn)
3512 { 3634 {
3513 return sel_bb_head_p (insn) && sel_bb_end_p (insn); 3635 return sel_bb_head_p (insn) && sel_bb_end_p (insn);
3514 } 3636 }
3515 3637
3516 #ifdef ENABLE_CHECKING
3517 /* Check that the region we're scheduling still has at most one 3638 /* Check that the region we're scheduling still has at most one
3518 backedge. */ 3639 backedge. */
3519 static void 3640 static void
3520 verify_backedges (void) 3641 verify_backedges (void)
3521 { 3642 {
3524 int i, n = 0; 3645 int i, n = 0;
3525 edge e; 3646 edge e;
3526 edge_iterator ei; 3647 edge_iterator ei;
3527 3648
3528 for (i = 0; i < current_nr_blocks; i++) 3649 for (i = 0; i < current_nr_blocks; i++)
3529 FOR_EACH_EDGE (e, ei, BASIC_BLOCK (BB_TO_BLOCK (i))->succs) 3650 FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i))->succs)
3530 if (in_current_region_p (e->dest) 3651 if (in_current_region_p (e->dest)
3531 && BLOCK_TO_BB (e->dest->index) < i) 3652 && BLOCK_TO_BB (e->dest->index) < i)
3532 n++; 3653 n++;
3533 3654
3534 gcc_assert (n <= 1); 3655 gcc_assert (n <= 1);
3535 } 3656 }
3536 } 3657 }
3537 #endif
3538 3658
3539 3659
3540 /* Functions to work with control flow. */ 3660 /* Functions to work with control flow. */
3541 3661
3542 /* Recompute BLOCK_TO_BB and BB_FOR_BLOCK for current region so that blocks 3662 /* Recompute BLOCK_TO_BB and BB_FOR_BLOCK for current region so that blocks
3546 sel_recompute_toporder (void) 3666 sel_recompute_toporder (void)
3547 { 3667 {
3548 int i, n, rgn; 3668 int i, n, rgn;
3549 int *postorder, n_blocks; 3669 int *postorder, n_blocks;
3550 3670
3551 postorder = XALLOCAVEC (int, n_basic_blocks); 3671 postorder = XALLOCAVEC (int, n_basic_blocks_for_fn (cfun));
3552 n_blocks = post_order_compute (postorder, false, false); 3672 n_blocks = post_order_compute (postorder, false, false);
3553 3673
3554 rgn = CONTAINING_RGN (BB_TO_BLOCK (0)); 3674 rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
3555 for (n = 0, i = n_blocks - 1; i >= 0; i--) 3675 for (n = 0, i = n_blocks - 1; i >= 0; i--)
3556 if (CONTAINING_RGN (postorder[i]) == rgn) 3676 if (CONTAINING_RGN (postorder[i]) == rgn)
3568 3688
3569 /* Tidy the possibly empty block BB. */ 3689 /* Tidy the possibly empty block BB. */
3570 static bool 3690 static bool
3571 maybe_tidy_empty_bb (basic_block bb) 3691 maybe_tidy_empty_bb (basic_block bb)
3572 { 3692 {
3573 basic_block succ_bb, pred_bb; 3693 basic_block succ_bb, pred_bb, note_bb;
3574 VEC (basic_block, heap) *dom_bbs; 3694 vec<basic_block> dom_bbs;
3575 edge e; 3695 edge e;
3576 edge_iterator ei; 3696 edge_iterator ei;
3577 bool rescan_p; 3697 bool rescan_p;
3578 3698
3579 /* Keep an empty bb only if this block immediately precedes EXIT and 3699
3580 has an incoming non-fallthrough edge, or if it has no predecessors or 3700
3581 successors. Otherwise remove it. */ 3701 successors. Otherwise remove it. */
3582 if (!sel_bb_empty_p (bb) 3702 if (!sel_bb_empty_p (bb)
3583 || (single_succ_p (bb) 3703 || (single_succ_p (bb)
3584 && single_succ (bb) == EXIT_BLOCK_PTR 3704 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
3585 && (!single_pred_p (bb) 3705 && (!single_pred_p (bb)
3586 || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU))) 3706 || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))
3587 || EDGE_COUNT (bb->preds) == 0 3707 || EDGE_COUNT (bb->preds) == 0
3588 || EDGE_COUNT (bb->succs) == 0) 3708 || EDGE_COUNT (bb->succs) == 0)
3589 return false; 3709 return false;
3590 3710
3591 /* Do not attempt to redirect complex edges. */ 3711 /* Do not attempt to redirect complex edges. */
3592 FOR_EACH_EDGE (e, ei, bb->preds) 3712 FOR_EACH_EDGE (e, ei, bb->preds)
3593 if (e->flags & EDGE_COMPLEX) 3713 if (e->flags & EDGE_COMPLEX)
3594 return false; 3714 return false;
3715 else if (e->flags & EDGE_FALLTHRU)
3716 {
3717 rtx note;
3718 /* If prev bb ends with asm goto, see if any of the
3719 ASM_OPERANDS_LABELs don't point to the fallthru
3720 label. Do not attempt to redirect it in that case. */
3721 if (JUMP_P (BB_END (e->src))
3722 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
3723 {
3724 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
3725
3726 for (i = 0; i < n; ++i)
3727 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (bb))
3728 return false;
3729 }
3730 }
3595 3731
3596 free_data_sets (bb); 3732 free_data_sets (bb);
3597 3733
3598 /* Do not delete BB if it has more than one successor. 3734 /* Do not delete BB if it has more than one successor.
3599 That can occur when we are moving a jump. 3735
3605 } 3741 }
3606 3742
3607 succ_bb = single_succ (bb); 3743 succ_bb = single_succ (bb);
3608 rescan_p = true; 3744 rescan_p = true;
3609 pred_bb = NULL; 3745 pred_bb = NULL;
3610 dom_bbs = NULL; 3746 dom_bbs.create (0);
3747
3748 /* Save a pred/succ from the current region to attach the notes to. */
3749 note_bb = NULL;
3750 FOR_EACH_EDGE (e, ei, bb->preds)
3751 if (in_current_region_p (e->src))
3752 {
3753 note_bb = e->src;
3754 break;
3755 }
3756 if (note_bb == NULL)
3757 note_bb = succ_bb;
3611 3758
3612 /* Redirect all non-fallthru edges to the next bb. */ 3759 /* Redirect all non-fallthru edges to the next bb. */
3613 while (rescan_p) 3760 while (rescan_p)
3614 { 3761 {
3615 rescan_p = false; 3762 rescan_p = false;
3626 We will update dominators here only if redirecting 3773
3627 creates an unreachable block; otherwise 3774
3628 sel_redirect_edge_and_branch will take care of it. */ 3775 sel_redirect_edge_and_branch will take care of it. */
3629 if (e->dest != bb 3776 if (e->dest != bb
3630 && single_pred_p (e->dest)) 3777 && single_pred_p (e->dest))
3631 VEC_safe_push (basic_block, heap, dom_bbs, e->dest); 3778 dom_bbs.safe_push (e->dest);
3632 sel_redirect_edge_and_branch (e, succ_bb); 3779 sel_redirect_edge_and_branch (e, succ_bb);
3633 rescan_p = true; 3780 rescan_p = true;
3634 break; 3781 break;
3635 } 3782 }
3636 /* If the edge is fallthru, but PRED_BB ends in a conditional jump 3783 /* If the edge is fallthru, but PRED_BB ends in a conditional jump
3656 if (can_merge_blocks_p (bb->prev_bb, bb)) 3803 if (can_merge_blocks_p (bb->prev_bb, bb))
3657 sel_merge_blocks (bb->prev_bb, bb); 3804 sel_merge_blocks (bb->prev_bb, bb);
3658 else 3805 else
3659 { 3806 {
3660 /* This is a block without fallthru predecessor. Just delete it. */ 3807 /* This is a block without fallthru predecessor. Just delete it. */
3661 gcc_assert (pred_bb != NULL); 3808 gcc_assert (note_bb);
3662 3809 move_bb_info (note_bb, bb);
3663 if (in_current_region_p (pred_bb))
3664 move_bb_info (pred_bb, bb);
3665 remove_empty_bb (bb, true); 3810 remove_empty_bb (bb, true);
3666 } 3811 }
3667 3812
3668 if (!VEC_empty (basic_block, dom_bbs)) 3813 if (!dom_bbs.is_empty ())
3669 { 3814 {
3670 VEC_safe_push (basic_block, heap, dom_bbs, succ_bb); 3815 dom_bbs.safe_push (succ_bb);
3671 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false); 3816 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
3672 VEC_free (basic_block, heap, dom_bbs); 3817 dom_bbs.release ();
3673 } 3818 }
3674 3819
3675 return true; 3820 return true;
3676 } 3821 }
3677 3822
3725 && INSN_NOP_P (last) 3870 && INSN_NOP_P (last)
3726 /* Flow goes fallthru from current block to the next. */ 3871 /* Flow goes fallthru from current block to the next. */
3727 && EDGE_COUNT (xbb->succs) == 1 3872 && EDGE_COUNT (xbb->succs) == 1
3728 && (EDGE_SUCC (xbb, 0)->flags & EDGE_FALLTHRU) 3873 && (EDGE_SUCC (xbb, 0)->flags & EDGE_FALLTHRU)
3729 /* When successor is an EXIT block, it may not be the next block. */ 3874 /* When successor is an EXIT block, it may not be the next block. */
3730 && single_succ (xbb) != EXIT_BLOCK_PTR 3875 && single_succ (xbb) != EXIT_BLOCK_PTR_FOR_FN (cfun)
3731 /* And unconditional jump in previous basic block leads to 3876 /* And unconditional jump in previous basic block leads to
3732 next basic block of XBB and this jump can be safely removed. */ 3877 next basic block of XBB and this jump can be safely removed. */
3733 && in_current_region_p (xbb->prev_bb) 3878 && in_current_region_p (xbb->prev_bb)
3734 && bb_has_removable_jump_to_p (xbb->prev_bb, xbb->next_bb) 3879 && bb_has_removable_jump_to_p (xbb->prev_bb, xbb->next_bb)
3735 && INSN_SCHED_TIMES (BB_END (xbb->prev_bb)) == 0 3880 && INSN_SCHED_TIMES (BB_END (xbb->prev_bb)) == 0
3752 changed = maybe_tidy_empty_bb (xbb->prev_bb); 3897 changed = maybe_tidy_empty_bb (xbb->prev_bb);
3753 if (recompute_toporder_p) 3898 if (recompute_toporder_p)
3754 sel_recompute_toporder (); 3899 sel_recompute_toporder ();
3755 } 3900 }
3756 3901
3757 #ifdef ENABLE_CHECKING 3902 /* TODO: use separate flag for CFG checking. */
3758 verify_backedges (); 3903 if (flag_checking)
3759 verify_dominators (CDI_DOMINATORS); 3904 {
3760 #endif 3905 verify_backedges ();
3906 verify_dominators (CDI_DOMINATORS);
3907 }
3761 3908
3762 return changed; 3909 return changed;
3763 } 3910 }
3764 3911
3765 /* Purge meaningless empty blocks in the middle of a region. */ 3912 /* Purge meaningless empty blocks in the middle of a region. */
3769 int i; 3916 int i;
3770 3917
3771 /* Do not attempt to delete the first basic block in the region. */ 3918 /* Do not attempt to delete the first basic block in the region. */
3772 for (i = 1; i < current_nr_blocks; ) 3919 for (i = 1; i < current_nr_blocks; )
3773 { 3920 {
3774 basic_block b = BASIC_BLOCK (BB_TO_BLOCK (i)); 3921 basic_block b = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i));
3775 3922
3776 if (maybe_tidy_empty_bb (b)) 3923 if (maybe_tidy_empty_bb (b))
3777 continue; 3924 continue;
3778 3925
3779 i++; 3926 i++;
3804 break; 3951 break;
3805 } 3952 }
3806 } 3953 }
3807 3954
3808 if (only_disconnect) 3955 if (only_disconnect)
3809 { 3956 remove_insn (insn);
3810 insn_t prev = PREV_INSN (insn);
3811 insn_t next = NEXT_INSN (insn);
3812 basic_block bb = BLOCK_FOR_INSN (insn);
3813
3814 NEXT_INSN (prev) = next;
3815 PREV_INSN (next) = prev;
3816
3817 if (BB_HEAD (bb) == insn)
3818 {
3819 gcc_assert (BLOCK_FOR_INSN (prev) == bb);
3820 BB_HEAD (bb) = prev;
3821 }
3822 if (BB_END (bb) == insn)
3823 BB_END (bb) = prev;
3824 }
3825 else 3957 else
3826 { 3958 {
3827 remove_insn (insn); 3959 delete_insn (insn);
3828 clear_expr (INSN_EXPR (insn)); 3960 clear_expr (INSN_EXPR (insn));
3829 } 3961 }
3830 3962
3831 /* It is necessary to null this fields before calling add_insn (). */ 3963 /* It is necessary to NULL these fields in case we are going to re-insert
3832 PREV_INSN (insn) = NULL_RTX; 3964 INSN into the insns stream, as will usually happen in the ONLY_DISCONNECT
3833 NEXT_INSN (insn) = NULL_RTX; 3965 case, but also for NOPs that we will return to the nop pool. */
3966 SET_PREV_INSN (insn) = NULL_RTX;
3967 SET_NEXT_INSN (insn) = NULL_RTX;
3968 set_block_for_insn (insn, NULL);
3834 3969
3835 return tidy_control_flow (bb, full_tidying); 3970 return tidy_control_flow (bb, full_tidying);
3836 } 3971 }
3837 3972
3838 /* Estimate the number of insns in BB. */ 3973
3856 gcc_assert (NOTE_P (x) || LABEL_P (x)); 3991 gcc_assert (NOTE_P (x) || LABEL_P (x));
3857 3992
3858 return -1; 3993 return -1;
3859 } 3994 }
3860 3995
3861 /* Return seqno of the only predecessor of INSN. */ 3996 /* Find the proper seqno for inserting at INSN by successors.
3997 Return -1 if no successors with positive seqno exist. */
3862 static int 3998 static int
3863 get_seqno_of_a_pred (insn_t insn) 3999 get_seqno_by_succs (rtx_insn *insn)
4000 {
4001 basic_block bb = BLOCK_FOR_INSN (insn);
4002 rtx_insn *tmp = insn, *end = BB_END (bb);
4003 int seqno;
4004 insn_t succ = NULL;
4005 succ_iterator si;
4006
4007 while (tmp != end)
4008 {
4009 tmp = NEXT_INSN (tmp);
4010 if (INSN_P (tmp))
4011 return INSN_SEQNO (tmp);
4012 }
4013
4014 seqno = INT_MAX;
4015
4016 FOR_EACH_SUCC_1 (succ, si, end, SUCCS_NORMAL)
4017 if (INSN_SEQNO (succ) > 0)
4018 seqno = MIN (seqno, INSN_SEQNO (succ));
4019
4020 if (seqno == INT_MAX)
4021 return -1;
4022
4023 return seqno;
4024 }
4025
4026 /* Compute seqno for INSN by its preds or succs. Use OLD_SEQNO to compute
4027 seqno in corner cases. */
4028 static int
4029 get_seqno_for_a_jump (insn_t insn, int old_seqno)
3864 { 4030 {
3865 int seqno; 4031 int seqno;
3866 4032
3867 gcc_assert (INSN_SIMPLEJUMP_P (insn)); 4033 gcc_assert (INSN_SIMPLEJUMP_P (insn));
3868 4034
3898 { 4064 {
3899 insn_t *preds; 4065 insn_t *preds;
3900 int n; 4066 int n;
3901 4067
3902 cfg_preds (BLOCK_FOR_INSN (insn), &preds, &n); 4068 cfg_preds (BLOCK_FOR_INSN (insn), &preds, &n);
3903 gcc_assert (n == 1); 4069
3904 4070 gcc_assert (n > 0);
3905 seqno = INSN_SEQNO (preds[0]); 4071 /* For one predecessor, use simple method. */
4072 if (n == 1)
4073 seqno = INSN_SEQNO (preds[0]);
4074 else
4075 seqno = get_seqno_by_preds (insn);
3906 4076
3907 free (preds); 4077 free (preds);
3908 } 4078 }
3909 } 4079 }
3910 4080
4081 /* We were unable to find a good seqno among preds. */
4082 if (seqno < 0)
4083 seqno = get_seqno_by_succs (insn);
4084
4085 if (seqno < 0)
4086 {
4087 /* The only case where this could be here legally is that the only
4088 unscheduled insn was a conditional jump that got removed and turned
4089 into this unconditional one. Initialize from the old seqno
4090 of that jump passed down to here. */
4091 seqno = old_seqno;
4092 }
4093
4094 gcc_assert (seqno >= 0);
3911 return seqno; 4095 return seqno;
3912 } 4096 }
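
Compressed, the seqno recovery above is a three-step fallback. A sketch of just the control flow, where seqno_from_preds and seqno_from_succs are hypothetical stand-ins for the predecessor and successor walks shown in the hunk:

    static int
    seqno_for_new_jump (insn_t insn, int old_seqno)
    {
      int seqno = seqno_from_preds (insn);  /* 1: try predecessors      */

      if (seqno < 0)
        seqno = seqno_from_succs (insn);    /* 2: then successors       */

      if (seqno < 0)
        seqno = old_seqno;                  /* 3: last resort: the
                                               removed jump's old seqno */
      gcc_assert (seqno >= 0);
      return seqno;
    }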
3913 4097
3914 /* Find the proper seqno for inserting at INSN. Returns -1 if no predecessors 4098 /* Find the proper seqno for inserting at INSN. Returns -1 if no predecessors
3915 with positive seqno exist. */ 4099 with positive seqno exist. */
3916 int 4100 int
3917 get_seqno_by_preds (rtx insn) 4101 get_seqno_by_preds (rtx_insn *insn)
3918 { 4102 {
3919 basic_block bb = BLOCK_FOR_INSN (insn); 4103 basic_block bb = BLOCK_FOR_INSN (insn);
3920 rtx tmp = insn, head = BB_HEAD (bb); 4104 rtx_insn *tmp = insn, *head = BB_HEAD (bb);
3921 insn_t *preds; 4105 insn_t *preds;
3922 int n, i, seqno; 4106 int n, i, seqno;
3923 4107
3924 while (tmp != head) 4108 /* Loop backwards from INSN to HEAD including both. */
3925 if (INSN_P (tmp)) 4109 while (1)
3926 return INSN_SEQNO (tmp); 4110 {
3927 else 4111 if (INSN_P (tmp))
4112 return INSN_SEQNO (tmp);
4113 if (tmp == head)
4114 break;
3928 tmp = PREV_INSN (tmp); 4115 tmp = PREV_INSN (tmp);
4116 }
3929 4117
3930 cfg_preds (bb, &preds, &n); 4118 cfg_preds (bb, &preds, &n);
3931 for (i = 0, seqno = -1; i < n; i++) 4119 for (i = 0, seqno = -1; i < n; i++)
3932 seqno = MAX (seqno, INSN_SEQNO (preds[i])); 4120 seqno = MAX (seqno, INSN_SEQNO (preds[i]));
3933 4121
3938 4126
3939 /* Extend pass-scope data structures for basic blocks. */ 4127 /* Extend pass-scope data structures for basic blocks. */
3940 void 4128 void
3941 sel_extend_global_bb_info (void) 4129 sel_extend_global_bb_info (void)
3942 { 4130 {
3943 VEC_safe_grow_cleared (sel_global_bb_info_def, heap, sel_global_bb_info, 4131 sel_global_bb_info.safe_grow_cleared (last_basic_block_for_fn (cfun));
3944 last_basic_block);
3945 } 4132 }
3946 4133
3947 /* Extend region-scope data structures for basic blocks. */ 4134 /* Extend region-scope data structures for basic blocks. */
3948 static void 4135 static void
3949 extend_region_bb_info (void) 4136 extend_region_bb_info (void)
3950 { 4137 {
3951 VEC_safe_grow_cleared (sel_region_bb_info_def, heap, sel_region_bb_info, 4138 sel_region_bb_info.safe_grow_cleared (last_basic_block_for_fn (cfun));
3952 last_basic_block);
3953 } 4139 }
3954 4140
3955 /* Extend all data structures to fit for all basic blocks. */ 4141 /* Extend all data structures to fit for all basic blocks. */
3956 static void 4142 static void
3957 extend_bb_info (void) 4143 extend_bb_info (void)
3962 4148
3963 /* Finalize pass-scope data structures for basic blocks. */ 4149 /* Finalize pass-scope data structures for basic blocks. */
3964 void 4150 void
3965 sel_finish_global_bb_info (void) 4151 sel_finish_global_bb_info (void)
3966 { 4152 {
3967 VEC_free (sel_global_bb_info_def, heap, sel_global_bb_info); 4153 sel_global_bb_info.release ();
3968 } 4154 }
3969 4155
3970 /* Finalize region-scope data structures for basic blocks. */ 4156 /* Finalize region-scope data structures for basic blocks. */
3971 static void 4157 static void
3972 finish_region_bb_info (void) 4158 finish_region_bb_info (void)
3973 { 4159 {
3974 VEC_free (sel_region_bb_info_def, heap, sel_region_bb_info); 4160 sel_region_bb_info.release ();
3975 } 4161 }
3976 4162
3977 4163
3978 /* Data for each insn in current region. */ 4164 /* Data for each insn in current region. */
3979 VEC (sel_insn_data_def, heap) *s_i_d = NULL; 4165 vec<sel_insn_data_def> s_i_d;
3980
3981 /* A vector for the insns we've emitted. */
3982 static insn_vec_t new_insns = NULL;
3983 4166
3984 /* Extend data structures for insns from current region. */ 4167 /* Extend data structures for insns from current region. */
3985 static void 4168 static void
3986 extend_insn_data (void) 4169 extend_insn_data (void)
3987 { 4170 {
3989 4172
3990 sched_extend_target (); 4173 sched_extend_target ();
3991 sched_deps_init (false); 4174 sched_deps_init (false);
3992 4175
3993 /* Extend data structures for insns from current region. */ 4176 /* Extend data structures for insns from current region. */
3994 reserve = (sched_max_luid + 1 4177 reserve = (sched_max_luid + 1 - s_i_d.length ());
3995 - VEC_length (sel_insn_data_def, s_i_d)); 4178 if (reserve > 0 && ! s_i_d.space (reserve))
3996 if (reserve > 0
3997 && ! VEC_space (sel_insn_data_def, s_i_d, reserve))
3998 { 4179 {
3999 int size; 4180 int size;
4000 4181
4001 if (sched_max_luid / 2 > 1024) 4182 if (sched_max_luid / 2 > 1024)
4002 size = sched_max_luid + 1024; 4183 size = sched_max_luid + 1024;
4003 else 4184 else
4004 size = 3 * sched_max_luid / 2; 4185 size = 3 * sched_max_luid / 2;
4005 4186
4006 4187
4007 VEC_safe_grow_cleared (sel_insn_data_def, heap, s_i_d, size); 4188 s_i_d.safe_grow_cleared (size);
4008 } 4189 }
4009 } 4190 }
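
The sizing rule in extend_insn_data is worth spelling out: the per-insn vector grows geometrically (by half) while the region is small, but switches to a fixed 1024-entry pad once it is large, keeping repeated extensions amortized-cheap without gross overallocation. As a standalone sketch:

    /* Sketch of the growth policy used above.  */
    static int
    sid_new_size (int max_luid)
    {
      if (max_luid / 2 > 1024)
        return max_luid + 1024;   /* large region: fixed pad    */
      return 3 * max_luid / 2;    /* small region: grow by 50%  */
    }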
4010 4191
4011 /* Finalize data structures for insns from current region. */ 4192 /* Finalize data structures for insns from current region. */
4012 static void 4193 static void
4014 { 4195 {
4015 unsigned i; 4196 unsigned i;
4016 4197
4017 /* Clear here all dependence contexts that may have left from insns that were 4198 /* Clear here all dependence contexts that may have left from insns that were
4018 removed during the scheduling. */ 4199 removed during the scheduling. */
4019 for (i = 0; i < VEC_length (sel_insn_data_def, s_i_d); i++) 4200 for (i = 0; i < s_i_d.length (); i++)
4020 { 4201 {
4021 sel_insn_data_def *sid_entry = VEC_index (sel_insn_data_def, s_i_d, i); 4202 sel_insn_data_def *sid_entry = &s_i_d[i];
4022 4203
4023 if (sid_entry->live) 4204 if (sid_entry->live)
4024 return_regset_to_pool (sid_entry->live); 4205 return_regset_to_pool (sid_entry->live);
4025 if (sid_entry->analyzed_deps) 4206 if (sid_entry->analyzed_deps)
4026 { 4207 {
4037 to be passed to the next region. */ 4218 to be passed to the next region. */
4038 CANT_MOVE_BY_LUID (i) = 0; 4219 CANT_MOVE_BY_LUID (i) = 0;
4039 } 4220 }
4040 } 4221 }
4041 4222
4042 VEC_free (sel_insn_data_def, heap, s_i_d); 4223 s_i_d.release ();
4043 } 4224 }
4044 4225
4045 /* A proxy to pass initialization data to init_insn (). */ 4226 /* A proxy to pass initialization data to init_insn (). */
4046 static sel_insn_data_def _insn_init_ssid; 4227 static sel_insn_data_def _insn_init_ssid;
4047 static sel_insn_data_t insn_init_ssid = &_insn_init_ssid; 4228 static sel_insn_data_t insn_init_ssid = &_insn_init_ssid;
4091 if (first_time_insn_init (insn)) 4272 if (first_time_insn_init (insn))
4092 init_first_time_insn_data (insn); 4273 init_first_time_insn_data (insn);
4093 } 4274 }
4094 4275
4095 /* This is used to initialize spurious jumps generated by 4276 /* This is used to initialize spurious jumps generated by
4096 sel_redirect_edge (). */ 4277 sel_redirect_edge (). OLD_SEQNO is used for initializing seqnos
4097 static void 4278 in corner cases within get_seqno_for_a_jump. */
4098 init_simplejump_data (insn_t insn) 4279 static void
4280 init_simplejump_data (insn_t insn, int old_seqno)
4099 { 4281 {
4100 init_expr (INSN_EXPR (insn), vinsn_create (insn, false), 0, 4282 init_expr (INSN_EXPR (insn), vinsn_create (insn, false), 0,
4101 REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0, NULL, true, false, false, 4283 REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0,
4284 vNULL, true, false, false,
4102 false, true); 4285 false, true);
4103 INSN_SEQNO (insn) = get_seqno_of_a_pred (insn); 4286 INSN_SEQNO (insn) = get_seqno_for_a_jump (insn, old_seqno);
4104 init_first_time_insn_data (insn); 4287 init_first_time_insn_data (insn);
4105 } 4288 }
4106 4289
4107 /* Perform deferred initialization of insns. This is used to process 4290 /* Perform deferred initialization of insns. This is used to process
4108 a new jump that may be created by redirect_edge. */ 4291 a new jump that may be created by redirect_edge. OLD_SEQNO is used
4109 void 4292 for initializing simplejumps in init_simplejump_data. */
4110 sel_init_new_insn (insn_t insn, int flags) 4293 static void
4294 sel_init_new_insn (insn_t insn, int flags, int old_seqno)
4111 { 4295 {
4112 /* We create data structures for bb when the first insn is emitted in it. */ 4296 /* We create data structures for bb when the first insn is emitted in it. */
4113 if (INSN_P (insn) 4297 if (INSN_P (insn)
4114 && INSN_IN_STREAM_P (insn) 4298 && INSN_IN_STREAM_P (insn)
4115 && insn_is_the_only_one_in_bb_p (insn)) 4299 && insn_is_the_only_one_in_bb_p (insn))
4117 extend_bb_info (); 4301 extend_bb_info ();
4118 create_initial_data_sets (BLOCK_FOR_INSN (insn)); 4302 create_initial_data_sets (BLOCK_FOR_INSN (insn));
4119 } 4303 }
4120 4304
4121 if (flags & INSN_INIT_TODO_LUID) 4305 if (flags & INSN_INIT_TODO_LUID)
4122 sched_init_luids (NULL, NULL, NULL, insn); 4306 {
4307 sched_extend_luids ();
4308 sched_init_insn_luid (insn);
4309 }
4123 4310
4124 if (flags & INSN_INIT_TODO_SSID) 4311 if (flags & INSN_INIT_TODO_SSID)
4125 { 4312 {
4126 extend_insn_data (); 4313 extend_insn_data ();
4127 init_insn_data (insn); 4314 init_insn_data (insn);
4129 } 4316 }
4130 4317
4131 if (flags & INSN_INIT_TODO_SIMPLEJUMP) 4318 if (flags & INSN_INIT_TODO_SIMPLEJUMP)
4132 { 4319 {
4133 extend_insn_data (); 4320 extend_insn_data ();
4134 init_simplejump_data (insn); 4321 init_simplejump_data (insn, old_seqno);
4135 } 4322 }
4136 4323
4137 gcc_assert (CONTAINING_RGN (BLOCK_NUM (insn)) 4324 gcc_assert (CONTAINING_RGN (BLOCK_NUM (insn))
4138 == CONTAINING_RGN (BB_TO_BLOCK (0))); 4325 == CONTAINING_RGN (BB_TO_BLOCK (0)));
4139 } 4326 }
4167 init_lv_sets (void) 4354 init_lv_sets (void)
4168 { 4355 {
4169 basic_block bb; 4356 basic_block bb;
4170 4357
4171 /* Initialize of LV sets. */ 4358 /* Initialize of LV sets. */
4172 FOR_EACH_BB (bb) 4359 FOR_EACH_BB_FN (bb, cfun)
4173 init_lv_set (bb); 4360 init_lv_set (bb);
4174 4361
4175 /* Don't forget EXIT_BLOCK. */ 4362 /* Don't forget EXIT_BLOCK. */
4176 init_lv_set (EXIT_BLOCK_PTR); 4363 init_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
4177 } 4364 }
4178 4365
4179 /* Release lv set of HEAD. */ 4366 /* Release lv set of HEAD. */
4180 static void 4367 static void
4181 free_lv_set (basic_block bb) 4368 free_lv_set (basic_block bb)
4192 free_lv_sets (void) 4379 free_lv_sets (void)
4193 { 4380 {
4194 basic_block bb; 4381 basic_block bb;
4195 4382
4196 /* Don't forget EXIT_BLOCK. */ 4383 /* Don't forget EXIT_BLOCK. */
4197 free_lv_set (EXIT_BLOCK_PTR); 4384 free_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
4198 4385
4199 /* Free LV sets. */ 4386 /* Free LV sets. */
4200 FOR_EACH_BB (bb) 4387 FOR_EACH_BB_FN (bb, cfun)
4201 if (BB_LV_SET (bb)) 4388 if (BB_LV_SET (bb))
4202 free_lv_set (bb); 4389 free_lv_set (bb);
4203 } 4390 }
4204 4391
4205 /* Initialize an invalid AV_SET for BB. 4392 /* Mark AV_SET for BB as invalid, so this set will be updated the next time
4206 This set will be updated next time compute_av () process BB. */ 4393 compute_av() processes BB. This function is called when creating new basic
4394 blocks, as well as for blocks (either new or existing) where new jumps are
4395 created when the control flow is being updated. */
4207 static void 4396 static void
4208 invalidate_av_set (basic_block bb) 4397 invalidate_av_set (basic_block bb)
4209 { 4398 {
4210 gcc_assert (BB_AV_LEVEL (bb) <= 0
4211 && BB_AV_SET (bb) == NULL);
4212
4213 BB_AV_LEVEL (bb) = -1; 4399 BB_AV_LEVEL (bb) = -1;
4214 } 4400 }
4215 4401
4216 /* Create initial data sets for BB (they will be invalid). */ 4402 /* Create initial data sets for BB (they will be invalid). */
4217 static void 4403 static void
4238 { 4424 {
4239 free_lv_set (bb); 4425 free_lv_set (bb);
4240 free_av_set (bb); 4426 free_av_set (bb);
4241 } 4427 }
4242 4428
4243 /* Exchange lv sets of TO and FROM. */
4244 static void
4245 exchange_lv_sets (basic_block to, basic_block from)
4246 {
4247 {
4248 regset to_lv_set = BB_LV_SET (to);
4249
4250 BB_LV_SET (to) = BB_LV_SET (from);
4251 BB_LV_SET (from) = to_lv_set;
4252 }
4253
4254 {
4255 bool to_lv_set_valid_p = BB_LV_SET_VALID_P (to);
4256
4257 BB_LV_SET_VALID_P (to) = BB_LV_SET_VALID_P (from);
4258 BB_LV_SET_VALID_P (from) = to_lv_set_valid_p;
4259 }
4260 }
4261
4262
4263 /* Exchange av sets of TO and FROM. */
4264 static void
4265 exchange_av_sets (basic_block to, basic_block from)
4266 {
4267 {
4268 av_set_t to_av_set = BB_AV_SET (to);
4269
4270 BB_AV_SET (to) = BB_AV_SET (from);
4271 BB_AV_SET (from) = to_av_set;
4272 }
4273
4274 {
4275 int to_av_level = BB_AV_LEVEL (to);
4276
4277 BB_AV_LEVEL (to) = BB_AV_LEVEL (from);
4278 BB_AV_LEVEL (from) = to_av_level;
4279 }
4280 }
4281
4282 /* Exchange data sets of TO and FROM. */ 4429 /* Exchange data sets of TO and FROM. */
4283 void 4430 void
4284 exchange_data_sets (basic_block to, basic_block from) 4431 exchange_data_sets (basic_block to, basic_block from)
4285 { 4432 {
4286 exchange_lv_sets (to, from); 4433 /* Exchange lv sets of TO and FROM. */
4287 exchange_av_sets (to, from); 4434 std::swap (BB_LV_SET (from), BB_LV_SET (to));
4435 std::swap (BB_LV_SET_VALID_P (from), BB_LV_SET_VALID_P (to));
4436
4437 /* Exchange av sets of TO and FROM. */
4438 std::swap (BB_AV_SET (from), BB_AV_SET (to));
4439 std::swap (BB_AV_LEVEL (from), BB_AV_LEVEL (to));
4288 } 4440 }
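
The new exchange_data_sets folds the two deleted helpers into direct std::swap calls; every field involved is an ordinary lvalue of a swappable type, so the generic swap expresses the exchange directly. A minimal illustration outside GCC's types:

    #include <utility>

    /* Illustrative only: what the removed exchange_*_sets helpers did.  */
    struct bb_sets { void *av_set; int av_level; };

    static void
    swap_sets (bb_sets &a, bb_sets &b)
    {
      std::swap (a.av_set, b.av_set);
      std::swap (a.av_level, b.av_level);
    }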
4289 4441
4290 /* Copy data sets of FROM to TO. */ 4442 /* Copy data sets of FROM to TO. */
4291 void 4443 void
4292 copy_data_sets (basic_block to, basic_block from) 4444 copy_data_sets (basic_block to, basic_block from)
4344 4496
4345 /* Variables to work with control-flow graph. */ 4497 /* Variables to work with control-flow graph. */
4346 4498
4347 /* The basic block that has already been processed by sched_data_update (), 4499
4348 but hasn't been passed to sel_add_bb () yet. */ 4500
4349 static VEC (basic_block, heap) *last_added_blocks = NULL; 4501 static vec<basic_block> last_added_blocks;
4350 4502
4351 /* A pool for allocating successor infos. */ 4503 /* A pool for allocating successor infos. */
4352 static struct 4504 static struct
4353 { 4505 {
4354 /* A stack for saving succs_info structures. */ 4506 /* A stack for saving succs_info structures. */
4365 } succs_info_pool; 4517 } succs_info_pool;
4366 4518
4367 /* Functions to work with control-flow graph. */ 4519 /* Functions to work with control-flow graph. */
4368 4520
4369 /* Return the head (first real insn) of BB, following its bb note. */ 4521
4370 insn_t 4522 rtx_insn *
4371 sel_bb_head (basic_block bb) 4523 sel_bb_head (basic_block bb)
4372 { 4524 {
4373 insn_t head; 4525 rtx_insn *head;
4374 4526
4375 if (bb == EXIT_BLOCK_PTR) 4527 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4376 { 4528 {
4377 gcc_assert (exit_insn != NULL_RTX); 4529 gcc_assert (exit_insn != NULL_RTX);
4378 head = exit_insn; 4530 head = exit_insn;
4379 } 4531 }
4380 else 4532 else
4381 { 4533 {
4382 insn_t note; 4534 rtx_note *note = bb_note (bb);
4383
4384 note = bb_note (bb);
4385 head = next_nonnote_insn (note); 4535 head = next_nonnote_insn (note);
4386 4536
4387 if (head && (BARRIER_P (head) || BLOCK_FOR_INSN (head) != bb)) 4537 if (head && (BARRIER_P (head) || BLOCK_FOR_INSN (head) != bb))
4388 head = NULL_RTX; 4538 head = NULL;
4389 } 4539 }
4390 4540
4391 return head; 4541 return head;
4392 } 4542 }
4393 4543
4397 { 4547 {
4398 return sel_bb_head (BLOCK_FOR_INSN (insn)) == insn; 4548 return sel_bb_head (BLOCK_FOR_INSN (insn)) == insn;
4399 } 4549 }
4400 4550
4401 /* Return last insn of BB. */ 4551 /* Return last insn of BB. */
4402 insn_t 4552 rtx_insn *
4403 sel_bb_end (basic_block bb) 4553 sel_bb_end (basic_block bb)
4404 { 4554 {
4405 if (sel_bb_empty_p (bb)) 4555 if (sel_bb_empty_p (bb))
4406 return NULL_RTX; 4556 return NULL;
4407 4557
4408 gcc_assert (bb != EXIT_BLOCK_PTR); 4558 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
4409 4559
4410 return BB_END (bb); 4560 return BB_END (bb);
4411 } 4561 }
4412 4562
4413 /* Return true if INSN is the last insn in its basic block. */ 4563 /* Return true if INSN is the last insn in its basic block. */
4434 return CONTAINING_RGN (bb->index) == CONTAINING_RGN (BB_TO_BLOCK (0)); 4584 return CONTAINING_RGN (bb->index) == CONTAINING_RGN (BB_TO_BLOCK (0));
4435 } 4585 }
4436 4586
4437 /* Return the block which is a fallthru bb of a conditional jump JUMP. */ 4587 /* Return the block which is a fallthru bb of a conditional jump JUMP. */
4438 basic_block 4588 basic_block
4439 fallthru_bb_of_jump (rtx jump) 4589 fallthru_bb_of_jump (const rtx_insn *jump)
4440 { 4590 {
4441 if (!JUMP_P (jump)) 4591 if (!JUMP_P (jump))
4442 return NULL; 4592 return NULL;
4443 4593
4444 if (!any_condjump_p (jump)) 4594 if (!any_condjump_p (jump))
4459 remove_notes (bb_note (bb), BB_END (bb)); 4609 remove_notes (bb_note (bb), BB_END (bb));
4460 BB_NOTE_LIST (bb) = note_list; 4610 BB_NOTE_LIST (bb) = note_list;
4461 } 4611 }
4462 4612
4463 void 4613 void
4464 sel_init_bbs (bb_vec_t bbs, basic_block bb) 4614 sel_init_bbs (bb_vec_t bbs)
4465 { 4615 {
4466 const struct sched_scan_info_def ssi = 4616 const struct sched_scan_info_def ssi =
4467 { 4617 {
4468 extend_bb_info, /* extend_bb */ 4618 extend_bb_info, /* extend_bb */
4469 init_bb, /* init_bb */ 4619 init_bb, /* init_bb */
4470 NULL, /* extend_insn */ 4620 NULL, /* extend_insn */
4471 NULL /* init_insn */ 4621 NULL /* init_insn */
4472 }; 4622 };
4473 4623
4474 sched_scan (&ssi, bbs, bb, new_insns, NULL); 4624 sched_scan (&ssi, bbs);
4475 } 4625 }
4476 4626
4477 /* Restore notes for the whole region. */ 4627 /* Restore notes for the whole region. */
4478 static void 4628 static void
4479 sel_restore_notes (void) 4629 sel_restore_notes (void)
4490 4640
4491 do 4641 do
4492 { 4642 {
4493 note_list = BB_NOTE_LIST (first); 4643 note_list = BB_NOTE_LIST (first);
4494 restore_other_notes (NULL, first); 4644 restore_other_notes (NULL, first);
4495 BB_NOTE_LIST (first) = NULL_RTX; 4645 BB_NOTE_LIST (first) = NULL;
4496 4646
4497 FOR_BB_INSNS (first, insn) 4647 FOR_BB_INSNS (first, insn)
4498 if (NONDEBUG_INSN_P (insn)) 4648 if (NONDEBUG_INSN_P (insn))
4499 reemit_notes (insn); 4649 reemit_notes (insn);
4500 4650
4546 4696
4547 if (++succs_info_pool.max_top >= succs_info_pool.size) 4697 if (++succs_info_pool.max_top >= succs_info_pool.size)
4548 gcc_unreachable (); 4698 gcc_unreachable ();
4549 4699
4550 i = ++succs_info_pool.top; 4700 i = ++succs_info_pool.top;
4551 succs_info_pool.stack[i].succs_ok = VEC_alloc (rtx, heap, 10); 4701 succs_info_pool.stack[i].succs_ok.create (10);
4552 succs_info_pool.stack[i].succs_other = VEC_alloc (rtx, heap, 10); 4702 succs_info_pool.stack[i].succs_other.create (10);
4553 succs_info_pool.stack[i].probs_ok = VEC_alloc (int, heap, 10); 4703 succs_info_pool.stack[i].probs_ok.create (10);
4554 } 4704 }
4555 else 4705 else
4556 succs_info_pool.top++; 4706 succs_info_pool.top++;
4557 4707
4558 return &succs_info_pool.stack[succs_info_pool.top]; 4708 return &succs_info_pool.stack[succs_info_pool.top];
4565 gcc_assert (succs_info_pool.top >= 0 4715 gcc_assert (succs_info_pool.top >= 0
4566 && &succs_info_pool.stack[succs_info_pool.top] == sinfo); 4716 && &succs_info_pool.stack[succs_info_pool.top] == sinfo);
4567 succs_info_pool.top--; 4717 succs_info_pool.top--;
4568 4718
4569 /* Clear stale info. */ 4719 /* Clear stale info. */
4570 VEC_block_remove (rtx, sinfo->succs_ok, 4720 sinfo->succs_ok.block_remove (0, sinfo->succs_ok.length ());
4571 0, VEC_length (rtx, sinfo->succs_ok)); 4721 sinfo->succs_other.block_remove (0, sinfo->succs_other.length ());
4572 VEC_block_remove (rtx, sinfo->succs_other, 4722 sinfo->probs_ok.block_remove (0, sinfo->probs_ok.length ());
4573 0, VEC_length (rtx, sinfo->succs_other));
4574 VEC_block_remove (int, sinfo->probs_ok,
4575 0, VEC_length (int, sinfo->probs_ok));
4576 sinfo->all_prob = 0; 4723 sinfo->all_prob = 0;
4577 sinfo->succs_ok_n = 0; 4724 sinfo->succs_ok_n = 0;
4578 sinfo->all_succs_n = 0; 4725 sinfo->all_succs_n = 0;
4579 } 4726 }
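
Note the stack discipline these two routines enforce: the release path asserts that the structure handed back is exactly the one on top of the pool, so succs_info allocations must be returned in LIFO order, matching their recursive use. A usage sketch, assuming the allocate/release pair shown here is the selective scheduler's alloc_succs_info/free_succs_info:

    struct succs_info *outer = alloc_succs_info ();
    struct succs_info *inner = alloc_succs_info ();
    /* ... recurse using INNER ... */
    free_succs_info (inner);   /* top of the stack must go back first */
    free_succs_info (outer);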
4580 4727
4594 perform code motion through inner loops. */ 4741 perform code motion through inner loops. */
4595 short current_flags = si.current_flags & ~SUCCS_SKIP_TO_LOOP_EXITS; 4742 short current_flags = si.current_flags & ~SUCCS_SKIP_TO_LOOP_EXITS;
4596 4743
4597 if (current_flags & flags) 4744 if (current_flags & flags)
4598 { 4745 {
4599 VEC_safe_push (rtx, heap, sinfo->succs_ok, succ); 4746 sinfo->succs_ok.safe_push (succ);
4600 VEC_safe_push (int, heap, sinfo->probs_ok, 4747 sinfo->probs_ok.safe_push (
4601 /* FIXME: Improve calculation when skipping 4748 /* FIXME: Improve calculation when skipping
4602 inner loop to exits. */ 4749 inner loop to exits. */
4603 (si.bb_end 4750 si.bb_end
4604 ? si.e1->probability 4751 ? (si.e1->probability.initialized_p ()
4605 : REG_BR_PROB_BASE)); 4752 ? si.e1->probability.to_reg_br_prob_base ()
4753 : 0)
4754 : REG_BR_PROB_BASE);
4606 sinfo->succs_ok_n++; 4755 sinfo->succs_ok_n++;
4607 } 4756 }
4608 else 4757 else
4609 VEC_safe_push (rtx, heap, sinfo->succs_other, succ); 4758 sinfo->succs_other.safe_push (succ);
4610 4759
4611 /* Compute all_prob. */ 4760 /* Compute all_prob. */
4612 if (!si.bb_end) 4761 if (!si.bb_end)
4613 sinfo->all_prob = REG_BR_PROB_BASE; 4762 sinfo->all_prob = REG_BR_PROB_BASE;
4614 else 4763 else if (si.e1->probability.initialized_p ())
4615 sinfo->all_prob += si.e1->probability; 4764 sinfo->all_prob += si.e1->probability.to_reg_br_prob_base ();
4616 4765
4617 sinfo->all_succs_n++; 4766 sinfo->all_succs_n++;
4618 } 4767 }
4619 4768
4620 return sinfo; 4769 return sinfo;
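
The probability bookkeeping above adapts to the profile_probability class: a profile may be uninitialized, so the code guards with initialized_p () before converting back to the legacy REG_BR_PROB_BASE integer scale. The conversion, isolated as a sketch under the same assumptions:

    /* Sketch: profile_probability -> legacy integer scale.  */
    static int
    prob_to_rtl_base (profile_probability p)
    {
      if (!p.initialized_p ())
        return 0;                      /* unknown profile: claim nothing */
      return p.to_reg_br_prob_base ();
    }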
4703 bb_ends_ebb_p (basic_block bb) 4852 bb_ends_ebb_p (basic_block bb)
4704 { 4853 {
4705 basic_block next_bb = bb_next_bb (bb); 4854 basic_block next_bb = bb_next_bb (bb);
4706 edge e; 4855 edge e;
4707 4856
4708 if (next_bb == EXIT_BLOCK_PTR 4857 if (next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4709 || bitmap_bit_p (forced_ebb_heads, next_bb->index) 4858 || bitmap_bit_p (forced_ebb_heads, next_bb->index)
4710 || (LABEL_P (BB_HEAD (next_bb)) 4859 || (LABEL_P (BB_HEAD (next_bb))
4711 /* NB: LABEL_NUSES () is not maintained outside of jump.c. 4860 /* NB: LABEL_NUSES () is not maintained outside of jump.c.
4712 Work around that. */ 4861 Work around that. */
4713 && !single_pred_p (next_bb))) 4862 && !single_pred_p (next_bb)))
4732 bool 4881 bool
4733 in_same_ebb_p (insn_t insn, insn_t succ) 4882 in_same_ebb_p (insn_t insn, insn_t succ)
4734 { 4883 {
4735 basic_block ptr = BLOCK_FOR_INSN (insn); 4884 basic_block ptr = BLOCK_FOR_INSN (insn);
4736 4885
4737 for(;;) 4886 for (;;)
4738 { 4887 {
4739 if (ptr == BLOCK_FOR_INSN (succ)) 4888 if (ptr == BLOCK_FOR_INSN (succ))
4740 return true; 4889 return true;
4741 4890
4742 if (bb_ends_ebb_p (ptr)) 4891 if (bb_ends_ebb_p (ptr))
4756 recompute_rev_top_order (void) 4905 recompute_rev_top_order (void)
4757 { 4906 {
4758 int *postorder; 4907 int *postorder;
4759 int n_blocks, i; 4908 int n_blocks, i;
4760 4909
4761 if (!rev_top_order_index || rev_top_order_index_len < last_basic_block) 4910 if (!rev_top_order_index
4762 { 4911 || rev_top_order_index_len < last_basic_block_for_fn (cfun))
4763 rev_top_order_index_len = last_basic_block; 4912 {
4913 rev_top_order_index_len = last_basic_block_for_fn (cfun);
4764 rev_top_order_index = XRESIZEVEC (int, rev_top_order_index, 4914 rev_top_order_index = XRESIZEVEC (int, rev_top_order_index,
4765 rev_top_order_index_len); 4915 rev_top_order_index_len);
4766 } 4916 }
4767 4917
4768 postorder = XNEWVEC (int, n_basic_blocks); 4918 postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
4769 4919
4770 n_blocks = post_order_compute (postorder, true, false); 4920 n_blocks = post_order_compute (postorder, true, false);
4771 gcc_assert (n_basic_blocks == n_blocks); 4921 gcc_assert (n_basic_blocks_for_fn (cfun) == n_blocks);
4772 4922
4773 /* Build reverse function: for each basic block with BB->INDEX == K 4923 /* Build reverse function: for each basic block with BB->INDEX == K
4774 rev_top_order_index[K] is its reverse topological sort number. 4924
4775 for (i = 0; i < n_blocks; i++) 4925 for (i = 0; i < n_blocks; i++)
4776 { 4926 {
4783 4933
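
The loop body of recompute_rev_top_order is elided by this hunk, but the comment above pins down what it computes: the inverse of the postorder permutation. Inverting a permutation takes one assignment per element; a sketch of that idea (not the verbatim elided body):

    /* Sketch: invert the postorder permutation so that a block index
       maps back to its reverse-topological position.  */
    for (i = 0; i < n_blocks; i++)
      rev_top_order_index[postorder[i]] = i;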
4784 /* Clear all flags from insns in BB that could spoil its rescheduling. */ 4934 /* Clear all flags from insns in BB that could spoil its rescheduling. */
4785 void 4935 void
4786 clear_outdated_rtx_info (basic_block bb) 4936 clear_outdated_rtx_info (basic_block bb)
4787 { 4937 {
4788 rtx insn; 4938 rtx_insn *insn;
4789 4939
4790 FOR_BB_INSNS (bb, insn) 4940 FOR_BB_INSNS (bb, insn)
4791 if (INSN_P (insn)) 4941 if (INSN_P (insn))
4792 { 4942 {
4793 SCHED_GROUP_P (insn) = 0; 4943 SCHED_GROUP_P (insn) = 0;
4804 4954
4805 /* Add BB_NOTE to the pool of available basic block notes. */ 4955 /* Add BB_NOTE to the pool of available basic block notes. */
4806 static void 4956 static void
4807 return_bb_to_pool (basic_block bb) 4957 return_bb_to_pool (basic_block bb)
4808 { 4958 {
4809 rtx note = bb_note (bb); 4959 rtx_note *note = bb_note (bb);
4810 4960
4811 gcc_assert (NOTE_BASIC_BLOCK (note) == bb 4961 gcc_assert (NOTE_BASIC_BLOCK (note) == bb
4812 && bb->aux == NULL); 4962 && bb->aux == NULL);
4813 4963
4814 /* It turns out that current cfg infrastructure does not support 4964 /* It turns out that current cfg infrastructure does not support
4815 reuse of basic blocks. Don't bother for now. */ 4965 reuse of basic blocks. Don't bother for now. */
4816 /*VEC_safe_push (rtx, heap, bb_note_pool, note);*/ 4966 /*bb_note_pool.safe_push (note);*/
4817 } 4967 }
4818 4968
4819 /* Get a bb_note from pool or return NULL_RTX if pool is empty. */ 4969 /* Get a bb_note from pool or return NULL_RTX if pool is empty. */
4820 static rtx 4970 static rtx_note *
4821 get_bb_note_from_pool (void) 4971 get_bb_note_from_pool (void)
4822 { 4972 {
4823 if (VEC_empty (rtx, bb_note_pool)) 4973 if (bb_note_pool.is_empty ())
4824 return NULL_RTX; 4974 return NULL;
4825 else 4975 else
4826 { 4976 {
4827 rtx note = VEC_pop (rtx, bb_note_pool); 4977 rtx_note *note = bb_note_pool.pop ();
4828 4978
4829 PREV_INSN (note) = NULL_RTX; 4979 SET_PREV_INSN (note) = NULL_RTX;
4830 NEXT_INSN (note) = NULL_RTX; 4980 SET_NEXT_INSN (note) = NULL_RTX;
4831 4981
4832 return note; 4982 return note;
4833 } 4983 }
4834 } 4984 }
4835 4985
4836 /* Free bb_note_pool. */ 4986 /* Free bb_note_pool. */
4837 void 4987 void
4838 free_bb_note_pool (void) 4988 free_bb_note_pool (void)
4839 { 4989 {
4840 VEC_free (rtx, heap, bb_note_pool); 4990 bb_note_pool.release ();
4841 } 4991 }
4842 4992
4843 /* Setup scheduler pool and successor structure. */ 4993 /* Setup scheduler pool and successor structure. */
4844 void 4994 void
4845 alloc_sched_pools (void) 4995 alloc_sched_pools (void)
4849 succs_size = MAX_WS + 1; 4999 succs_size = MAX_WS + 1;
4850 succs_info_pool.stack = XCNEWVEC (struct succs_info, succs_size); 5000 succs_info_pool.stack = XCNEWVEC (struct succs_info, succs_size);
4851 succs_info_pool.size = succs_size; 5001 succs_info_pool.size = succs_size;
4852 succs_info_pool.top = -1; 5002 succs_info_pool.top = -1;
4853 succs_info_pool.max_top = -1; 5003 succs_info_pool.max_top = -1;
4854
4855 sched_lists_pool = create_alloc_pool ("sel-sched-lists",
4856 sizeof (struct _list_node), 500);
4857 } 5004 }
4858 5005
4859 /* Free the pools. */ 5006 /* Free the pools. */
4860 void 5007 void
4861 free_sched_pools (void) 5008 free_sched_pools (void)
4862 { 5009 {
4863 int i; 5010 int i;
4864 5011
4865 free_alloc_pool (sched_lists_pool); 5012 sched_lists_pool.release ();
4866 gcc_assert (succs_info_pool.top == -1); 5013 gcc_assert (succs_info_pool.top == -1);
4867 for (i = 0; i < succs_info_pool.max_top; i++) 5014 for (i = 0; i <= succs_info_pool.max_top; i++)
4868 { 5015 {
4869 VEC_free (rtx, heap, succs_info_pool.stack[i].succs_ok); 5016 succs_info_pool.stack[i].succs_ok.release ();
4870 VEC_free (rtx, heap, succs_info_pool.stack[i].succs_other); 5017 succs_info_pool.stack[i].succs_other.release ();
4871 VEC_free (int, heap, succs_info_pool.stack[i].probs_ok); 5018 succs_info_pool.stack[i].probs_ok.release ();
4872 } 5019 }
4873 free (succs_info_pool.stack); 5020 free (succs_info_pool.stack);
4874 } 5021 }
4875 5022
4876 5023
4905 if (rev_top_order_index[bbi] 5052 if (rev_top_order_index[bbi]
4906 < rev_top_order_index[cur_bbi]) 5053 < rev_top_order_index[cur_bbi])
4907 break; 5054 break;
4908 } 5055 }
4909 5056
4910 /* We skipped the right block, so we increase i. We accomodate 5057 /* We skipped the right block, so we increase i. We accommodate
4911 it for increasing by step later, so we decrease i. */ 5058 it for increasing by step later, so we decrease i. */
4912 return (i + 1) - 1; 5059 return (i + 1) - 1;
4913 } 5060 }
4914 else if (has_preds_outside_rgn) 5061 else if (has_preds_outside_rgn)
4915 { 5062 {
5026 blocks from last_added_blocks vector. */ 5173 blocks from last_added_blocks vector. */
5027 static void 5174 static void
5028 sel_add_bb (basic_block bb) 5175 sel_add_bb (basic_block bb)
5029 { 5176 {
5030 /* Extend luids so that new notes will receive zero luids. */ 5177 /* Extend luids so that new notes will receive zero luids. */
5031 sched_init_luids (NULL, NULL, NULL, NULL); 5178 sched_extend_luids ();
5032 sched_init_bbs (); 5179 sched_init_bbs ();
5033 sel_init_bbs (last_added_blocks, NULL); 5180 sel_init_bbs (last_added_blocks);
5034 5181
5035 /* When bb is passed explicitly, the vector should contain 5182
5036 a single element equal to bb; otherwise, the vector 5183
5037 must not be empty. */ 5184
5038 gcc_assert (last_added_blocks != NULL); 5185 gcc_assert (last_added_blocks.exists ());
5039 5186
5040 if (bb != NULL) 5187 if (bb != NULL)
5041 { 5188 {
5042 gcc_assert (VEC_length (basic_block, last_added_blocks) == 1 5189 gcc_assert (last_added_blocks.length () == 1
5043 && VEC_index (basic_block, 5190 && last_added_blocks[0] == bb);
5044 last_added_blocks, 0) == bb);
5045 add_block_to_current_region (bb); 5191 add_block_to_current_region (bb);
5046 5192
5047 /* We associate creating/deleting data sets with the first insn 5193 /* We associate creating/deleting data sets with the first insn
5048 appearing / disappearing in the bb. */ 5194 appearing / disappearing in the bb. */
5049 if (!sel_bb_empty_p (bb) && BB_LV_SET (bb) == NULL) 5195 if (!sel_bb_empty_p (bb) && BB_LV_SET (bb) == NULL)
5050 create_initial_data_sets (bb); 5196 create_initial_data_sets (bb);
5051 5197
5052 VEC_free (basic_block, heap, last_added_blocks); 5198 last_added_blocks.release ();
5053 } 5199 }
5054 else 5200 else
5055 /* BB is NULL - process LAST_ADDED_BLOCKS instead. */ 5201 /* BB is NULL - process LAST_ADDED_BLOCKS instead. */
5056 { 5202 {
5057 int i; 5203 int i;
5058 basic_block temp_bb = NULL; 5204 basic_block temp_bb = NULL;
5059 5205
5060 for (i = 0; 5206 for (i = 0;
5061 VEC_iterate (basic_block, last_added_blocks, i, bb); i++) 5207 last_added_blocks.iterate (i, &bb); i++)
5062 { 5208 {
5063 add_block_to_current_region (bb); 5209 add_block_to_current_region (bb);
5064 temp_bb = bb; 5210 temp_bb = bb;
5065 } 5211 }
5066 5212
5067 /* We need to fetch at least one bb so we know the region 5213 /* We need to fetch at least one bb so we know the region
5068 to update. */ 5214 to update. */
5069 gcc_assert (temp_bb != NULL); 5215 gcc_assert (temp_bb != NULL);
5070 bb = temp_bb; 5216 bb = temp_bb;
5071 5217
5072 VEC_free (basic_block, heap, last_added_blocks); 5218 last_added_blocks.release ();
5073 } 5219 }
5074 5220
5075 rgn_setup_region (CONTAINING_RGN (bb->index)); 5221 rgn_setup_region (CONTAINING_RGN (bb->index));
5076 } 5222 }
5077 5223
5101 5247
5102 /* Concatenate info of EMPTY_BB to info of MERGE_BB. */ 5248 /* Concatenate info of EMPTY_BB to info of MERGE_BB. */
5103 static void 5249 static void
5104 move_bb_info (basic_block merge_bb, basic_block empty_bb) 5250 move_bb_info (basic_block merge_bb, basic_block empty_bb)
5105 { 5251 {
5106 gcc_assert (in_current_region_p (merge_bb)); 5252 if (in_current_region_p (merge_bb))
5107 5253 concat_note_lists (BB_NOTE_LIST (empty_bb),
5108 concat_note_lists (BB_NOTE_LIST (empty_bb), 5254 &BB_NOTE_LIST (merge_bb));
5109 &BB_NOTE_LIST (merge_bb)); 5255 BB_NOTE_LIST (empty_bb) = NULL;
5110 BB_NOTE_LIST (empty_bb) = NULL_RTX;
5111 5256
5112 } 5257 }
5113 5258
5114 /* Remove EMPTY_BB. If REMOVE_FROM_CFG_P is false, remove EMPTY_BB from 5259 /* Remove EMPTY_BB. If REMOVE_FROM_CFG_P is false, remove EMPTY_BB from
5115 region, but keep it in CFG. */ 5260 region, but keep it in CFG. */
5182 per-bb data structures. */ 5327 per-bb data structures. */
5183 static basic_block 5328 static basic_block
5184 sel_create_basic_block (void *headp, void *endp, basic_block after) 5329 sel_create_basic_block (void *headp, void *endp, basic_block after)
5185 { 5330 {
5186 basic_block new_bb; 5331 basic_block new_bb;
5187 insn_t new_bb_note; 5332 rtx_note *new_bb_note;
5188 5333
5189 gcc_assert (flag_sel_sched_pipelining_outer_loops 5334 gcc_assert (flag_sel_sched_pipelining_outer_loops
5190 || last_added_blocks == NULL); 5335 || !last_added_blocks.exists ());
5191 5336
5192 new_bb_note = get_bb_note_from_pool (); 5337 new_bb_note = get_bb_note_from_pool ();
5193 5338
5194 if (new_bb_note == NULL_RTX) 5339 if (new_bb_note == NULL_RTX)
5195 new_bb = orig_cfg_hooks.create_basic_block (headp, endp, after); 5340 new_bb = orig_cfg_hooks.create_basic_block (headp, endp, after);
5196 else 5341 else
5197 { 5342 {
5198 new_bb = create_basic_block_structure ((rtx) headp, (rtx) endp, 5343 new_bb = create_basic_block_structure ((rtx_insn *) headp,
5344 (rtx_insn *) endp,
5199 new_bb_note, after); 5345 new_bb_note, after);
5200 new_bb->aux = NULL; 5346 new_bb->aux = NULL;
5201 } 5347 }
5202 5348
5203 VEC_safe_push (basic_block, heap, last_added_blocks, new_bb); 5349 last_added_blocks.safe_push (new_bb);
5204 5350
5205 return new_bb; 5351 return new_bb;
5206 } 5352 }
5207 5353
5208 /* Implement sched_init_only_bb (). */ 5354 /* Implement sched_init_only_bb (). */
5275 return new_bb; 5421 return new_bb;
5276 } 5422 }
5277 5423
5278 /* If BB ends with a jump insn whose ID is greater than PREV_MAX_UID, return it. 5424
5279 Otherwise returns NULL. */ 5425 Otherwise returns NULL. */
5280 static rtx 5426 static rtx_insn *
5281 check_for_new_jump (basic_block bb, int prev_max_uid) 5427 check_for_new_jump (basic_block bb, int prev_max_uid)
5282 { 5428 {
5283 rtx end; 5429 rtx_insn *end;
5284 5430
5285 end = sel_bb_end (bb); 5431 end = sel_bb_end (bb);
5286 if (end && INSN_UID (end) >= prev_max_uid) 5432 if (end && INSN_UID (end) >= prev_max_uid)
5287 return end; 5433 return end;
5288 return NULL; 5434 return NULL;
5289 } 5435 }
5290 5436
5291 /* Look for a new jump either in FROM_BB block or in newly created JUMP_BB block. 5437 /* Look for a new jump either in FROM_BB block or in newly created JUMP_BB block.
5292 New means having UID at least equal to PREV_MAX_UID. */ 5438 New means having UID at least equal to PREV_MAX_UID. */
5293 static rtx 5439 static rtx_insn *
5294 find_new_jump (basic_block from, basic_block jump_bb, int prev_max_uid) 5440 find_new_jump (basic_block from, basic_block jump_bb, int prev_max_uid)
5295 { 5441 {
5296 rtx jump; 5442 rtx_insn *jump;
5297 5443
5298 /* Return immediately if no new insns were emitted. */ 5444 /* Return immediately if no new insns were emitted. */
5299 if (get_max_uid () == prev_max_uid) 5445 if (get_max_uid () == prev_max_uid)
5300 return NULL; 5446 return NULL;
5301 5447
5314 basic_block 5460 basic_block
5315 sel_split_edge (edge e) 5461 sel_split_edge (edge e)
5316 { 5462 {
5317 basic_block new_bb, src, other_bb = NULL; 5463 basic_block new_bb, src, other_bb = NULL;
5318 int prev_max_uid; 5464 int prev_max_uid;
5319 rtx jump; 5465 rtx_insn *jump;
5320 5466
5321 src = e->src; 5467 src = e->src;
5322 prev_max_uid = get_max_uid (); 5468 prev_max_uid = get_max_uid ();
5323 new_bb = split_edge (e); 5469 new_bb = split_edge (e);
5324 5470
5329 basic_block bb; 5475 basic_block bb;
5330 5476
5331 /* Some of the basic blocks might not have been added to the loop. 5477 /* Some of the basic blocks might not have been added to the loop.
5332 Add them here, until this is fixed in force_fallthru. */ 5478 Add them here, until this is fixed in force_fallthru. */
5333 for (i = 0; 5479 for (i = 0;
5334 VEC_iterate (basic_block, last_added_blocks, i, bb); i++) 5480 last_added_blocks.iterate (i, &bb); i++)
5335 if (!bb->loop_father) 5481 if (!bb->loop_father)
5336 { 5482 {
5337 add_bb_to_loop (bb, e->dest->loop_father); 5483 add_bb_to_loop (bb, e->dest->loop_father);
5338 5484
5339 gcc_assert (!other_bb && (new_bb->index != bb->index)); 5485 gcc_assert (!other_bb && (new_bb->index != bb->index));
5363 5509
5364 new_bb = sched_create_empty_bb_1 (after); 5510 new_bb = sched_create_empty_bb_1 (after);
5365 5511
5366 /* We'll explicitly initialize NEW_BB via sel_init_only_bb () a bit 5512 /* We'll explicitly initialize NEW_BB via sel_init_only_bb () a bit
5367 later. */ 5513 later. */
5368 gcc_assert (VEC_length (basic_block, last_added_blocks) == 1 5514 gcc_assert (last_added_blocks.length () == 1
5369 && VEC_index (basic_block, last_added_blocks, 0) == new_bb); 5515 && last_added_blocks[0] == new_bb);
5370 5516
5371 VEC_free (basic_block, heap, last_added_blocks); 5517 last_added_blocks.release ();
5372 return new_bb; 5518 return new_bb;
5373 } 5519 }
5374 5520
5375 /* Implement sched_create_recovery_block. ORIG_INSN is where block 5521 /* Implement sched_create_recovery_block. ORIG_INSN is where block
5376 will be split to insert a check. */ 5522 will be split to insert a check. */
5377 basic_block 5523 basic_block
5378 sel_create_recovery_block (insn_t orig_insn) 5524 sel_create_recovery_block (insn_t orig_insn)
5379 { 5525 {
5380 basic_block first_bb, second_bb, recovery_block; 5526 basic_block first_bb, second_bb, recovery_block;
5381 basic_block before_recovery = NULL; 5527 basic_block before_recovery = NULL;
5382 rtx jump; 5528 rtx_insn *jump;
5383 5529
5384 first_bb = BLOCK_FOR_INSN (orig_insn); 5530 first_bb = BLOCK_FOR_INSN (orig_insn);
5385 if (sel_bb_end_p (orig_insn)) 5531 if (sel_bb_end_p (orig_insn))
5386 { 5532 {
5387 /* Avoid introducing an empty block while splitting. */ 5533 /* Avoid introducing an empty block while splitting. */
5391 else 5537 else
5392 second_bb = sched_split_block (first_bb, orig_insn); 5538 second_bb = sched_split_block (first_bb, orig_insn);
5393 5539
5394 recovery_block = sched_create_recovery_block (&before_recovery); 5540 recovery_block = sched_create_recovery_block (&before_recovery);
5395 if (before_recovery) 5541 if (before_recovery)
5396 copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR); 5542 copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR_FOR_FN (cfun));
5397 5543
5398 gcc_assert (sel_bb_empty_p (recovery_block)); 5544 gcc_assert (sel_bb_empty_p (recovery_block));
5399 sched_create_recovery_edges (first_bb, recovery_block, second_bb); 5545 sched_create_recovery_edges (first_bb, recovery_block, second_bb);
5400 if (current_loops != NULL) 5546 if (current_loops != NULL)
5401 add_bb_to_loop (recovery_block, first_bb->loop_father); 5547 add_bb_to_loop (recovery_block, first_bb->loop_father);
5422 merge_blocks (a, b); 5568 merge_blocks (a, b);
5423 change_loops_latches (b, a); 5569 change_loops_latches (b, a);
5424 } 5570 }
5425 5571
5426 /* A wrapper for redirect_edge_and_branch_force, which also initializes 5572 /* A wrapper for redirect_edge_and_branch_force, which also initializes
5427 data structures for possibly created bb and insns. Returns the newly 5573 data structures for possibly created bb and insns. */
5428 added bb or NULL, when a bb was not needed. */
5429 void 5574 void
5430 sel_redirect_edge_and_branch_force (edge e, basic_block to) 5575 sel_redirect_edge_and_branch_force (edge e, basic_block to)
5431 { 5576 {
5432 basic_block jump_bb, src, orig_dest = e->dest; 5577 basic_block jump_bb, src, orig_dest = e->dest;
5433 int prev_max_uid; 5578 int prev_max_uid;
5434 rtx jump; 5579 rtx_insn *jump;
5580 int old_seqno = -1;
5435 5581
5436 /* This function is now used only for bookkeeping code creation, where 5582 /* This function is now used only for bookkeeping code creation, where
5437 we'll never get the single pred of orig_dest block and thus will not 5583 we'll never get the single pred of orig_dest block and thus will not
5438 hit unreachable blocks when updating dominator info. */ 5584 hit unreachable blocks when updating dominator info. */
5439 gcc_assert (!sel_bb_empty_p (e->src) 5585 gcc_assert (!sel_bb_empty_p (e->src)
5440 && !single_pred_p (orig_dest)); 5586 && !single_pred_p (orig_dest));
5441 src = e->src; 5587 src = e->src;
5442 prev_max_uid = get_max_uid (); 5588 prev_max_uid = get_max_uid ();
5589 /* Compute and pass old_seqno down to sel_init_new_insn only for the case
5590 when the conditional jump being redirected may become unconditional. */
5591 if (any_condjump_p (BB_END (src))
5592 && INSN_SEQNO (BB_END (src)) >= 0)
5593 old_seqno = INSN_SEQNO (BB_END (src));
5594
5443 jump_bb = redirect_edge_and_branch_force (e, to); 5595 jump_bb = redirect_edge_and_branch_force (e, to);
5444
5445 if (jump_bb != NULL) 5596 if (jump_bb != NULL)
5446 sel_add_bb (jump_bb); 5597 sel_add_bb (jump_bb);
5447 5598
5448 /* This function could not be used to spoil the loop structure by now, 5599 /* This function could not be used to spoil the loop structure by now,
5449 thus we don't care to update anything. But check it to be sure. */ 5600 thus we don't care to update anything. But check it to be sure. */
5451 && pipelining_p) 5602 && pipelining_p)
5452 gcc_assert (loop_latch_edge (current_loop_nest)); 5603 gcc_assert (loop_latch_edge (current_loop_nest));
5453 5604
5454 jump = find_new_jump (src, jump_bb, prev_max_uid); 5605 jump = find_new_jump (src, jump_bb, prev_max_uid);
5455 if (jump) 5606 if (jump)
5456 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP); 5607 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP,
5608 old_seqno);
5457 set_immediate_dominator (CDI_DOMINATORS, to, 5609 set_immediate_dominator (CDI_DOMINATORS, to,
5458 recompute_dominator (CDI_DOMINATORS, to)); 5610 recompute_dominator (CDI_DOMINATORS, to));
5459 set_immediate_dominator (CDI_DOMINATORS, orig_dest, 5611 set_immediate_dominator (CDI_DOMINATORS, orig_dest,
5460 recompute_dominator (CDI_DOMINATORS, orig_dest)); 5612 recompute_dominator (CDI_DOMINATORS, orig_dest));
5461 } 5613 }
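The old_seqno bookkeeping added above (and mirrored in sel_redirect_edge_and_branch below) covers one corner case: when redirection leaves a conditional jump with a single destination, the jump is re-emitted as a simplejump, and the fresh insn would otherwise reach sel_init_new_insn with no seqno to inherit. The contract, as inferred from the two call sites (a sketch, not the declaration):

  /* A non-negative OLD_SEQNO is reused for the re-emitted jump;
     -1 keeps the old derive-it-from-context behavior.  */
  int old_seqno = -1;
  if (any_condjump_p (BB_END (src)) && INSN_SEQNO (BB_END (src)) >= 0)
    old_seqno = INSN_SEQNO (BB_END (src));
  /* ... redirect the edge, then locate the new jump insn ...  */
  sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP,
                     old_seqno);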
5466 sel_redirect_edge_and_branch (edge e, basic_block to) 5618 sel_redirect_edge_and_branch (edge e, basic_block to)
5467 { 5619 {
5468 bool latch_edge_p; 5620 bool latch_edge_p;
5469 basic_block src, orig_dest = e->dest; 5621 basic_block src, orig_dest = e->dest;
5470 int prev_max_uid; 5622 int prev_max_uid;
5471 rtx jump; 5623 rtx_insn *jump;
5472 edge redirected; 5624 edge redirected;
5473 bool recompute_toporder_p = false; 5625 bool recompute_toporder_p = false;
5474 bool maybe_unreachable = single_pred_p (orig_dest); 5626 bool maybe_unreachable = single_pred_p (orig_dest);
5627 int old_seqno = -1;
5475 5628
5476 latch_edge_p = (pipelining_p 5629 latch_edge_p = (pipelining_p
5477 && current_loop_nest 5630 && current_loop_nest
5478 && e == loop_latch_edge (current_loop_nest)); 5631 && e == loop_latch_edge (current_loop_nest));
5479 5632
5480 src = e->src; 5633 src = e->src;
5481 prev_max_uid = get_max_uid (); 5634 prev_max_uid = get_max_uid ();
5482 5635
5636 /* Compute and pass old_seqno down to sel_init_new_insn only for the case
5637 when the conditional jump being redirected may become unconditional. */
5638 if (any_condjump_p (BB_END (src))
5639 && INSN_SEQNO (BB_END (src)) >= 0)
5640 old_seqno = INSN_SEQNO (BB_END (src));
5641
5483 redirected = redirect_edge_and_branch (e, to); 5642 redirected = redirect_edge_and_branch (e, to);
5484 5643
5485 gcc_assert (redirected && last_added_blocks == NULL); 5644 gcc_assert (redirected && !last_added_blocks.exists ());
5486 5645
5487 /* When we've redirected a latch edge, update the header. */ 5646 /* When we've redirected a latch edge, update the header. */
5488 if (latch_edge_p) 5647 if (latch_edge_p)
5489 { 5648 {
5490 current_loop_nest->header = to; 5649 current_loop_nest->header = to;
5498 && BLOCK_TO_BB (e->src->index) > BLOCK_TO_BB (to->index)) 5657 && BLOCK_TO_BB (e->src->index) > BLOCK_TO_BB (to->index))
5499 recompute_toporder_p = true; 5658 recompute_toporder_p = true;
5500 5659
5501 jump = find_new_jump (src, NULL, prev_max_uid); 5660 jump = find_new_jump (src, NULL, prev_max_uid);
5502 if (jump) 5661 if (jump)
5503 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP); 5662 sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP, old_seqno);
5504 5663
5505 /* Only update dominator info when we don't have unreachable blocks. 5664 /* Only update dominator info when we don't have unreachable blocks.
5506 Otherwise we'll update in maybe_tidy_empty_bb. */ 5665 Otherwise we'll update in maybe_tidy_empty_bb. */
5507 if (!maybe_unreachable) 5666 if (!maybe_unreachable)
5508 { 5667 {
5547 } 5706 }
5548 5707
5549 5708
5550 /* Emit an insn rtx based on PATTERN. If a jump insn is wanted, 5709 /* Emit an insn rtx based on PATTERN. If a jump insn is wanted,
5551 LABEL is where this jump should be directed. */ 5710 LABEL is where this jump should be directed. */
5552 rtx 5711 rtx_insn *
5553 create_insn_rtx_from_pattern (rtx pattern, rtx label) 5712 create_insn_rtx_from_pattern (rtx pattern, rtx label)
5554 { 5713 {
5555 rtx insn_rtx; 5714 rtx_insn *insn_rtx;
5556 5715
5557 gcc_assert (!INSN_P (pattern)); 5716 gcc_assert (!INSN_P (pattern));
5558 5717
5559 start_sequence (); 5718 start_sequence ();
5560 5719
5569 ++LABEL_NUSES (label); 5728 ++LABEL_NUSES (label);
5570 } 5729 }
5571 5730
5572 end_sequence (); 5731 end_sequence ();
5573 5732
5574 sched_init_luids (NULL, NULL, NULL, NULL); 5733 sched_extend_luids ();
5575 sched_extend_target (); 5734 sched_extend_target ();
5576 sched_deps_init (false); 5735 sched_deps_init (false);
5577 5736
5578 /* Initialize INSN_CODE now. */ 5737 /* Initialize INSN_CODE now. */
5579 recog_memoized (insn_rtx); 5738 recog_memoized (insn_rtx);
5581 } 5740 }
5582 5741
5583 /* Create a new vinsn for INSN_RTX. FORCE_UNIQUE_P is true when the vinsn 5742 /* Create a new vinsn for INSN_RTX. FORCE_UNIQUE_P is true when the vinsn
5584 must not be clonable. */ 5743 must not be clonable. */
5585 vinsn_t 5744 vinsn_t
5586 create_vinsn_from_insn_rtx (rtx insn_rtx, bool force_unique_p) 5745 create_vinsn_from_insn_rtx (rtx_insn *insn_rtx, bool force_unique_p)
5587 { 5746 {
5588 gcc_assert (INSN_P (insn_rtx) && !INSN_IN_STREAM_P (insn_rtx)); 5747 gcc_assert (INSN_P (insn_rtx) && !INSN_IN_STREAM_P (insn_rtx));
5589 5748
5590 /* If VINSN_TYPE is not USE, retain its uniqueness. */ 5749 /* If VINSN_TYPE is not USE, retain its uniqueness. */
5591 return vinsn_create (insn_rtx, force_unique_p); 5750 return vinsn_create (insn_rtx, force_unique_p);
5592 } 5751 }
5593 5752
5594 /* Create a copy of INSN_RTX. */ 5753 /* Create a copy of INSN_RTX. */
5595 rtx 5754 rtx_insn *
5596 create_copy_of_insn_rtx (rtx insn_rtx) 5755 create_copy_of_insn_rtx (rtx insn_rtx)
5597 { 5756 {
5598 rtx res; 5757 rtx_insn *res;
5758 rtx link;
5599 5759
5600 if (DEBUG_INSN_P (insn_rtx)) 5760 if (DEBUG_INSN_P (insn_rtx))
5601 return create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)), 5761 return create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
5602 insn_rtx); 5762 insn_rtx);
5603 5763
5604 gcc_assert (NONJUMP_INSN_P (insn_rtx)); 5764 gcc_assert (NONJUMP_INSN_P (insn_rtx));
5605 5765
5606 res = create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)), 5766 res = create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
5607 NULL_RTX); 5767 NULL_RTX);
5768
5769 /* Locate the end of existing REG_NOTES in NEW_RTX. */
5770 rtx *ptail = &REG_NOTES (res);
5771 while (*ptail != NULL_RTX)
5772 ptail = &XEXP (*ptail, 1);
5773
5774 /* Copy all REG_NOTES except REG_EQUAL/REG_EQUIV and REG_LABEL_OPERAND
5775 since mark_jump_label will make them. REG_LABEL_TARGETs are created
5776 there too, but are supposed to be sticky, so we copy them. */
5777 for (link = REG_NOTES (insn_rtx); link; link = XEXP (link, 1))
5778 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND
5779 && REG_NOTE_KIND (link) != REG_EQUAL
5780 && REG_NOTE_KIND (link) != REG_EQUIV)
5781 {
5782 *ptail = duplicate_reg_note (link);
5783 ptail = &XEXP (*ptail, 1);
5784 }
5785
5608 return res; 5786 return res;
5609 } 5787 }
5610 5788
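The REG_NOTES copying that create_copy_of_insn_rtx gained uses the pointer-to-pointer tail idiom: PTAIL always holds the address of the next link to patch, so each append is O(1) with no separate last-node variable. The same idiom in a self-contained form (hypothetical node type, not GCC code; error handling elided):

  #include <stdlib.h>

  struct node { int kind; struct node *next; };

  /* Append a copy of each SRC element accepted by FILTER to the
     list headed at *DST, preserving order.  */
  static void
  append_filtered (struct node **dst, const struct node *src,
                   int (*filter) (int))
  {
    while (*dst != NULL)        /* walk to the terminating link   */
      dst = &(*dst)->next;
    for (; src != NULL; src = src->next)
      if (filter (src->kind))
        {
          struct node *copy = (struct node *) malloc (sizeof *copy);
          copy->kind = src->kind;
          copy->next = NULL;
          *dst = copy;          /* patch the tail link in place   */
          dst = &copy->next;    /* and advance to the new tail    */
        }
  }

In the hunk above the filter is the REG_NOTE_KIND test: REG_EQUAL, REG_EQUIV and REG_LABEL_OPERAND notes are skipped because mark_jump_label recreates them, while the sticky REG_LABEL_TARGET notes are copied via duplicate_reg_note.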
5611 /* Change vinsn field of EXPR to hold NEW_VINSN. */ 5789 /* Change vinsn field of EXPR to hold NEW_VINSN. */
5612 void 5790 void
5636 NULL, NULL, 5814 NULL, NULL,
5637 0, 0, 5815 0, 0,
5638 5816
5639 NULL, /* add_remove_insn */ 5817 NULL, /* add_remove_insn */
5640 NULL, /* begin_schedule_ready */ 5818 NULL, /* begin_schedule_ready */
5819 NULL, /* begin_move_insn */
5641 NULL, /* advance_target_bb */ 5820 NULL, /* advance_target_bb */
5821
5822 NULL,
5823 NULL,
5824
5642 SEL_SCHED | NEW_BBS 5825 SEL_SCHED | NEW_BBS
5643 }; 5826 };
5644 5827
5645 /* Setup special insns used in the scheduler. */ 5828 /* Setup special insns used in the scheduler. */
5646 void 5829 void
5653 5836
5654 start_sequence (); 5837 start_sequence ();
5655 emit_insn (nop_pattern); 5838 emit_insn (nop_pattern);
5656 exit_insn = get_insns (); 5839 exit_insn = get_insns ();
5657 end_sequence (); 5840 end_sequence ();
5658 set_block_for_insn (exit_insn, EXIT_BLOCK_PTR); 5841 set_block_for_insn (exit_insn, EXIT_BLOCK_PTR_FOR_FN (cfun));
5659 } 5842 }
5660 5843
5661 /* Free special insns used in the scheduler. */ 5844 /* Free special insns used in the scheduler. */
5662 void 5845 void
5663 free_nop_and_exit_insns (void) 5846 free_nop_and_exit_insns (void)
5664 { 5847 {
5665 exit_insn = NULL_RTX; 5848 exit_insn = NULL;
5666 nop_pattern = NULL_RTX; 5849 nop_pattern = NULL_RTX;
5667 } 5850 }
5668 5851
5669 /* Setup a special vinsn used in new insns initialization. */ 5852 /* Setup a special vinsn used in new insns initialization. */
5670 void 5853 void
5826 gcc_assert (loop_blocks[0] == loop->header); 6009 gcc_assert (loop_blocks[0] == loop->header);
5827 6010
5828 new_rgn_number = sel_create_new_region (); 6011 new_rgn_number = sel_create_new_region ();
5829 6012
5830 sel_add_block_to_region (preheader_block, &bb_ord_index, new_rgn_number); 6013 sel_add_block_to_region (preheader_block, &bb_ord_index, new_rgn_number);
5831 SET_BIT (bbs_in_loop_rgns, preheader_block->index); 6014 bitmap_set_bit (bbs_in_loop_rgns, preheader_block->index);
5832 6015
5833 for (i = 0; i < loop->num_nodes; i++) 6016 for (i = 0; i < loop->num_nodes; i++)
5834 { 6017 {
5835 /* Add only those blocks that haven't been scheduled in the inner loop. 6018 /* Add only those blocks that haven't been scheduled in the inner loop.
5836 The exception is the basic blocks with bookkeeping code - they should 6019 The exception is the basic blocks with bookkeeping code - they should
5837 be added to the region (and they actually don't belong to the loop 6020 be added to the region (and they actually don't belong to the loop
5838 body, but to the region containing that loop body). */ 6021 body, but to the region containing that loop body). */
5839 6022
5840 gcc_assert (new_rgn_number >= 0); 6023 gcc_assert (new_rgn_number >= 0);
5841 6024
5842 if (! TEST_BIT (bbs_in_loop_rgns, loop_blocks[i]->index)) 6025 if (! bitmap_bit_p (bbs_in_loop_rgns, loop_blocks[i]->index))
5843 { 6026 {
5844 sel_add_block_to_region (loop_blocks[i], &bb_ord_index, 6027 sel_add_block_to_region (loop_blocks[i], &bb_ord_index,
5845 new_rgn_number); 6028 new_rgn_number);
5846 SET_BIT (bbs_in_loop_rgns, loop_blocks[i]->index); 6029 bitmap_set_bit (bbs_in_loop_rgns, loop_blocks[i]->index);
5847 } 6030 }
5848 } 6031 }
5849 6032
5850 free (loop_blocks); 6033 free (loop_blocks);
5851 MARK_LOOP_FOR_PIPELINING (loop); 6034 MARK_LOOP_FOR_PIPELINING (loop);
5853 return new_rgn_number; 6036 return new_rgn_number;
5854 } 6037 }
5855 6038
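The bitmap_* renames in make_region_from_loop come from the sbitmap API unification: the SET_BIT and TEST_BIT macros and sbitmap_zero were replaced by bitmap_set_bit, bitmap_bit_p and bitmap_clear, overloaded to work on simple bitmaps too. A sketch over a hypothetical sbitmap:

  sbitmap map = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (map);                   /* was: sbitmap_zero (map);            */
  bitmap_set_bit (map, bb->index);      /* was: SET_BIT (map, bb->index);      */
  if (bitmap_bit_p (map, bb->index))    /* was: if (TEST_BIT (map, bb->index)) */
    bitmap_clear_bit (map, bb->index);  /* was: RESET_BIT (map, bb->index);    */
  sbitmap_free (map);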
5856 /* Create a new region from preheader blocks LOOP_BLOCKS. */ 6039 /* Create a new region from preheader blocks LOOP_BLOCKS. */
5857 void 6040 void
5858 make_region_from_loop_preheader (VEC(basic_block, heap) **loop_blocks) 6041 make_region_from_loop_preheader (vec<basic_block> *&loop_blocks)
5859 { 6042 {
5860 unsigned int i; 6043 unsigned int i;
5861 int new_rgn_number = -1; 6044 int new_rgn_number = -1;
5862 basic_block bb; 6045 basic_block bb;
5863 6046
5864 /* Basic block index, to be assigned to BLOCK_TO_BB. */ 6047 /* Basic block index, to be assigned to BLOCK_TO_BB. */
5865 int bb_ord_index = 0; 6048 int bb_ord_index = 0;
5866 6049
5867 new_rgn_number = sel_create_new_region (); 6050 new_rgn_number = sel_create_new_region ();
5868 6051
5869 FOR_EACH_VEC_ELT (basic_block, *loop_blocks, i, bb) 6052 FOR_EACH_VEC_ELT (*loop_blocks, i, bb)
5870 { 6053 {
5871 gcc_assert (new_rgn_number >= 0); 6054 gcc_assert (new_rgn_number >= 0);
5872 6055
5873 sel_add_block_to_region (bb, &bb_ord_index, new_rgn_number); 6056 sel_add_block_to_region (bb, &bb_ord_index, new_rgn_number);
5874 } 6057 }
5875 6058
5876 VEC_free (basic_block, heap, *loop_blocks); 6059 vec_free (loop_blocks);
5877 gcc_assert (*loop_blocks == NULL);
5878 } 6060 }
5879 6061
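The new signature vec<basic_block> *&loop_blocks does real work here: vec_free takes the pointer by reference, releases the vector and nulls the caller's copy, which is why the explicit gcc_assert (*loop_blocks == NULL) after VEC_free could be dropped. A hypothetical caller, assuming the usage pattern seen in this file:

  vec<basic_block> *blocks = LOOP_PREHEADER_BLOCKS (current_loop_nest);
  make_region_from_loop_preheader (blocks);
  /* vec_free inside the callee zeroed our pointer through the
     reference parameter.  */
  gcc_assert (blocks == NULL);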
5880 6062
5881 /* Create region(s) from loop nest LOOP, such that inner loops will be 6063 /* Create region(s) from loop nest LOOP, such that inner loops will be
5882 pipelined before outer loops. Returns true when a region for LOOP 6064 pipelined before outer loops. Returns true when a region for LOOP
5887 struct loop *cur_loop; 6069 struct loop *cur_loop;
5888 int rgn_number; 6070 int rgn_number;
5889 6071
5890 /* Traverse all inner nodes of the loop. */ 6072 /* Traverse all inner nodes of the loop. */
5891 for (cur_loop = loop->inner; cur_loop; cur_loop = cur_loop->next) 6073 for (cur_loop = loop->inner; cur_loop; cur_loop = cur_loop->next)
5892 if (! TEST_BIT (bbs_in_loop_rgns, cur_loop->header->index)) 6074 if (! bitmap_bit_p (bbs_in_loop_rgns, cur_loop->header->index))
5893 return false; 6075 return false;
5894 6076
5895 /* At this moment all regular inner loops should have been pipelined. 6077 /* At this moment all regular inner loops should have been pipelined.
5896 Try to create a region from this loop. */ 6078 Try to create a region from this loop. */
5897 rgn_number = make_region_from_loop (loop); 6079 rgn_number = make_region_from_loop (loop);
5898 6080
5899 if (rgn_number < 0) 6081 if (rgn_number < 0)
5900 return false; 6082 return false;
5901 6083
5902 VEC_safe_push (loop_p, heap, loop_nests, loop); 6084 loop_nests.safe_push (loop);
5903 return true; 6085 return true;
5904 } 6086 }
5905 6087
5906 /* Initialize data structures needed. */ 6088 /* Initialize data structures needed. */
5907 void 6089 void
5912 | LOOPS_HAVE_FALLTHRU_PREHEADERS 6094 | LOOPS_HAVE_FALLTHRU_PREHEADERS
5913 | LOOPS_HAVE_RECORDED_EXITS 6095 | LOOPS_HAVE_RECORDED_EXITS
5914 | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS); 6096 | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS);
5915 current_loop_nest = NULL; 6097 current_loop_nest = NULL;
5916 6098
5917 bbs_in_loop_rgns = sbitmap_alloc (last_basic_block); 6099 bbs_in_loop_rgns = sbitmap_alloc (last_basic_block_for_fn (cfun));
5918 sbitmap_zero (bbs_in_loop_rgns); 6100 bitmap_clear (bbs_in_loop_rgns);
5919 6101
5920 recompute_rev_top_order (); 6102 recompute_rev_top_order ();
5921 } 6103 }
5922 6104
5923 /* Returns a struct loop for region RGN. */ 6105 /* Returns a struct loop for region RGN. */
5924 loop_p 6106 loop_p
5925 get_loop_nest_for_rgn (unsigned int rgn) 6107 get_loop_nest_for_rgn (unsigned int rgn)
5926 { 6108 {
5927 /* Regions created with extend_rgns don't have corresponding loop nests, 6109 /* Regions created with extend_rgns don't have corresponding loop nests,
5928 because they don't represent loops. */ 6110 because they don't represent loops. */
5929 if (rgn < VEC_length (loop_p, loop_nests)) 6111 if (rgn < loop_nests.length ())
5930 return VEC_index (loop_p, loop_nests, rgn); 6112 return loop_nests[rgn];
5931 else 6113 else
5932 return NULL; 6114 return NULL;
5933 } 6115 }
5934 6116
5935 /* True when LOOP was included into pipelining regions. */ 6117 /* True when LOOP was included into pipelining regions. */
5947 Latch can't be used because it could be in the inner loop too. */ 6129 Latch can't be used because it could be in the inner loop too. */
5948 if (LOOP_MARKED_FOR_PIPELINING_P (loop)) 6130 if (LOOP_MARKED_FOR_PIPELINING_P (loop))
5949 { 6131 {
5950 int rgn = CONTAINING_RGN (loop->latch->index); 6132 int rgn = CONTAINING_RGN (loop->latch->index);
5951 6133
5952 gcc_assert ((unsigned) rgn < VEC_length (loop_p, loop_nests)); 6134 gcc_assert ((unsigned) rgn < loop_nests.length ());
5953 return true; 6135 return true;
5954 } 6136 }
5955 6137
5956 return false; 6138 return false;
5957 } 6139 }
5978 for extend_rgns. */ 6160 for extend_rgns. */
5979 6161
5980 /* LOOP_HDR[I] == -1 if I-th bb doesn't belong to any loop, 6162 /* LOOP_HDR[I] == -1 if I-th bb doesn't belong to any loop,
5981 LOOP_HDR[I] == LOOP_HDR[J] iff basic blocks I and J reside within the same 6163 LOOP_HDR[I] == LOOP_HDR[J] iff basic blocks I and J reside within the same
5982 loop. */ 6164 loop. */
5983 loop_hdr = XNEWVEC (int, last_basic_block); 6165 loop_hdr = XNEWVEC (int, last_basic_block_for_fn (cfun));
5984 degree = XCNEWVEC (int, last_basic_block); 6166 degree = XCNEWVEC (int, last_basic_block_for_fn (cfun));
5985 6167
5986 6168
5987 /* For each basic block that belongs to some loop assign the number 6169 /* For each basic block that belongs to some loop assign the number
5988 of innermost loop it belongs to. */ 6170 of innermost loop it belongs to. */
5989 for (i = 0; i < last_basic_block; i++) 6171 for (i = 0; i < last_basic_block_for_fn (cfun); i++)
5990 loop_hdr[i] = -1; 6172 loop_hdr[i] = -1;
5991 6173
5992 FOR_EACH_BB (bb) 6174 FOR_EACH_BB_FN (bb, cfun)
5993 { 6175 {
5994 if (bb->loop_father && !bb->loop_father->num == 0 6176 if (bb->loop_father && bb->loop_father->num != 0
5995 && !(bb->flags & BB_IRREDUCIBLE_LOOP)) 6177 && !(bb->flags & BB_IRREDUCIBLE_LOOP))
5996 loop_hdr[bb->index] = bb->loop_father->num; 6178 loop_hdr[bb->index] = bb->loop_father->num;
5997 } 6179 }
5998 6180
5999 /* For each basic block degree is calculated as the number of incoming 6181 /* For each basic block degree is calculated as the number of incoming
6000 edges, that are going out of bbs that are not yet scheduled. 6182 edges, that are going out of bbs that are not yet scheduled.
6001 The basic blocks that are scheduled have degree value of zero. */ 6183 The basic blocks that are scheduled have degree value of zero. */
6002 FOR_EACH_BB (bb) 6184 FOR_EACH_BB_FN (bb, cfun)
6003 { 6185 {
6004 degree[bb->index] = 0; 6186 degree[bb->index] = 0;
6005 6187
6006 if (!TEST_BIT (bbs_in_loop_rgns, bb->index)) 6188 if (!bitmap_bit_p (bbs_in_loop_rgns, bb->index))
6007 { 6189 {
6008 FOR_EACH_EDGE (e, ei, bb->preds) 6190 FOR_EACH_EDGE (e, ei, bb->preds)
6009 if (!TEST_BIT (bbs_in_loop_rgns, e->src->index)) 6191 if (!bitmap_bit_p (bbs_in_loop_rgns, e->src->index))
6010 degree[bb->index]++; 6192 degree[bb->index]++;
6011 } 6193 }
6012 else 6194 else
6013 degree[bb->index] = -1; 6195 degree[bb->index] = -1;
6014 } 6196 }
6015 6197
6016 extend_rgns (degree, &cur_rgn_blocks, bbs_in_loop_rgns, loop_hdr); 6198 extend_rgns (degree, &cur_rgn_blocks, bbs_in_loop_rgns, loop_hdr);
6017 6199
6018 /* Any block that did not end up in a region is placed into a region 6200 /* Any block that did not end up in a region is placed into a region
6019 by itself. */ 6201 by itself. */
6020 FOR_EACH_BB (bb) 6202 FOR_EACH_BB_FN (bb, cfun)
6021 if (degree[bb->index] >= 0) 6203 if (degree[bb->index] >= 0)
6022 { 6204 {
6023 rgn_bb_table[cur_rgn_blocks] = bb->index; 6205 rgn_bb_table[cur_rgn_blocks] = bb->index;
6024 RGN_NR_BLOCKS (nr_regions) = 1; 6206 RGN_NR_BLOCKS (nr_regions) = 1;
6025 RGN_BLOCKS (nr_regions) = cur_rgn_blocks++; 6207 RGN_BLOCKS (nr_regions) = cur_rgn_blocks++;
6034 } 6216 }
6035 6217
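Another rename family visible in the hunk above makes the implicit global cfun explicit wherever the current function's CFG is touched. The mapping, each instance of which appears somewhere on this page:

  FOR_EACH_BB_FN (bb, cfun)         /* was: FOR_EACH_BB (bb)    */
  last_basic_block_for_fn (cfun)    /* was: last_basic_block    */
  BASIC_BLOCK_FOR_FN (cfun, i)      /* was: BASIC_BLOCK (i)     */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)     /* was: ENTRY_BLOCK_PTR     */
  EXIT_BLOCK_PTR_FOR_FN (cfun)      /* was: EXIT_BLOCK_PTR      */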
6036 /* Free data structures used in pipelining of loops. */ 6218 /* Free data structures used in pipelining of loops. */
6037 void sel_finish_pipelining (void) 6219 void sel_finish_pipelining (void)
6038 { 6220 {
6039 loop_iterator li;
6040 struct loop *loop; 6221 struct loop *loop;
6041 6222
6042 /* Release aux fields so we don't free them later by mistake. */ 6223 /* Release aux fields so we don't free them later by mistake. */
6043 FOR_EACH_LOOP (li, loop, 0) 6224 FOR_EACH_LOOP (loop, 0)
6044 loop->aux = NULL; 6225 loop->aux = NULL;
6045 6226
6046 loop_optimizer_finalize (); 6227 loop_optimizer_finalize ();
6047 6228
6048 VEC_free (loop_p, heap, loop_nests); 6229 loop_nests.release ();
6049 6230
6050 free (rev_top_order_index); 6231 free (rev_top_order_index);
6051 rev_top_order_index = NULL; 6232 rev_top_order_index = NULL;
6052 } 6233 }
6053 6234
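The FOR_EACH_LOOP change in sel_finish_pipelining above (and in sel_find_rgns below) is easy to misread in diff form: the macro now declares its loop_iterator internally, so callers drop both the local li variable and the leading macro argument while keeping the flags. Side by side:

  struct loop *loop;

  /* Old form (iterator passed in):
       loop_iterator li;
       FOR_EACH_LOOP (li, loop, 0)
         loop->aux = NULL;  */

  /* New form (iterator declared inside the macro):  */
  FOR_EACH_LOOP (loop, 0)
    loop->aux = NULL;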
6060 extend_regions (); 6241 extend_regions ();
6061 6242
6062 if (current_loops) 6243 if (current_loops)
6063 { 6244 {
6064 loop_p loop; 6245 loop_p loop;
6065 loop_iterator li; 6246
6066 6247 FOR_EACH_LOOP (loop, (flag_sel_sched_pipelining_outer_loops
6067 FOR_EACH_LOOP (li, loop, (flag_sel_sched_pipelining_outer_loops 6248 ? LI_FROM_INNERMOST
6068 ? LI_FROM_INNERMOST 6249 : LI_ONLY_INNERMOST))
6069 : LI_ONLY_INNERMOST))
6070 make_regions_from_loop_nest (loop); 6250 make_regions_from_loop_nest (loop);
6071 } 6251 }
6072 6252
6073 /* Make regions from all the remaining basic blocks and schedule them. 6253
6074 These blocks include blocks that don't belong to any loop or belong 6254 These blocks include blocks that don't belong to any loop or belong
6078 /* We don't need bbs_in_loop_rgns anymore. */ 6258 /* We don't need bbs_in_loop_rgns anymore. */
6079 sbitmap_free (bbs_in_loop_rgns); 6259 sbitmap_free (bbs_in_loop_rgns);
6080 bbs_in_loop_rgns = NULL; 6260 bbs_in_loop_rgns = NULL;
6081 } 6261 }
6082 6262
6083 /* Adds the preheader blocks from previous loop to current region taking 6263 /* Add the preheader blocks from previous loop to current region taking
6084 it from LOOP_PREHEADER_BLOCKS (current_loop_nest). 6264 it from LOOP_PREHEADER_BLOCKS (current_loop_nest) and record them in *BBS.
6085 This function is only used with -fsel-sched-pipelining-outer-loops. */ 6265 This function is only used with -fsel-sched-pipelining-outer-loops. */
6086 void 6266 void
6087 sel_add_loop_preheaders (void) 6267 sel_add_loop_preheaders (bb_vec_t *bbs)
6088 { 6268 {
6089 int i; 6269 int i;
6090 basic_block bb; 6270 basic_block bb;
6091 VEC(basic_block, heap) *preheader_blocks 6271 vec<basic_block> *preheader_blocks
6092 = LOOP_PREHEADER_BLOCKS (current_loop_nest); 6272 = LOOP_PREHEADER_BLOCKS (current_loop_nest);
6093 6273
6094 for (i = 0; 6274 if (!preheader_blocks)
6095 VEC_iterate (basic_block, preheader_blocks, i, bb); 6275 return;
6096 i++) 6276
6097 { 6277 for (i = 0; preheader_blocks->iterate (i, &bb); i++)
6098 VEC_safe_push (basic_block, heap, last_added_blocks, bb); 6278 {
6279 bbs->safe_push (bb);
6280 last_added_blocks.safe_push (bb);
6099 sel_add_bb (bb); 6281 sel_add_bb (bb);
6100 } 6282 }
6101 6283
6102 VEC_free (basic_block, heap, preheader_blocks); 6284 vec_free (preheader_blocks);
6103 } 6285 }
6104 6286
6105 /* While pipelining outer loops, returns TRUE if BB is a loop preheader. 6287 /* While pipelining outer loops, returns TRUE if BB is a loop preheader.
6106 Please note that the function should also work when pipelining_p is 6288 Please note that the function should also work when pipelining_p is
6107 false, because it is used when deciding whether we should or should 6289 false, because it is used when deciding whether we should or should
6168 { 6350 {
6169 int i, old_len; 6351 int i, old_len;
6170 int cur_rgn = CONTAINING_RGN (BB_TO_BLOCK (0)); 6352 int cur_rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
6171 basic_block bb; 6353 basic_block bb;
6172 bool all_empty_p = true; 6354 bool all_empty_p = true;
6173 VEC(basic_block, heap) *preheader_blocks 6355 vec<basic_block> *preheader_blocks
6174 = LOOP_PREHEADER_BLOCKS (loop_outer (current_loop_nest)); 6356 = LOOP_PREHEADER_BLOCKS (loop_outer (current_loop_nest));
6175 6357
6358 vec_check_alloc (preheader_blocks, 0);
6359
6176 gcc_assert (current_loop_nest); 6360 gcc_assert (current_loop_nest);
6177 old_len = VEC_length (basic_block, preheader_blocks); 6361 old_len = preheader_blocks->length ();
6178 6362
6179 /* Add blocks that aren't within the current loop to PREHEADER_BLOCKS. */ 6363 /* Add blocks that aren't within the current loop to PREHEADER_BLOCKS. */
6180 for (i = 0; i < RGN_NR_BLOCKS (cur_rgn); i++) 6364 for (i = 0; i < RGN_NR_BLOCKS (cur_rgn); i++)
6181 { 6365 {
6182 bb = BASIC_BLOCK (BB_TO_BLOCK (i)); 6366 bb = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i));
6183 6367
6184 /* If the basic block belongs to region, but doesn't belong to 6368 /* If the basic block belongs to region, but doesn't belong to
6185 corresponding loop, then it should be a preheader. */ 6369 corresponding loop, then it should be a preheader. */
6186 if (sel_is_loop_preheader_p (bb)) 6370 if (sel_is_loop_preheader_p (bb))
6187 { 6371 {
6188 VEC_safe_push (basic_block, heap, preheader_blocks, bb); 6372 preheader_blocks->safe_push (bb);
6189 if (BB_END (bb) != bb_note (bb)) 6373 if (BB_END (bb) != bb_note (bb))
6190 all_empty_p = false; 6374 all_empty_p = false;
6191 } 6375 }
6192 } 6376 }
6193 6377
6194 /* Remove these blocks only after iterating over the whole region. */ 6378 /* Remove these blocks only after iterating over the whole region. */
6195 for (i = VEC_length (basic_block, preheader_blocks) - 1; 6379 for (i = preheader_blocks->length () - 1; i >= old_len; i--)
6196 i >= old_len; 6380 {
6197 i--) 6381 bb = (*preheader_blocks)[i];
6198 {
6199 bb = VEC_index (basic_block, preheader_blocks, i);
6200 sel_remove_bb (bb, false); 6382 sel_remove_bb (bb, false);
6201 } 6383 }
6202 6384
6203 if (!considered_for_pipelining_p (loop_outer (current_loop_nest))) 6385 if (!considered_for_pipelining_p (loop_outer (current_loop_nest)))
6204 { 6386 {
6205 if (!all_empty_p) 6387 if (!all_empty_p)
6206 /* Immediately create new region from preheader. */ 6388 /* Immediately create new region from preheader. */
6207 make_region_from_loop_preheader (&preheader_blocks); 6389 make_region_from_loop_preheader (preheader_blocks);
6208 else 6390 else
6209 { 6391 {
6210 /* If all preheader blocks are empty - don't create a new empty region. 6392
6211 Instead, remove them completely. */ 6393 Instead, remove them completely. */
6212 FOR_EACH_VEC_ELT (basic_block, preheader_blocks, i, bb) 6394 FOR_EACH_VEC_ELT (*preheader_blocks, i, bb)
6213 { 6395 {
6214 edge e; 6396 edge e;
6215 edge_iterator ei; 6397 edge_iterator ei;
6216 basic_block prev_bb = bb->prev_bb, next_bb = bb->next_bb; 6398 basic_block prev_bb = bb->prev_bb, next_bb = bb->next_bb;
6217 6399
6229 /* Check if after deleting preheader there is a nonconditional 6411 /* Check if after deleting preheader there is a nonconditional
6230 jump in PREV_BB that leads to the next basic block NEXT_BB. 6412 jump in PREV_BB that leads to the next basic block NEXT_BB.
6231 If so, delete this jump and clear data sets of its 6413
6232 basic block if it becomes empty. */ 6414 basic block if it becomes empty. */
6233 if (next_bb->prev_bb == prev_bb 6415 if (next_bb->prev_bb == prev_bb
6234 && prev_bb != ENTRY_BLOCK_PTR 6416 && prev_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
6235 && bb_has_removable_jump_to_p (prev_bb, next_bb)) 6417 && bb_has_removable_jump_to_p (prev_bb, next_bb))
6236 { 6418 {
6237 redirect_edge_and_branch (EDGE_SUCC (prev_bb, 0), next_bb); 6419 redirect_edge_and_branch (EDGE_SUCC (prev_bb, 0), next_bb);
6238 if (BB_END (prev_bb) == bb_note (prev_bb)) 6420 if (BB_END (prev_bb) == bb_note (prev_bb))
6239 free_data_sets (prev_bb); 6421 free_data_sets (prev_bb);
6242 set_immediate_dominator (CDI_DOMINATORS, next_bb, 6424 set_immediate_dominator (CDI_DOMINATORS, next_bb,
6243 recompute_dominator (CDI_DOMINATORS, 6425 recompute_dominator (CDI_DOMINATORS,
6244 next_bb)); 6426 next_bb));
6245 } 6427 }
6246 } 6428 }
6247 VEC_free (basic_block, heap, preheader_blocks); 6429 vec_free (preheader_blocks);
6248 } 6430 }
6249 else 6431 else
6250 /* Store preheader within the father's loop structure. */ 6432 /* Store preheader within the father's loop structure. */
6251 SET_LOOP_PREHEADER_BLOCKS (loop_outer (current_loop_nest), 6433 SET_LOOP_PREHEADER_BLOCKS (loop_outer (current_loop_nest),
6252 preheader_blocks); 6434 preheader_blocks);
6253 } 6435 }
6436
6254 #endif 6437 #endif