comparison gcc/reorg.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
comparing 111:04ced10e8804 with 131:84e7813d76e9
@@ -1,7 +1,7 @@
 /* Perform instruction reorganizations for delay slot filling.
-   Copyright (C) 1992-2017 Free Software Foundation, Inc.
+   Copyright (C) 1992-2018 Free Software Foundation, Inc.
    Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
    Hacked by Michael Tiemann (tiemann@cygnus.com).

 This file is part of GCC.

@@ -274,10 +274,11 @@

   switch (GET_CODE (insn))
     {
     case NOTE:
     case CALL_INSN:
+    case DEBUG_INSN:
       return 0;

     case CODE_LABEL:
       return labels_p;

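
Note: the new DEBUG_INSN case, and the CLOBBER_HIGH tests added throughout the hunks below, install the same two filters at every scan site: debug insns and stand-alone USE / CLOBBER / CLOBBER_HIGH patterns carry no machine instruction, so the delay-slot filler must step over them. A minimal sketch of that recurring test, assuming GCC's rtl.h predicates; the helper name is invented, the patch repeats the test inline instead:

    /* Sketch only, not GCC code: the filter this patch repeats at each
       scan site.  */
    static bool
    skippable_for_delay_p (rtx_insn *insn)
    {
      if (DEBUG_INSN_P (insn))	/* debug insns emit no code */
        return true;
      if (NONJUMP_INSN_P (insn))
        {
          rtx pat = PATTERN (insn);
          return (GET_CODE (pat) == USE
                  || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == CLOBBER_HIGH);
        }
      return false;
    }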
@@ -394,11 +395,12 @@

   insn = get_last_insn ();
   while (NOTE_P (insn)
          || (NONJUMP_INSN_P (insn)
              && (GET_CODE (PATTERN (insn)) == USE
-                 || GET_CODE (PATTERN (insn)) == CLOBBER)))
+                 || GET_CODE (PATTERN (insn)) == CLOBBER
+                 || GET_CODE (PATTERN (insn)) == CLOBBER_HIGH)))
     insn = PREV_INSN (insn);

   /* When a target threads its epilogue we might already have a
      suitable return insn.  If so put a label before it for the
      function_return_label.  */
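
Note: CLOBBER_HIGH is an RTL code that, as I understand this GCC cycle, was added for AArch64 SVE, where a tlsdesc-style call preserves the low bits of a vector register but invalidates the high part. Like a bare CLOBBER it is bookkeeping rather than a real instruction, which is why the backwards epilogue scan above must step over it too. An illustrative check, with an invented register in the comment:

    rtx pat = PATTERN (insn);
    if (GET_CODE (pat) == CLOBBER_HIGH)
      {
        /* e.g. (clobber_high (reg:TI v0)): only the bits of v0 above
           the call-preserved low part are invalidated.  */
        rtx target = XEXP (pat, 0);
        gcc_assert (REG_P (target));
      }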
@@ -1034,11 +1036,12 @@
    PNEW_THREAD points to a location that is to receive the place at which
    execution should continue.  */

 static void
 steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
-                              vec<rtx_insn *> *delay_list, resources *sets,
+                              vec<rtx_insn *> *delay_list,
+                              struct resources *sets,
                               struct resources *needed,
                               struct resources *other_needed,
                               int slots_to_fill, int *pslots_filled,
                               int *pannul_p, rtx *pnew_thread)
 {
@@ -1047,11 +1050,11 @@
   auto_vec<rtx_insn *, 5> new_delay_list;
   int must_annul = *pannul_p;
   int used_annul = 0;
   int i;
   struct resources cc_set;
-  bool *redundant;
+  rtx_insn **redundant;

   /* We can't do anything if there are more delay slots in SEQ than we
      can handle, or if we don't know that it will be a taken branch.
      We know that it will be a taken branch if it is either an unconditional
      branch or a conditional branch with a stricter branch condition.
@@ -1086,11 +1089,11 @@
      displacement.  Give the back end a chance to tell us we can't do
      this.  */
   if (! targetm.can_follow_jump (insn, seq->insn (0)))
     return;

-  redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
+  redundant = XALLOCAVEC (rtx_insn *, XVECLEN (seq, 0));
   for (i = 1; i < seq->len (); i++)
     {
       rtx_insn *trial = seq->insn (i);
       int flags;

@@ -1150,11 +1153,14 @@

   /* Record the effect of the instructions that were redundant and which
      we therefore decided not to copy.  */
   for (i = 1; i < seq->len (); i++)
     if (redundant[i])
-      update_block (seq->insn (i), insn);
+      {
+        fix_reg_dead_note (redundant[i], insn);
+        update_block (seq->insn (i), insn);
+      }

   /* Show the place to which we will be branching.  */
   *pnew_thread = first_active_target_insn (JUMP_LABEL (seq->insn (0)));

   /* Add any new insns to the delay list and update the count of the
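
Note: redundant[] changes from bool to rtx_insn * so the loop above can remember which earlier insn made each delay-slot candidate redundant, and hand it to fix_reg_dead_note. The point of the fixup: once the later copy is dropped, a REG_DEAD note sitting between the earlier insn and that spot wrongly claims the register's last use, and liveness computations would be misled. A rough sketch of such a fixup; reorg.c has the real fix_reg_dead_note, and this is not its exact code:

    static void
    sketch_fix_reg_dead_note (rtx_insn *start_insn, rtx_insn *stop_insn)
    {
      for (rtx_insn *p = next_nonnote_insn (start_insn); p != stop_insn;
           p = next_nonnote_insn (p))
        {
          rtx link = find_reg_note (p, REG_DEAD, NULL_RTX);
          /* If P claims a register set by START_INSN dies at P, that is
             now stale: the value is still needed at the place the deleted
             redundant copy occupied.  Drop the note.  */
          if (link
              && REG_P (XEXP (link, 0))
              && reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
            {
              remove_note (p, link);
              return;
            }
        }
    }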
@@ -1197,10 +1203,11 @@
     return;

   for (i = 1; i < seq->len (); i++)
     {
       rtx_insn *trial = seq->insn (i);
+      rtx_insn *prior_insn;

       /* If TRIAL sets CC0, stealing it will move it too far from the use
          of CC0.  */
       if (insn_references_resource_p (trial, sets, false)
           || insn_sets_resource_p (trial, needed, false)
@@ -1208,12 +1215,13 @@
           || (HAVE_cc0 && sets_cc0_p (PATTERN (trial))))

         break;

       /* If this insn was already done, we don't need it.  */
-      if (redundant_insn (trial, insn, *delay_list))
+      if ((prior_insn = redundant_insn (trial, insn, *delay_list)))
         {
+          fix_reg_dead_note (prior_insn, insn);
           update_block (trial, insn);
           delete_from_delay_slot (trial);
           continue;
         }

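
Note: redundant_insn evidently now returns the earlier insn (or NULL) rather than a plain truth value, so callers can capture it as prior_insn and feed it to fix_reg_dead_note. The doubled parentheses in the new condition are the idiomatic C way to tell GCC's -Wparentheses that the assignment-in-condition is intentional; a self-contained illustration with made-up names:

    #include <stdio.h>

    static const char *lookup (int key) { return key == 42 ? "found" : NULL; }

    int
    main (void)
    {
      const char *hit;
      if ((hit = lookup (42)))  /* extra parens: '=' is intentional */
        printf ("%s\n", hit);
      return 0;
    }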
@@ -1288,11 +1296,12 @@

       next_trial = next_nonnote_insn (trial);

       /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
       if (NONJUMP_INSN_P (trial)
-          && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
+          && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+              || GET_CODE (pat) == CLOBBER_HIGH))
         continue;

       if (GET_CODE (next_to_match) == GET_CODE (trial)
           /* We can't share an insn that sets cc0.  */
           && (!HAVE_cc0 || ! sets_cc0_p (pat))
@@ -1482,11 +1491,15 @@
       if (!INSN_P (trial))
         continue;
       --insns_to_search;

       pat = PATTERN (trial);
-      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+          || GET_CODE (pat) == CLOBBER_HIGH)
+        continue;
+
+      if (GET_CODE (trial) == DEBUG_INSN)
         continue;

       if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
         {
           /* Stop for a CALL and its delay slots because it is difficult to
@@ -1577,11 +1590,15 @@
       if (!INSN_P (trial))
         continue;
       --insns_to_search;

       pat = PATTERN (trial);
-      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+          || GET_CODE (pat) == CLOBBER_HIGH)
+        continue;
+
+      if (GET_CODE (trial) == DEBUG_INSN)
         continue;

       if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
         {
           bool annul_p = false;
@@ -1686,11 +1703,12 @@
        insn = prev_nonnote_insn (insn))
     if (insn == 0
         || LABEL_P (insn)
         || (NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) != USE
-            && GET_CODE (PATTERN (insn)) != CLOBBER))
+            && GET_CODE (PATTERN (insn)) != CLOBBER
+            && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH))
       return 0;

   return 1;
 }

@@ -1789,19 +1807,18 @@
           return;
         }
     }
 }

-/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
+/* Delete any REG_UNUSED notes that exist on INSN but not on OTHER_INSN.

    This handles the case of udivmodXi4 instructions which optimize their
-   output depending on whether any REG_UNUSED notes are present.
-   we must make sure that INSN calculates as many results as REDUNDANT_INSN
-   does.  */
+   output depending on whether any REG_UNUSED notes are present.  We must
+   make sure that INSN calculates as many results as OTHER_INSN does.  */

 static void
-update_reg_unused_notes (rtx_insn *insn, rtx redundant_insn)
+update_reg_unused_notes (rtx_insn *insn, rtx other_insn)
 {
   rtx link, next;

   for (link = REG_NOTES (insn); link; link = next)
     {
@@ -1809,12 +1826,11 @@

       if (REG_NOTE_KIND (link) != REG_UNUSED
           || !REG_P (XEXP (link, 0)))
         continue;

-      if (! find_regno_note (redundant_insn, REG_UNUSED,
-                             REGNO (XEXP (link, 0))))
+      if (!find_regno_note (other_insn, REG_UNUSED, REGNO (XEXP (link, 0))))
         remove_note (insn, link);
     }
 }

 static vec <rtx> sibling_labels;
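
Note: the udivmodXi4 remark in the comment above is easiest to see on a concrete pattern. An illustration with invented register numbers, not taken from any actual machine description:

    /* A udivmodsi4-style insn computes quotient and remainder at once:

         (parallel [(set (reg:SI 100) (udiv:SI (reg:SI 1) (reg:SI 2)))
                    (set (reg:SI 101) (umod:SI (reg:SI 1) (reg:SI 2)))])
           notes: (expr_list:REG_UNUSED (reg:SI 101) (nil))

       Seeing REG_UNUSED on reg 101, a backend may emit a cheaper
       divide-only form.  So when INSN is kept in place of OTHER_INSN,
       INSN may only keep a REG_UNUSED note that OTHER_INSN also has;
       otherwise INSN could stop computing a result that OTHER_INSN's
       consumers still read.  */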
@@ -2011,11 +2027,16 @@

       /* This must be an INSN or CALL_INSN.  */
       pat = PATTERN (trial);

       /* Stand-alone USE and CLOBBER are just for flow.  */
-      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+          || GET_CODE (pat) == CLOBBER_HIGH)
+        continue;
+
+      /* And DEBUG_INSNs never go into delay slots.  */
+      if (GET_CODE (trial) == DEBUG_INSN)
         continue;

       /* Check for resource conflict first, to avoid unnecessary
          splitting.  */
       if (! insn_references_resource_p (trial, &set, true)
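
Note: debug insns are var-location markers used only for debug-info generation; if one landed in a delay slot, code generation would differ between compiles with -g and without it, breaking -fcompare-debug. A sketch of the invariant these hunks maintain, with an invented checker name (GCC asserts nothing like this explicitly here):

    /* Sketch only: nothing inside a filled delay-slot SEQUENCE may be a
       debug insn.  */
    static void
    verify_no_debug_in_delay_slots (rtx_insn *seq_insn)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (seq_insn));
      for (int i = 1; i < seq->len (); i++)
        gcc_assert (!DEBUG_INSN_P (seq->insn (i)));
    }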
@@ -2133,11 +2154,16 @@

           /* This must be an INSN or CALL_INSN.  */
           pat = PATTERN (trial);

           /* Stand-alone USE and CLOBBER are just for flow.  */
-          if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+          if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+              || GET_CODE (pat) == CLOBBER_HIGH)
+            continue;
+
+          /* And DEBUG_INSNs do not go in delay slots.  */
+          if (GET_CODE (trial) == DEBUG_INSN)
            continue;

           /* If this already has filled delay slots, get the insn needing
              the delay slots.  */
           if (GET_CODE (pat) == SEQUENCE)
@@ -2205,12 +2231,12 @@
           && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
           && eligible_for_delay (insn, slots_filled, next_trial, flags)
           && ! can_throw_internal (trial))
         {
           /* See comment in relax_delay_slots about necessity of using
-             next_real_insn here.  */
-          rtx_insn *new_label = next_real_insn (next_trial);
+             next_real_nondebug_insn here.  */
+          rtx_insn *new_label = next_real_nondebug_insn (next_trial);

           if (new_label != 0)
             new_label = get_label_before (new_label, JUMP_LABEL (trial));
           else
             new_label = find_end_label (simple_return_rtx);
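
Note: as I read the accompanying emit-rtl change in this cycle, next_real_insn's semantics changed to count debug insns as "real", and next_real_nondebug_insn was introduced to keep the old behavior; reorg wants the next actual machine instruction, hence the mechanical call-site switch here and in relax_delay_slots below. Roughly what the new helper computes (a sketch, not the real definition):

    static rtx_insn *
    sketch_next_real_nondebug_insn (rtx_insn *insn)
    {
      do
        insn = NEXT_INSN (insn);
      while (insn
             && (NOTE_P (insn) || BARRIER_P (insn) || DEBUG_INSN_P (insn)));
      return insn;
    }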
@@ -2323,13 +2349,12 @@

    LIKELY is nonzero if it is extremely likely that the branch will be
    taken and THREAD_IF_TRUE is set.  This is used for the branch at the
    end of a loop back up to the top.

-   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
-   thread.  I.e., it is the fallthrough code of our jump or the target of the
-   jump when we are the only jump going there.
+   OWN_THREAD is true if we are the only user of the thread, i.e. it is
+   the target of the jump when we are the only jump going there.

    If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
    case, we can only take insns from the head of the thread for our delay
    slot.  We then adjust the jump to point after the insns we have taken.  */

@@ -2398,11 +2423,15 @@
           own_thread = 0;
           continue;
         }

       pat = PATTERN (trial);
-      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+          || GET_CODE (pat) == CLOBBER_HIGH)
+        continue;
+
+      if (GET_CODE (trial) == DEBUG_INSN)
         continue;

       /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
          don't separate or copy insns that set and use CC0.  */
       if (! insn_references_resource_p (trial, &set, true)
@@ -3116,11 +3145,11 @@
   rtx target_label;

   /* Look at every JUMP_INSN and see if we can improve it.  */
   for (insn = first; insn; insn = next)
     {
-      rtx_insn *other;
+      rtx_insn *other, *prior_insn;
       bool crossing;

       next = next_active_insn (insn);

       /* If this is a jump insn, see if it now jumps to a jump, jumps to
@@ -3203,11 +3232,11 @@

       if (simplejump_or_return_p (insn)
           && (other = prev_active_insn (insn)) != 0
           && any_condjump_p (other)
           && no_labels_between_p (other, insn)
-          && 0 > mostly_true_jump (other))
+          && mostly_true_jump (other) < 0)
         {
           rtx other_target = JUMP_LABEL (other);
           target_label = JUMP_LABEL (insn);

           if (invert_jump (as_a <rtx_jump_insn *> (other), target_label, 0))
@@ -3222,12 +3251,13 @@
       delay_insn = pat->insn (0);

       /* See if the first insn in the delay slot is redundant with some
          previous insn.  Remove it from the delay slot if so; then set up
          to reprocess this insn.  */
-      if (redundant_insn (pat->insn (1), delay_insn, vNULL))
+      if ((prior_insn = redundant_insn (pat->insn (1), delay_insn, vNULL)))
         {
+          fix_reg_dead_note (prior_insn, insn);
           update_block (pat->insn (1), insn);
           delete_from_delay_slot (pat->insn (1));
           next = prev_active_insn (next);
           continue;
         }
@@ -3303,14 +3333,14 @@
           CROSSING_JUMP_P (delay_jump_insn) = 1;
         }

       /* If the first insn at TARGET_LABEL is redundant with a previous
          insn, redirect the jump to the following insn and process again.
-         We use next_real_insn instead of next_active_insn so we
+         We use next_real_nondebug_insn instead of next_active_insn so we
          don't skip USE-markers, or we'll end up with incorrect
          liveness info.  */
-      trial = next_real_insn (target_label);
+      trial = next_real_nondebug_insn (target_label);
       if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
           && redundant_insn (trial, insn, vNULL)
           && ! can_throw_internal (trial))
         {
           /* Figure out where to emit the special USE insn so we don't
@@ -3321,11 +3351,11 @@

           if (tmp)
             {
               /* Insert the special USE insn and update dataflow info.
                  We know "trial" is an insn here as it is the output of
-                 next_real_insn () above.  */
+                 next_real_nondebug_insn () above.  */
               update_block (as_a <rtx_insn *> (trial), tmp);

               /* Now emit a label before the special USE insn, and
                  redirect our jump to the new label.  */
               target_label = get_label_before (PREV_INSN (tmp), target_label);
@@ -3359,14 +3389,15 @@
               continue;
             }
         }

       /* See if we have a simple (conditional) jump that is useless.  */
-      if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
-          && ! condjump_in_parallel_p (delay_jump_insn)
+      if (!CROSSING_JUMP_P (delay_jump_insn)
+          && !INSN_ANNULLED_BRANCH_P (delay_jump_insn)
+          && !condjump_in_parallel_p (delay_jump_insn)
           && prev_active_insn (as_a<rtx_insn *> (target_label)) == insn
-          && ! BARRIER_P (prev_nonnote_insn (as_a<rtx_insn *> (target_label)))
+          && !BARRIER_P (prev_nonnote_insn (as_a<rtx_insn *> (target_label)))
           /* If the last insn in the delay slot sets CC0 for some insn,
              various code assumes that it is in a delay slot.  We could
              put it back where it belonged and delete the register notes,
              but it doesn't seem worthwhile in this uncommon case.  */
           && (!HAVE_cc0
@@ -3605,17 +3636,17 @@
          RETURN, delete the SEQUENCE and output the individual insns,
          followed by the RETURN.  Then set things up so we try to find
          insns for its delay slots, if it needs some.  */
       if (ANY_RETURN_P (PATTERN (jump_insn)))
         {
-          rtx_insn *prev = PREV_INSN (insn);
+          rtx_insn *after = PREV_INSN (insn);

           delete_related_insns (insn);
-          for (i = 1; i < XVECLEN (pat, 0); i++)
-            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
-
-          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
+          insn = jump_insn;
+          for (i = 1; i < pat->len (); i++)
+            after = emit_copy_of_insn_after (pat->insn (i), after);
+          add_insn_after (insn, after, NULL);
           emit_barrier_after (insn);

           if (slots)
             obstack_ptr_grow (&unfilled_slots_obstack, insn);
         }
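
Note: the old code re-emitted the delay-slot insns from their bare PATTERNs and synthesized a brand-new jump, losing REG_NOTES and insn flags along the way. The rewrite copies each delay insn with emit_copy_of_insn_after, which carries notes over, and relinks the original jump_insn via add_insn_after, so its identity (JUMP_LABEL, flags such as CROSSING_JUMP_P) survives. A sketch of the stream transformation, mirroring the hunk above rather than quoting it:

    /* Sketch only: turn (sequence [J; D1; ...; Dn]) BARRIER into
       D1' ... Dn' J BARRIER, where Di' are note-preserving copies and
       J is the original jump insn relinked in place.  */
    static void
    sketch_unwrap_return_sequence (rtx_insn *seq_insn)
    {
      rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (seq_insn));
      rtx_insn *jump = pat->insn (0);
      rtx_insn *after = PREV_INSN (seq_insn);

      delete_related_insns (seq_insn);
      for (int i = 1; i < pat->len (); i++)
        after = emit_copy_of_insn_after (pat->insn (i), after);
      add_insn_after (jump, after, NULL);  /* relink J itself, not a clone */
      emit_barrier_after (jump);
    }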
@@ -3788,11 +3819,12 @@
   for (insn = first; insn; insn = NEXT_INSN (insn))
     {
       if (! insn->deleted ()
           && NONJUMP_INSN_P (insn)
           && GET_CODE (PATTERN (insn)) != USE
-          && GET_CODE (PATTERN (insn)) != CLOBBER)
+          && GET_CODE (PATTERN (insn)) != CLOBBER
+          && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH)
         {
           if (GET_CODE (PATTERN (insn)) == SEQUENCE)
             {
               rtx control;
               j = XVECLEN (PATTERN (insn), 0) - 1;