comparison gcc/reload1.c @ 55:77e2b8dfacca gcc-4.4.5

update it from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents a06113de4d67
children b7f97abdc517
comparison
52:c156f1bd5cd9 (old) -> 55:77e2b8dfacca (new)
@@ -316,12 +316,13 @@
 {
   int from;                     /* Register number to be eliminated.  */
   int to;                       /* Register number used as replacement.  */
   HOST_WIDE_INT initial_offset; /* Initial difference between values.  */
   int can_eliminate;            /* Nonzero if this elimination can be done.  */
-  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
-                                   insns made by reload.  */
+  int can_eliminate_previous;   /* Value returned by TARGET_CAN_ELIMINATE
+                                   target hook in previous scan over insns
+                                   made by reload.  */
   HOST_WIDE_INT offset;         /* Current offset between the two regs.  */
   HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
   int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
   rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                    We cannot simply compare the number since
@@ -444,11 +445,10 @@
 static void delete_address_reloads_1 (rtx, rtx, rtx);
 static rtx inc_for_reload (rtx, rtx, rtx, int);
 #ifdef AUTO_INC_DEC
 static void add_auto_inc_notes (rtx, rtx);
 #endif
-static void copy_eh_notes (rtx, rtx);
 static void substitute (rtx *, const_rtx, rtx);
 static bool gen_reload_chain_without_interm_reg_p (int, int);
 static int reloads_conflict (int, int);
 static rtx gen_reload (rtx, rtx, int, enum reload_type);
 static rtx emit_insn_if_valid_for_reload (rtx);
@@ -636,21 +636,21 @@
   basic_block *tos, *worklist, bb;

   /* If we're not optimizing, then just err on the safe side.  */
   if (!optimize)
     return true;

   /* First determine which blocks can reach exit via normal paths.  */
   tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);

   FOR_EACH_BB (bb)
     bb->flags &= ~BB_REACHABLE;

   /* Place the exit block on our worklist.  */
   EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
   *tos++ = EXIT_BLOCK_PTR;

   /* Iterate: find everything reachable from what we've already seen.  */
   while (tos != worklist)
     {
       bb = *--tos;

@@ -798,11 +798,11 @@
          previously-marked insns left by say regmove.  */
       if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
           && GET_MODE (insn) != VOIDmode)
         PUT_MODE (insn, VOIDmode);

-      if (INSN_P (insn))
+      if (NONDEBUG_INSN_P (insn))
         scan_paradoxical_subregs (PATTERN (insn));

       if (set != 0 && REG_P (SET_DEST (set)))
         {
           rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
@@ -898,11 +898,11 @@
      Do not touch virtual registers.  */

   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
     temp_pseudo_reg_arr[n++] = i;

   if (ira_conflicts_p)
     /* Ask IRA to order pseudo-registers for better stack slot
        sharing.  */
     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);

@@ -995,14 +995,16 @@
      so this problem goes away.  But that's very hairy.  */

   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
     if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
       {
-        rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
-
-        if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
-                                     XEXP (x, 0)))
+        rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
+                                NULL_RTX);
+
+        if (strict_memory_address_addr_space_p
+            (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
+             MEM_ADDR_SPACE (x)))
           reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
         else if (CONSTANT_P (XEXP (x, 0))
                  || (REG_P (XEXP (x, 0))
                      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
                  || (GET_CODE (XEXP (x, 0)) == PLUS
@@ -1069,11 +1071,11 @@
          since it might have changed elimination offsets.  */
       if (starting_frame_size != get_frame_size ())
         something_changed = 1;

       /* Even if the frame size remained the same, we might still have
          changed elimination offsets, e.g. if find_reloads called
          force_const_mem requiring the back end to allocate a constant
          pool base register that needs to be saved on the stack.  */
       else if (!verify_initial_elim_offsets ())
         something_changed = 1;

@@ -1180,11 +1182,11 @@
      pseudo.  */

   if (! frame_pointer_needed)
     FOR_EACH_BB (bb)
       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);

   /* Come here (with failure set nonzero) if we can't get enough spill
      regs.  */
  failed:

   CLEAR_REG_SET (&changed_allocation_pseudos);
@@ -1229,10 +1231,56 @@
                 }
               MEM_NOTRAP_P (reg) = 1;
             }
           else if (reg_equiv_mem[i])
             XEXP (reg_equiv_mem[i], 0) = addr;
+        }
+
+      /* We don't want complex addressing modes in debug insns
+         if simpler ones will do, so delegitimize equivalences
+         in debug insns.  */
+      if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
+        {
+          rtx reg = regno_reg_rtx[i];
+          rtx equiv = 0;
+          df_ref use, next;
+
+          if (reg_equiv_constant[i])
+            equiv = reg_equiv_constant[i];
+          else if (reg_equiv_invariant[i])
+            equiv = reg_equiv_invariant[i];
+          else if (reg && MEM_P (reg))
+            equiv = targetm.delegitimize_address (reg);
+          else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
+            equiv = reg;
+
+          if (equiv == reg)
+            continue;
+
+          for (use = DF_REG_USE_CHAIN (i); use; use = next)
+            {
+              insn = DF_REF_INSN (use);
+
+              /* Make sure the next ref is for a different instruction,
+                 so that we're not affected by the rescan.  */
+              next = DF_REF_NEXT_REG (use);
+              while (next && DF_REF_INSN (next) == insn)
+                next = DF_REF_NEXT_REG (next);
+
+              if (DEBUG_INSN_P (insn))
+                {
+                  if (!equiv)
+                    {
+                      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
+                      df_insn_rescan_debug_internal (insn);
+                    }
+                  else
+                    INSN_VAR_LOCATION_LOC (insn)
+                      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
+                                              reg, equiv);
+                }
+            }
         }
     }

   /* We must set reload_completed now since the cleanup_subreg_operands call
      below will re-recognize each insn and reload may have generated insns
@@ -2563,11 +2611,11 @@
              to do so at other times because that would change the
              structure of the insn in a way that reload can't handle.
              We special-case the commonest situation in
              eliminate_regs_in_insn, so just replace a PLUS with a
              PLUS here, unless inside a MEM.  */
-          if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
+          if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
              && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
            return ep->to_rtx;
          else
            return gen_rtx_PLUS (Pmode, ep->to_rtx,
                                 plus_constant (XEXP (x, 1),
@@ -2608,11 +2656,11 @@
             && REGNO (new0) >= FIRST_PSEUDO_REGISTER
             && reg_renumber[REGNO (new0)] < 0
             && reg_equiv_constant[REGNO (new0)] != 0)
           new0 = reg_equiv_constant[REGNO (new0)];

-        new_rtx = form_sum (new0, new1);
+        new_rtx = form_sum (GET_MODE (x), new0, new1);

         /* As above, if we are not inside a MEM we do not want to
            turn a PLUS into something else.  We might try to do so here
            for an addition of 0 if we aren't optimizing.  */
         if (! mem_mode && GET_CODE (new_rtx) != PLUS)
@@ -2629,11 +2677,11 @@
          so that we have (plus (mult ..) ..).  This is needed in order
          to keep load-address insns valid.  This case is pathological.
          We ignore the possibility of overflow here.  */
       if (REG_P (XEXP (x, 0))
           && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
-          && GET_CODE (XEXP (x, 1)) == CONST_INT)
+          && CONST_INT_P (XEXP (x, 1)))
         for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
              ep++)
           if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
             {
               if (! mem_mode
@@ -2684,11 +2732,11 @@
           if (REG_NOTE_KIND (x) == REG_DEAD)
             return (XEXP (x, 1)
                     ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
                     : NULL_RTX);

-          x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
+          x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
         }
     }

       /* ... fall through ... */

@@ -2807,11 +2855,11 @@
                           eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
                                             insn, true));

     case USE:
       /* Handle insn_list USE that a call to a pure function may generate.  */
-      new_rtx = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
+      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false);
       if (new_rtx != XEXP (x, 0))
         return gen_rtx_USE (GET_MODE (x), new_rtx);
       return x;

     case CLOBBER:
@@ -3041,19 +3089,19 @@
                  this elimination can't be done.  */
               rtx src = SET_SRC (x);

               if (GET_CODE (src) == PLUS
                   && XEXP (src, 0) == SET_DEST (x)
-                  && GET_CODE (XEXP (src, 1)) == CONST_INT)
+                  && CONST_INT_P (XEXP (src, 1)))
                 ep->offset -= INTVAL (XEXP (src, 1));
               else
                 ep->can_eliminate = 0;
             }
         }

-      elimination_effects (SET_DEST (x), 0);
-      elimination_effects (SET_SRC (x), 0);
+      elimination_effects (SET_DEST (x), VOIDmode);
+      elimination_effects (SET_SRC (x), VOIDmode);
       return;

     case MEM:
       /* Our only special processing is to pass the mode of the MEM to our
          recursive call.  */
@@ -3147,11 +3195,12 @@
     {
       gcc_assert (GET_CODE (PATTERN (insn)) == USE
                   || GET_CODE (PATTERN (insn)) == CLOBBER
                   || GET_CODE (PATTERN (insn)) == ADDR_VEC
                   || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
-                  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
+                  || GET_CODE (PATTERN (insn)) == ASM_INPUT
+                  || DEBUG_INSN_P (insn));
       return 0;
     }

   if (old_set != 0 && REG_P (SET_DEST (old_set))
       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
@@ -3176,11 +3225,11 @@
       while (base != ep->to_rtx)
         {
           rtx prev_insn, prev_set;

           if (GET_CODE (base) == PLUS
-              && GET_CODE (XEXP (base, 1)) == CONST_INT)
+              && CONST_INT_P (XEXP (base, 1)))
             {
               offset += INTVAL (XEXP (base, 1));
               base = XEXP (base, 0);
             }
           else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
@@ -3251,11 +3300,11 @@
     {
       if (GET_CODE (SET_SRC (old_set)) == PLUS)
         plus_src = SET_SRC (old_set);
       /* First see if the source is of the form (plus (...) CST).  */
       if (plus_src
-          && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
+          && CONST_INT_P (XEXP (plus_src, 1)))
         plus_cst_src = plus_src;
       else if (REG_P (SET_SRC (old_set))
                || plus_src)
         {
           /* Otherwise, see if we have a REG_EQUAL note of the form
@@ -3264,11 +3313,11 @@
           for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
             {
               if ((REG_NOTE_KIND (links) == REG_EQUAL
                    || REG_NOTE_KIND (links) == REG_EQUIV)
                   && GET_CODE (XEXP (links, 0)) == PLUS
-                  && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
+                  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
                 {
                   plus_cst_src = XEXP (links, 0);
                   break;
                 }
             }
@@ -3324,11 +3373,11 @@
       old_set = single_set (insn);

       /* First see if this insn remains valid when we make the
          change.  If not, try to replace the whole pattern with
          a simple set (this may help if the original insn was a
          PARALLEL that was only recognized as single_set due to
          REG_UNUSED notes).  If this isn't valid either, keep
          the INSN_CODE the same and let reload fix it up.  */
       if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
         {
           rtx new_pat = gen_rtx_SET (VOIDmode,
@@ -3347,11 +3396,11 @@
           goto done;
         }
     }

   /* Determine the effects of this insn on elimination offsets.  */
-  elimination_effects (old_body, 0);
+  elimination_effects (old_body, VOIDmode);

   /* Eliminate all eliminable registers occurring in operands that
      can be handled by reload.  */
   extract_insn (insn);
   for (i = 0; i < recog_data.n_operands; i++)
@@ -3388,11 +3437,11 @@
               && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
                   || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
             in_plus = true;

           substed_operand[i]
-            = eliminate_regs_1 (recog_data.operand[i], 0,
+            = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
                                 replace ? insn : NULL_RTX,
                                 is_set_src || in_plus);
           if (substed_operand[i] != orig_operand[i])
             val = 1;
           /* Terminate the search in check_eliminable_occurrences at
@@ -3518,11 +3567,11 @@
      to a register that we eliminate and could cause a different number
      of spill registers to be needed in the final reload pass than in
      the pre-passes.  */
   if (val && REG_NOTES (insn) != 0)
     REG_NOTES (insn)
-      = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
+      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true);

   return val;
 }

 /* Loop through all elimination pairs.
@@ -3576,11 +3625,11 @@
   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
         && (GET_CODE (x) != SET
             || GET_CODE (SET_SRC (x)) != PLUS
             || XEXP (SET_SRC (x), 0) != dest
-            || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
+            || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
       {
         reg_eliminate[i].can_eliminate_previous
           = reg_eliminate[i].can_eliminate = 0;
         num_eliminable--;
       }
@@ -3699,13 +3748,14 @@
 {
   int previous_frame_pointer_needed = frame_pointer_needed;
   struct elim_table *ep;

   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
-    if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
+    if ((ep->from == HARD_FRAME_POINTER_REGNUM
+         && targetm.frame_pointer_required ())
 #ifdef ELIMINABLE_REGS
-        || ! CAN_ELIMINATE (ep->from, ep->to)
+        || ! targetm.can_eliminate (ep->from, ep->to)
 #endif
         )
       ep->can_eliminate = 0;

   /* Look for the case where we have discovered that we can't replace
@@ -3807,13 +3857,13 @@
        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
     {
       ep->from = ep1->from;
       ep->to = ep1->to;
       ep->can_eliminate = ep->can_eliminate_previous
-        = (CAN_ELIMINATE (ep->from, ep->to)
+        = (targetm.can_eliminate (ep->from, ep->to)
            && ! (ep->to == STACK_POINTER_REGNUM
                  && frame_pointer_needed
                  && (! SUPPORTS_STACK_ALIGNMENT
                      || ! stack_realign_fp)));
     }
 #else
   reg_eliminate[0].from = reg_eliminate_1[0].from;
@@ -3908,11 +3958,11 @@
         {
           /* Record the current hard register the pseudo is allocated to
              in pseudo_previous_regs so we avoid reallocating it to the
              same hard reg in a later pass.  */
           gcc_assert (reg_renumber[i] >= 0);

           SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
           /* Mark it as no longer having a hard register home.  */
           reg_renumber[i] = -1;
           if (ira_conflicts_p)
             /* Inform IRA about the change.  */
@@ -4084,30 +4134,15 @@

 static void
 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
 {
   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
-  unsigned int trap_count;
-  rtx i;
-
   if (note == NULL)
     return;
-
-  if (may_trap_p (PATTERN (insn)))
-    trap_count = 1;
-  else
-    {
-      remove_note (insn, note);
-      trap_count = 0;
-    }
-
-  for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
-    if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
-      {
-        trap_count++;
-        add_reg_note (i, REG_EH_REGION, XEXP (note, 0));
-      }
+  if (!insn_could_throw_p (insn))
+    remove_note (insn, note);
+  copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
 }

 /* Reload pseudo-registers into hard regs around each insn as needed.
    Additional register load insns are output before the insn that needs it
    and perhaps store insns after insns that modify the reloaded pseudo reg.
@@ -4309,35 +4344,34 @@
               n = count_occurrences (PATTERN (p), reload_reg, 0);
               if (! n)
                 continue;
               if (n == 1)
                 {
-                  n = validate_replace_rtx (reload_reg,
-                                            gen_rtx_fmt_e (code,
-                                                           mode,
-                                                           reload_reg),
-                                            p);
+                  rtx replace_reg
+                    = gen_rtx_fmt_e (code, mode, reload_reg);
+
+                  validate_replace_rtx_group (reload_reg,
+                                              replace_reg, p);
+                  n = verify_changes (0);

                   /* We must also verify that the constraints
-                     are met after the replacement.  */
-                  extract_insn (p);
+                     are met after the replacement.  Make sure
+                     extract_insn is only called for an insn
+                     where the replacements were found to be
+                     valid so far. */
                   if (n)
-                    n = constrain_operands (1);
+                    {
+                      extract_insn (p);
+                      n = constrain_operands (1);
+                    }
+
+                  /* If the constraints were not met, then
+                     undo the replacement, else confirm it.  */
+                  if (!n)
+                    cancel_changes (0);
                   else
-                    break;
-
-                  /* If the constraints were not met, then
-                     undo the replacement.  */
-                  if (!n)
-                    {
-                      validate_replace_rtx (gen_rtx_fmt_e (code,
-                                                           mode,
-                                                           reload_reg),
-                                            reload_reg, p);
-                      break;
-                    }
-
+                    confirm_change_group ();
                 }
               break;
             }
           if (n == 1)
             {
@@ -4446,11 +4480,11 @@
 /* Discard all record of any value reloaded from X,
    or reloaded in X from someplace else;
    unless X is an output reload reg of the current insn.

    X may be a hard reg (the reload reg)
    or it may be a pseudo reg that was reloaded from.

    When DATA is non-NULL just mark the registers in regset
    to be forgotten later.  */

 static void
@@ -5213,15 +5247,15 @@
       /* We want constrain operands to treat this insn strictly in
          its validity determination, i.e., the way it would after
          reload has completed.  */
       result = constrain_operands (1);
     }

   delete_insns_since (last);
   return result;
 }

   /* It looks like other cases in gen_reload are not possible for
      chain reloads or do need an intermediate hard registers.  */
   return true;
 }

@@ -6946,11 +6980,11 @@
           && ! conflicts_with_override (reloadreg)
           && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
                                rl->when_needed, old, rl->out, j, 0))
         {
           rtx temp = PREV_INSN (insn);
-          while (temp && NOTE_P (temp))
+          while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
             temp = PREV_INSN (temp);
           if (temp
               && NONJUMP_INSN_P (temp)
               && GET_CODE (PATTERN (temp)) == SET
               && SET_DEST (PATTERN (temp)) == old
@@ -6989,10 +7023,17 @@
                   /* Inform IRA about the change.  */
                   ira_mark_allocation_change (REGNO (old));
                   alter_reg (REGNO (old), -1, false);
                 }
               special = 1;
+
+              /* Adjust any debug insns between temp and insn.  */
+              while ((temp = NEXT_INSN (temp)) != insn)
+                if (DEBUG_INSN_P (temp))
+                  replace_rtx (PATTERN (temp), old, reloadreg);
+                else
+                  gcc_assert (NOTE_P (temp));
             }
           else
             {
               SET_DEST (PATTERN (temp)) = old;
             }
@@ -7094,14 +7135,21 @@
           if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
             second_reload_reg = 0;
           else if (new_class == NO_REGS)
             {
               if (reload_adjust_reg_for_icode (&second_reload_reg,
-                                               third_reload_reg, sri.icode))
-                icode = sri.icode, third_reload_reg = 0;
+                                               third_reload_reg,
+                                               (enum insn_code) sri.icode))
+                {
+                  icode = (enum insn_code) sri.icode;
+                  third_reload_reg = 0;
+                }
               else
-                oldequiv = old, real_oldequiv = real_old;
+                {
+                  oldequiv = old;
+                  real_oldequiv = real_old;
+                }
             }
           else if (sri.icode != CODE_FOR_nothing)
             /* We currently lack a way to express this in reloads.  */
             gcc_unreachable ();
           else
@@ -7113,28 +7161,38 @@
               if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
                 {
                   if (reload_adjust_reg_for_temp (&second_reload_reg,
                                                   third_reload_reg,
                                                   new_class, mode))
-                    third_reload_reg = 0, tertiary_icode = sri2.icode;
+                    {
+                      third_reload_reg = 0;
+                      tertiary_icode = (enum insn_code) sri2.icode;
+                    }
                   else
-                    oldequiv = old, real_oldequiv = real_old;
+                    {
+                      oldequiv = old;
+                      real_oldequiv = real_old;
+                    }
                 }
               else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
                 {
                   rtx intermediate = second_reload_reg;

                   if (reload_adjust_reg_for_temp (&intermediate, NULL,
                                                   new_class, mode)
                       && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
-                                                      sri2.icode))
+                                                      ((enum insn_code)
+                                                       sri2.icode)))
                     {
                       second_reload_reg = intermediate;
-                      tertiary_icode = sri2.icode;
+                      tertiary_icode = (enum insn_code) sri2.icode;
                     }
                   else
-                    oldequiv = old, real_oldequiv = real_old;
+                    {
+                      oldequiv = old;
+                      real_oldequiv = real_old;
+                    }
                 }
               else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
                 {
                   rtx intermediate = second_reload_reg;

@@ -7142,18 +7200,24 @@
                                                   new_class, mode)
                       && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
                                                      new_t_class, mode))
                     {
                       second_reload_reg = intermediate;
-                      tertiary_icode = sri2.icode;
+                      tertiary_icode = (enum insn_code) sri2.icode;
                     }
                   else
-                    oldequiv = old, real_oldequiv = real_old;
+                    {
+                      oldequiv = old;
+                      real_oldequiv = real_old;
+                    }
                 }
               else
-                /* This could be handled more intelligently too.  */
-                oldequiv = old, real_oldequiv = real_old;
+                {
+                  /* This could be handled more intelligently too.  */
+                  oldequiv = old;
+                  real_oldequiv = real_old;
+                }
             }
         }

       /* If we still need a secondary reload register, check
          to see if it is being used as a scratch or intermediate
@@ -7226,11 +7290,11 @@
         gen_reload (reloadreg, real_oldequiv, rl->opnum,
                     rl->when_needed);
     }

   if (flag_non_call_exceptions)
-    copy_eh_notes (insn, get_insns ());
+    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

   /* End this sequence.  */
   *where = get_insns ();
   end_sequence ();

@@ -7446,10 +7510,10 @@
     }
   else
     output_reload_insns[rl->opnum] = get_insns ();

   if (flag_non_call_exceptions)
-    copy_eh_notes (insn, get_insns ());
+    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

   end_sequence ();
 }

7456 /* Do input reloading for reload RL, which is for the insn described by CHAIN 7520 /* Do input reloading for reload RL, which is for the insn described by CHAIN
8058 reg_reloaded_insn[src_regno + k] = store_insn; 8122 reg_reloaded_insn[src_regno + k] = store_insn;
8059 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k); 8123 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8060 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k); 8124 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8061 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k, 8125 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8062 mode)) 8126 mode))
8063 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, 8127 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8064 src_regno + k); 8128 src_regno + k);
8065 else 8129 else
8066 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, 8130 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8067 src_regno + k); 8131 src_regno + k);
8068 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k); 8132 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
@@ -8070,11 +8134,11 @@
                     SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
                   else
                     CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
                 }
               reg_last_reload_reg[out_regno] = src_reg;
               /* We have to set reg_has_output_reload here, or else
                  forget_old_reloads_1 will clear reg_last_reload_reg
                  right away.  */
               SET_REGNO_REG_SET (&reg_has_output_reload,
                                  out_regno);
             }
@@ -8421,11 +8485,11 @@
       if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
         n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
                                             reg, 0);
       if (substed)
         n_occurrences += count_occurrences (PATTERN (insn),
-                                            eliminate_regs (substed, 0,
+                                            eliminate_regs (substed, VOIDmode,
                                                             NULL_RTX), 0);
       for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
         {
           gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
           n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
@@ -8560,12 +8624,12 @@
     return;
   set = single_set (next);
   set2 = single_set (prev);
   if (! set || ! set2
       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
-      || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
-      || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
+      || !CONST_INT_P (XEXP (SET_SRC (set), 1))
+      || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
     return;
   dst = SET_DEST (set);
   if (! rtx_equal_p (dst, SET_DEST (set2))
       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
@@ -8786,11 +8850,11 @@
          RELOADREG, save that back, then decrement RELOADREG so it has
          the original value.  */

       emit_insn (gen_add2_insn (reloadreg, inc));
       store = emit_insn (gen_move_insn (incloc, reloadreg));
-      if (GET_CODE (inc) == CONST_INT)
+      if (CONST_INT_P (inc))
         emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
       else
         emit_insn (gen_sub2_insn (reloadreg, inc));
     }

@@ -8821,25 +8885,10 @@
         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
           add_auto_inc_notes (insn, XVECEXP (x, i, j));
     }
 }
 #endif
-
-/* Copy EH notes from an insn to its reloads.  */
-static void
-copy_eh_notes (rtx insn, rtx x)
-{
-  rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
-  if (eh_note)
-    {
-      for (; x != 0; x = NEXT_INSN (x))
-        {
-          if (may_trap_p (PATTERN (x)))
-            add_reg_note (x, REG_EH_REGION, XEXP (eh_note, 0));
-        }
-    }
-}

 /* This is used by reload pass, that does emit some instructions after
    abnormal calls moving basic block end, but in fact it wants to emit
    them on the edge.  Looks for abnormal call edges, find backward the
    proper call and fix the damage.
@@ -8919,11 +8968,11 @@
               insn = next;
             }
         }

       /* It may be that we don't find any such trapping insn.  In this
          case we discovered quite late that the insn that had been
          marked as can_throw_internal in fact couldn't trap at all.
          So we should in fact delete the EH edges out of the block.  */
       else
         purge_dead_edges (bb);
     }