Mercurial > hg > CbC > CbC_gcc
comparison gcc/reload1.c @ 131:84e7813d76e9
gcc-8.2
author | mir3636 |
---|---|
date | Thu, 25 Oct 2018 07:37:49 +0900 |
parents | 04ced10e8804 |
children | 1830386684a0 |
comparison
equal
deleted
inserted
replaced
111:04ced10e8804 | 131:84e7813d76e9 |
---|---|
1 /* Reload pseudo regs into hard regs for insns that require hard regs. | 1 /* Reload pseudo regs into hard regs for insns that require hard regs. |
2 Copyright (C) 1987-2017 Free Software Foundation, Inc. | 2 Copyright (C) 1987-2018 Free Software Foundation, Inc. |
3 | 3 |
4 This file is part of GCC. | 4 This file is part of GCC. |
5 | 5 |
6 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
7 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
198 | 198 |
199 /* Record the stack slot for each spilled hard register. */ | 199 /* Record the stack slot for each spilled hard register. */ |
200 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; | 200 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; |
201 | 201 |
202 /* Width allocated so far for that stack slot. */ | 202 /* Width allocated so far for that stack slot. */ |
203 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; | 203 static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; |
204 | 204 |
205 /* Record which pseudos needed to be spilled. */ | 205 /* Record which pseudos needed to be spilled. */ |
206 static regset_head spilled_pseudos; | 206 static regset_head spilled_pseudos; |
207 | 207 |
208 /* Record which pseudos changed their allocation in finish_spills. */ | 208 /* Record which pseudos changed their allocation in finish_spills. */ |
259 | 259 |
260 struct elim_table | 260 struct elim_table |
261 { | 261 { |
262 int from; /* Register number to be eliminated. */ | 262 int from; /* Register number to be eliminated. */ |
263 int to; /* Register number used as replacement. */ | 263 int to; /* Register number used as replacement. */ |
264 HOST_WIDE_INT initial_offset; /* Initial difference between values. */ | 264 poly_int64_pod initial_offset; /* Initial difference between values. */ |
265 int can_eliminate; /* Nonzero if this elimination can be done. */ | 265 int can_eliminate; /* Nonzero if this elimination can be done. */ |
266 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE | 266 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE |
267 target hook in previous scan over insns | 267 target hook in previous scan over insns |
268 made by reload. */ | 268 made by reload. */ |
269 HOST_WIDE_INT offset; /* Current offset between the two regs. */ | 269 poly_int64_pod offset; /* Current offset between the two regs. */ |
270 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */ | 270 poly_int64_pod previous_offset; /* Offset at end of previous insn. */ |
271 int ref_outside_mem; /* "to" has been referenced outside a MEM. */ | 271 int ref_outside_mem; /* "to" has been referenced outside a MEM. */ |
272 rtx from_rtx; /* REG rtx for the register to be eliminated. | 272 rtx from_rtx; /* REG rtx for the register to be eliminated. |
273 We cannot simply compare the number since | 273 We cannot simply compare the number since |
274 we might then spuriously replace a hard | 274 we might then spuriously replace a hard |
275 register corresponding to a pseudo | 275 register corresponding to a pseudo |
311 have yet encountered a label and the second table is an array of arrays, | 311 have yet encountered a label and the second table is an array of arrays, |
312 one entry in the latter array for each elimination. */ | 312 one entry in the latter array for each elimination. */ |
313 | 313 |
314 static int first_label_num; | 314 static int first_label_num; |
315 static char *offsets_known_at; | 315 static char *offsets_known_at; |
316 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS]; | 316 static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS]; |
317 | 317 |
318 vec<reg_equivs_t, va_gc> *reg_equivs; | 318 vec<reg_equivs_t, va_gc> *reg_equivs; |
319 | 319 |
320 /* Stack of addresses where an rtx has been changed. We can undo the | 320 /* Stack of addresses where an rtx has been changed. We can undo the |
321 changes by popping items off the stack and restoring the original | 321 changes by popping items off the stack and restoring the original |
396 static void do_output_reload (struct insn_chain *, struct reload *, int); | 396 static void do_output_reload (struct insn_chain *, struct reload *, int); |
397 static void emit_reload_insns (struct insn_chain *); | 397 static void emit_reload_insns (struct insn_chain *); |
398 static void delete_output_reload (rtx_insn *, int, int, rtx); | 398 static void delete_output_reload (rtx_insn *, int, int, rtx); |
399 static void delete_address_reloads (rtx_insn *, rtx_insn *); | 399 static void delete_address_reloads (rtx_insn *, rtx_insn *); |
400 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *); | 400 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *); |
401 static void inc_for_reload (rtx, rtx, rtx, int); | 401 static void inc_for_reload (rtx, rtx, rtx, poly_int64); |
402 static void add_auto_inc_notes (rtx_insn *, rtx); | 402 static void add_auto_inc_notes (rtx_insn *, rtx); |
403 static void substitute (rtx *, const_rtx, rtx); | 403 static void substitute (rtx *, const_rtx, rtx); |
404 static bool gen_reload_chain_without_interm_reg_p (int, int); | 404 static bool gen_reload_chain_without_interm_reg_p (int, int); |
405 static int reloads_conflict (int, int); | 405 static int reloads_conflict (int, int); |
406 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type); | 406 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type); |
885 /* This loop scans the entire function each go-round | 885 /* This loop scans the entire function each go-round |
886 and repeats until one repetition spills no additional hard regs. */ | 886 and repeats until one repetition spills no additional hard regs. */ |
887 for (;;) | 887 for (;;) |
888 { | 888 { |
889 int something_changed; | 889 int something_changed; |
890 HOST_WIDE_INT starting_frame_size; | 890 poly_int64 starting_frame_size; |
891 | 891 |
892 starting_frame_size = get_frame_size (); | 892 starting_frame_size = get_frame_size (); |
893 something_was_spilled = false; | 893 something_was_spilled = false; |
894 | 894 |
895 set_initial_elim_offsets (); | 895 set_initial_elim_offsets (); |
953 } | 953 } |
954 | 954 |
955 if (caller_save_needed) | 955 if (caller_save_needed) |
956 setup_save_areas (); | 956 setup_save_areas (); |
957 | 957 |
958 if (starting_frame_size && crtl->stack_alignment_needed) | 958 if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed) |
959 { | 959 { |
960 /* If we have a stack frame, we must align it now. The | 960 /* If we have a stack frame, we must align it now. The |
961 stack size may be a part of the offset computation for | 961 stack size may be a part of the offset computation for |
962 register elimination. So if this changes the stack size, | 962 register elimination. So if this changes the stack size, |
963 then repeat the elimination bookkeeping. We don't | 963 then repeat the elimination bookkeeping. We don't |
966 TARGET_STARTING_FRAME_OFFSET not be already aligned to | 966 TARGET_STARTING_FRAME_OFFSET not be already aligned to |
967 STACK_BOUNDARY. */ | 967 STACK_BOUNDARY. */ |
968 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed); | 968 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed); |
969 } | 969 } |
970 /* If we allocated another stack slot, redo elimination bookkeeping. */ | 970 /* If we allocated another stack slot, redo elimination bookkeeping. */ |
971 if (something_was_spilled || starting_frame_size != get_frame_size ()) | 971 if (something_was_spilled |
972 || maybe_ne (starting_frame_size, get_frame_size ())) | |
972 { | 973 { |
973 if (update_eliminables_and_spill ()) | 974 if (update_eliminables_and_spill ()) |
974 finish_spills (0); | 975 finish_spills (0); |
975 continue; | 976 continue; |
976 } | 977 } |
992 | 993 |
993 something_changed = 0; | 994 something_changed = 0; |
994 | 995 |
995 /* If we allocated any new memory locations, make another pass | 996 /* If we allocated any new memory locations, make another pass |
996 since it might have changed elimination offsets. */ | 997 since it might have changed elimination offsets. */ |
997 if (something_was_spilled || starting_frame_size != get_frame_size ()) | 998 if (something_was_spilled |
999 || maybe_ne (starting_frame_size, get_frame_size ())) | |
998 something_changed = 1; | 1000 something_changed = 1; |
999 | 1001 |
1000 /* Even if the frame size remained the same, we might still have | 1002 /* Even if the frame size remained the same, we might still have |
1001 changed elimination offsets, e.g. if find_reloads called | 1003 changed elimination offsets, e.g. if find_reloads called |
1002 force_const_mem requiring the back end to allocate a constant | 1004 force_const_mem requiring the back end to allocate a constant |
1041 values into or out of the reload registers. */ | 1043 values into or out of the reload registers. */ |
1042 | 1044 |
1043 if (insns_need_reload != 0 || something_needs_elimination | 1045 if (insns_need_reload != 0 || something_needs_elimination |
1044 || something_needs_operands_changed) | 1046 || something_needs_operands_changed) |
1045 { | 1047 { |
1046 HOST_WIDE_INT old_frame_size = get_frame_size (); | 1048 poly_int64 old_frame_size = get_frame_size (); |
1047 | 1049 |
1048 reload_as_needed (global); | 1050 reload_as_needed (global); |
1049 | 1051 |
1050 gcc_assert (old_frame_size == get_frame_size ()); | 1052 gcc_assert (known_eq (old_frame_size, get_frame_size ())); |
1051 | 1053 |
1052 gcc_assert (verify_initial_elim_offsets ()); | 1054 gcc_assert (verify_initial_elim_offsets ()); |
1053 } | 1055 } |
1054 | 1056 |
1055 /* If we were able to eliminate the frame pointer, show that it is no | 1057 /* If we were able to eliminate the frame pointer, show that it is no |
1110 } | 1112 } |
1111 | 1113 |
1112 /* We don't want complex addressing modes in debug insns | 1114 /* We don't want complex addressing modes in debug insns |
1113 if simpler ones will do, so delegitimize equivalences | 1115 if simpler ones will do, so delegitimize equivalences |
1114 in debug insns. */ | 1116 in debug insns. */ |
1115 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0) | 1117 if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0) |
1116 { | 1118 { |
1117 rtx reg = regno_reg_rtx[i]; | 1119 rtx reg = regno_reg_rtx[i]; |
1118 rtx equiv = 0; | 1120 rtx equiv = 0; |
1119 df_ref use, next; | 1121 df_ref use, next; |
1120 | 1122 |
1138 so that we're not affected by the rescan. */ | 1140 so that we're not affected by the rescan. */ |
1139 next = DF_REF_NEXT_REG (use); | 1141 next = DF_REF_NEXT_REG (use); |
1140 while (next && DF_REF_INSN (next) == insn) | 1142 while (next && DF_REF_INSN (next) == insn) |
1141 next = DF_REF_NEXT_REG (next); | 1143 next = DF_REF_NEXT_REG (next); |
1142 | 1144 |
1143 if (DEBUG_INSN_P (insn)) | 1145 if (DEBUG_BIND_INSN_P (insn)) |
1144 { | 1146 { |
1145 if (!equiv) | 1147 if (!equiv) |
1146 { | 1148 { |
1147 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); | 1149 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); |
1148 df_insn_rescan_debug_internal (insn); | 1150 df_insn_rescan_debug_internal (insn); |
1335 for (i = 0; i < XVECLEN (pat, 0); i++) | 1337 for (i = 0; i < XVECLEN (pat, 0); i++) |
1336 { | 1338 { |
1337 rtx t = XVECEXP (pat, 0, i); | 1339 rtx t = XVECEXP (pat, 0, i); |
1338 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0))) | 1340 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0))) |
1339 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))); | 1341 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))); |
1342 /* CLOBBER_HIGH is only supported for LRA. */ | |
1343 gcc_assert (GET_CODE (t) != CLOBBER_HIGH); | |
1340 } | 1344 } |
1341 | 1345 |
1342 /* Get the operand values and constraints out of the insn. */ | 1346 /* Get the operand values and constraints out of the insn. */ |
1343 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc, | 1347 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc, |
1344 constraints, operand_mode, NULL); | 1348 constraints, operand_mode, NULL); |
2138 || reg_equiv_init (i) == 0) | 2142 || reg_equiv_init (i) == 0) |
2139 && reg_equiv_memory_loc (i) == 0) | 2143 && reg_equiv_memory_loc (i) == 0) |
2140 { | 2144 { |
2141 rtx x = NULL_RTX; | 2145 rtx x = NULL_RTX; |
2142 machine_mode mode = GET_MODE (regno_reg_rtx[i]); | 2146 machine_mode mode = GET_MODE (regno_reg_rtx[i]); |
2143 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i); | 2147 poly_uint64 inherent_size = GET_MODE_SIZE (mode); |
2144 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode); | 2148 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode); |
2145 machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]); | 2149 machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]); |
2146 unsigned int total_size = GET_MODE_SIZE (wider_mode); | 2150 poly_uint64 total_size = GET_MODE_SIZE (wider_mode); |
2147 unsigned int min_align = GET_MODE_BITSIZE (reg_max_ref_mode[i]); | 2151 /* ??? Seems strange to derive the minimum alignment from the size, |
2148 int adjust = 0; | 2152 but that's the traditional behavior. For polynomial-size modes, |
2153 the natural extension is to use the minimum possible size. */ | |
2154 unsigned int min_align | |
2155 = constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i])); | |
2156 poly_int64 adjust = 0; | |
2149 | 2157 |
2150 something_was_spilled = true; | 2158 something_was_spilled = true; |
2151 | 2159 |
2152 if (ira_conflicts_p) | 2160 if (ira_conflicts_p) |
2153 { | 2161 { |
2170 inherent space, and no less total space, then the previous slot. */ | 2178 inherent space, and no less total space, then the previous slot. */ |
2171 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p)) | 2179 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p)) |
2172 { | 2180 { |
2173 rtx stack_slot; | 2181 rtx stack_slot; |
2174 | 2182 |
2183 /* The sizes are taken from a subreg operation, which guarantees | |
2184 that they're ordered. */ | |
2185 gcc_checking_assert (ordered_p (total_size, inherent_size)); | |
2186 | |
2175 /* No known place to spill from => no slot to reuse. */ | 2187 /* No known place to spill from => no slot to reuse. */ |
2176 x = assign_stack_local (mode, total_size, | 2188 x = assign_stack_local (mode, total_size, |
2177 min_align > inherent_align | 2189 min_align > inherent_align |
2178 || total_size > inherent_size ? -1 : 0); | 2190 || maybe_gt (total_size, inherent_size) |
2191 ? -1 : 0); | |
2179 | 2192 |
2180 stack_slot = x; | 2193 stack_slot = x; |
2181 | 2194 |
2182 /* Cancel the big-endian correction done in assign_stack_local. | 2195 /* Cancel the big-endian correction done in assign_stack_local. |
2183 Get the address of the beginning of the slot. This is so we | 2196 Get the address of the beginning of the slot. This is so we |
2184 can do a big-endian correction unconditionally below. */ | 2197 can do a big-endian correction unconditionally below. */ |
2185 if (BYTES_BIG_ENDIAN) | 2198 if (BYTES_BIG_ENDIAN) |
2186 { | 2199 { |
2187 adjust = inherent_size - total_size; | 2200 adjust = inherent_size - total_size; |
2188 if (adjust) | 2201 if (maybe_ne (adjust, 0)) |
2189 { | 2202 { |
2190 unsigned int total_bits = total_size * BITS_PER_UNIT; | 2203 poly_uint64 total_bits = total_size * BITS_PER_UNIT; |
2191 machine_mode mem_mode | 2204 machine_mode mem_mode |
2192 = int_mode_for_size (total_bits, 1).else_blk (); | 2205 = int_mode_for_size (total_bits, 1).else_blk (); |
2193 stack_slot = adjust_address_nv (x, mem_mode, adjust); | 2206 stack_slot = adjust_address_nv (x, mem_mode, adjust); |
2194 } | 2207 } |
2195 } | 2208 } |
2199 ira_mark_new_stack_slot (stack_slot, i, total_size); | 2212 ira_mark_new_stack_slot (stack_slot, i, total_size); |
2200 } | 2213 } |
2201 | 2214 |
2202 /* Reuse a stack slot if possible. */ | 2215 /* Reuse a stack slot if possible. */ |
2203 else if (spill_stack_slot[from_reg] != 0 | 2216 else if (spill_stack_slot[from_reg] != 0 |
2204 && spill_stack_slot_width[from_reg] >= total_size | 2217 && known_ge (spill_stack_slot_width[from_reg], total_size) |
2205 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg])) | 2218 && known_ge (GET_MODE_SIZE |
2206 >= inherent_size) | 2219 (GET_MODE (spill_stack_slot[from_reg])), |
2220 inherent_size) | |
2207 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align) | 2221 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align) |
2208 x = spill_stack_slot[from_reg]; | 2222 x = spill_stack_slot[from_reg]; |
2209 | 2223 |
2210 /* Allocate a bigger slot. */ | 2224 /* Allocate a bigger slot. */ |
2211 else | 2225 else |
2217 if (spill_stack_slot[from_reg]) | 2231 if (spill_stack_slot[from_reg]) |
2218 { | 2232 { |
2219 if (partial_subreg_p (mode, | 2233 if (partial_subreg_p (mode, |
2220 GET_MODE (spill_stack_slot[from_reg]))) | 2234 GET_MODE (spill_stack_slot[from_reg]))) |
2221 mode = GET_MODE (spill_stack_slot[from_reg]); | 2235 mode = GET_MODE (spill_stack_slot[from_reg]); |
2222 if (spill_stack_slot_width[from_reg] > total_size) | 2236 total_size = ordered_max (total_size, |
2223 total_size = spill_stack_slot_width[from_reg]; | 2237 spill_stack_slot_width[from_reg]); |
2224 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align) | 2238 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align) |
2225 min_align = MEM_ALIGN (spill_stack_slot[from_reg]); | 2239 min_align = MEM_ALIGN (spill_stack_slot[from_reg]); |
2226 } | 2240 } |
2227 | 2241 |
2242 /* The sizes are taken from a subreg operation, which guarantees | |
2243 that they're ordered. */ | |
2244 gcc_checking_assert (ordered_p (total_size, inherent_size)); | |
2245 | |
2228 /* Make a slot with that size. */ | 2246 /* Make a slot with that size. */ |
2229 x = assign_stack_local (mode, total_size, | 2247 x = assign_stack_local (mode, total_size, |
2230 min_align > inherent_align | 2248 min_align > inherent_align |
2231 || total_size > inherent_size ? -1 : 0); | 2249 || maybe_gt (total_size, inherent_size) |
2250 ? -1 : 0); | |
2232 stack_slot = x; | 2251 stack_slot = x; |
2233 | 2252 |
2234 /* Cancel the big-endian correction done in assign_stack_local. | 2253 /* Cancel the big-endian correction done in assign_stack_local. |
2235 Get the address of the beginning of the slot. This is so we | 2254 Get the address of the beginning of the slot. This is so we |
2236 can do a big-endian correction unconditionally below. */ | 2255 can do a big-endian correction unconditionally below. */ |
2237 if (BYTES_BIG_ENDIAN) | 2256 if (BYTES_BIG_ENDIAN) |
2238 { | 2257 { |
2239 adjust = GET_MODE_SIZE (mode) - total_size; | 2258 adjust = GET_MODE_SIZE (mode) - total_size; |
2240 if (adjust) | 2259 if (maybe_ne (adjust, 0)) |
2241 { | 2260 { |
2242 unsigned int total_bits = total_size * BITS_PER_UNIT; | 2261 poly_uint64 total_bits = total_size * BITS_PER_UNIT; |
2243 machine_mode mem_mode | 2262 machine_mode mem_mode |
2244 = int_mode_for_size (total_bits, 1).else_blk (); | 2263 = int_mode_for_size (total_bits, 1).else_blk (); |
2245 stack_slot = adjust_address_nv (x, mem_mode, adjust); | 2264 stack_slot = adjust_address_nv (x, mem_mode, adjust); |
2246 } | 2265 } |
2247 } | 2266 } |
2349 /* If neither of the above cases is true, compare each offset | 2368 /* If neither of the above cases is true, compare each offset |
2350 with those previously recorded and suppress any eliminations | 2369 with those previously recorded and suppress any eliminations |
2351 where the offsets disagree. */ | 2370 where the offsets disagree. */ |
2352 | 2371 |
2353 for (i = 0; i < NUM_ELIMINABLE_REGS; i++) | 2372 for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
2354 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i] | 2373 if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i], |
2355 != (initial_p ? reg_eliminate[i].initial_offset | 2374 (initial_p ? reg_eliminate[i].initial_offset |
2356 : reg_eliminate[i].offset)) | 2375 : reg_eliminate[i].offset))) |
2357 reg_eliminate[i].can_eliminate = 0; | 2376 reg_eliminate[i].can_eliminate = 0; |
2358 | 2377 |
2359 return; | 2378 return; |
2360 | 2379 |
2361 case JUMP_TABLE_DATA: | 2380 case JUMP_TABLE_DATA: |
2434 } | 2453 } |
2435 | 2454 |
2436 /* If we reach here, all eliminations must be at their initial | 2455 /* If we reach here, all eliminations must be at their initial |
2437 offset because we are doing a jump to a variable address. */ | 2456 offset because we are doing a jump to a variable address. */ |
2438 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++) | 2457 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++) |
2439 if (p->offset != p->initial_offset) | 2458 if (maybe_ne (p->offset, p->initial_offset)) |
2440 p->can_eliminate = 0; | 2459 p->can_eliminate = 0; |
2441 break; | 2460 break; |
2442 | 2461 |
2443 default: | 2462 default: |
2444 break; | 2463 break; |
2591 to do so at other times because that would change the | 2610 to do so at other times because that would change the |
2592 structure of the insn in a way that reload can't handle. | 2611 structure of the insn in a way that reload can't handle. |
2593 We special-case the commonest situation in | 2612 We special-case the commonest situation in |
2594 eliminate_regs_in_insn, so just replace a PLUS with a | 2613 eliminate_regs_in_insn, so just replace a PLUS with a |
2595 PLUS here, unless inside a MEM. */ | 2614 PLUS here, unless inside a MEM. */ |
2596 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1)) | 2615 if (mem_mode != 0 |
2597 && INTVAL (XEXP (x, 1)) == - ep->previous_offset) | 2616 && CONST_INT_P (XEXP (x, 1)) |
2617 && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset)) | |
2598 return ep->to_rtx; | 2618 return ep->to_rtx; |
2599 else | 2619 else |
2600 return gen_rtx_PLUS (Pmode, ep->to_rtx, | 2620 return gen_rtx_PLUS (Pmode, ep->to_rtx, |
2601 plus_constant (Pmode, XEXP (x, 1), | 2621 plus_constant (Pmode, XEXP (x, 1), |
2602 ep->previous_offset)); | 2622 ep->previous_offset)); |
2809 else | 2829 else |
2810 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs); | 2830 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs); |
2811 | 2831 |
2812 if (new_rtx != SUBREG_REG (x)) | 2832 if (new_rtx != SUBREG_REG (x)) |
2813 { | 2833 { |
2814 int x_size = GET_MODE_SIZE (GET_MODE (x)); | 2834 poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x)); |
2815 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); | 2835 poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); |
2816 | 2836 |
2817 if (MEM_P (new_rtx) | 2837 if (MEM_P (new_rtx) |
2818 && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx)) | 2838 && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx)) |
2819 /* On RISC machines, combine can create rtl of the form | 2839 /* On RISC machines, combine can create rtl of the form |
2820 (set (subreg:m1 (reg:m2 R) 0) ...) | 2840 (set (subreg:m1 (reg:m2 R) 0) ...) |
2822 happen to the entire word. Moreover, it will use the | 2842 happen to the entire word. Moreover, it will use the |
2823 (reg:m2 R) later, expecting all bits to be preserved. | 2843 (reg:m2 R) later, expecting all bits to be preserved. |
2824 So if the number of words is the same, preserve the | 2844 So if the number of words is the same, preserve the |
2825 subreg so that push_reload can see it. */ | 2845 subreg so that push_reload can see it. */ |
2826 && !(WORD_REGISTER_OPERATIONS | 2846 && !(WORD_REGISTER_OPERATIONS |
2827 && (x_size - 1) / UNITS_PER_WORD | 2847 && known_equal_after_align_down (x_size - 1, |
2828 == (new_size -1 ) / UNITS_PER_WORD)) | 2848 new_size - 1, |
2829 || x_size == new_size) | 2849 UNITS_PER_WORD))) |
2850 || known_eq (x_size, new_size)) | |
2830 ) | 2851 ) |
2831 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x)); | 2852 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x)); |
2832 else if (insn && GET_CODE (insn) == DEBUG_INSN) | 2853 else if (insn && GET_CODE (insn) == DEBUG_INSN) |
2833 return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x)); | 2854 return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x)); |
2834 else | 2855 else |
2858 if (new_rtx != XEXP (x, 0)) | 2879 if (new_rtx != XEXP (x, 0)) |
2859 return gen_rtx_USE (GET_MODE (x), new_rtx); | 2880 return gen_rtx_USE (GET_MODE (x), new_rtx); |
2860 return x; | 2881 return x; |
2861 | 2882 |
2862 case CLOBBER: | 2883 case CLOBBER: |
2884 case CLOBBER_HIGH: | |
2863 case ASM_OPERANDS: | 2885 case ASM_OPERANDS: |
2864 gcc_assert (insn && DEBUG_INSN_P (insn)); | 2886 gcc_assert (insn && DEBUG_INSN_P (insn)); |
2865 break; | 2887 break; |
2866 | 2888 |
2867 case SET: | 2889 case SET: |
2991 update its offset. If we modify the target in any other way, we'll | 3013 update its offset. If we modify the target in any other way, we'll |
2992 have to disable the rule as well. */ | 3014 have to disable the rule as well. */ |
2993 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) | 3015 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
2994 if (ep->to_rtx == XEXP (x, 0)) | 3016 if (ep->to_rtx == XEXP (x, 0)) |
2995 { | 3017 { |
2996 int size = GET_MODE_SIZE (mem_mode); | 3018 poly_int64 size = GET_MODE_SIZE (mem_mode); |
2997 | 3019 |
2998 /* If more bytes than MEM_MODE are pushed, account for them. */ | 3020 /* If more bytes than MEM_MODE are pushed, account for them. */ |
2999 #ifdef PUSH_ROUNDING | 3021 #ifdef PUSH_ROUNDING |
3000 if (ep->to_rtx == stack_pointer_rtx) | 3022 if (ep->to_rtx == stack_pointer_rtx) |
3001 size = PUSH_ROUNDING (size); | 3023 size = PUSH_ROUNDING (size); |
3068 ep->can_eliminate = 0; | 3090 ep->can_eliminate = 0; |
3069 | 3091 |
3070 elimination_effects (XEXP (x, 0), mem_mode); | 3092 elimination_effects (XEXP (x, 0), mem_mode); |
3071 return; | 3093 return; |
3072 | 3094 |
3095 case CLOBBER_HIGH: | |
3096 /* CLOBBER_HIGH is only supported for LRA. */ | |
3097 return; | |
3098 | |
3073 case SET: | 3099 case SET: |
3074 /* Check for setting a register that we know about. */ | 3100 /* Check for setting a register that we know about. */ |
3075 if (REG_P (SET_DEST (x))) | 3101 if (REG_P (SET_DEST (x))) |
3076 { | 3102 { |
3077 /* See if this is setting the replacement register for an | 3103 /* See if this is setting the replacement register for an |
3200 { | 3226 { |
3201 gcc_assert (DEBUG_INSN_P (insn) | 3227 gcc_assert (DEBUG_INSN_P (insn) |
3202 || GET_CODE (PATTERN (insn)) == USE | 3228 || GET_CODE (PATTERN (insn)) == USE |
3203 || GET_CODE (PATTERN (insn)) == CLOBBER | 3229 || GET_CODE (PATTERN (insn)) == CLOBBER |
3204 || GET_CODE (PATTERN (insn)) == ASM_INPUT); | 3230 || GET_CODE (PATTERN (insn)) == ASM_INPUT); |
3205 if (DEBUG_INSN_P (insn)) | 3231 if (DEBUG_BIND_INSN_P (insn)) |
3206 INSN_VAR_LOCATION_LOC (insn) | 3232 INSN_VAR_LOCATION_LOC (insn) |
3207 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn); | 3233 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn); |
3208 return 0; | 3234 return 0; |
3209 } | 3235 } |
3210 | 3236 |
3342 } | 3368 } |
3343 } | 3369 } |
3344 if (plus_cst_src) | 3370 if (plus_cst_src) |
3345 { | 3371 { |
3346 rtx reg = XEXP (plus_cst_src, 0); | 3372 rtx reg = XEXP (plus_cst_src, 0); |
3347 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1)); | 3373 poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1)); |
3348 | 3374 |
3349 if (GET_CODE (reg) == SUBREG) | 3375 if (GET_CODE (reg) == SUBREG) |
3350 reg = SUBREG_REG (reg); | 3376 reg = SUBREG_REG (reg); |
3351 | 3377 |
3352 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) | 3378 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3362 /* If we have a nonzero offset, and the source is already | 3388 /* If we have a nonzero offset, and the source is already |
3363 a simple REG, the following transformation would | 3389 a simple REG, the following transformation would |
3364 increase the cost of the insn by replacing a simple REG | 3390 increase the cost of the insn by replacing a simple REG |
3365 with (plus (reg sp) CST). So try only when we already | 3391 with (plus (reg sp) CST). So try only when we already |
3366 had a PLUS before. */ | 3392 had a PLUS before. */ |
3367 if (offset == 0 || plus_src) | 3393 if (known_eq (offset, 0) || plus_src) |
3368 { | 3394 { |
3369 rtx new_src = plus_constant (GET_MODE (to_rtx), | 3395 rtx new_src = plus_constant (GET_MODE (to_rtx), |
3370 to_rtx, offset); | 3396 to_rtx, offset); |
3371 | 3397 |
3372 new_body = old_body; | 3398 new_body = old_body; |
3560 | 3586 |
3561 If anything changes, return nonzero. */ | 3587 If anything changes, return nonzero. */ |
3562 | 3588 |
3563 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) | 3589 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3564 { | 3590 { |
3565 if (ep->previous_offset != ep->offset && ep->ref_outside_mem) | 3591 if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) |
3566 ep->can_eliminate = 0; | 3592 ep->can_eliminate = 0; |
3567 | 3593 |
3568 ep->ref_outside_mem = 0; | 3594 ep->ref_outside_mem = 0; |
3569 | 3595 |
3570 if (ep->previous_offset != ep->offset) | 3596 if (maybe_ne (ep->previous_offset, ep->offset)) |
3571 val = 1; | 3597 val = 1; |
3572 } | 3598 } |
3573 | 3599 |
3574 done: | 3600 done: |
3575 /* If we changed something, perform elimination in REG_NOTES. This is | 3601 /* If we changed something, perform elimination in REG_NOTES. This is |
3731 insn. The changes we make were determined by the earlier call to | 3757 insn. The changes we make were determined by the earlier call to |
3732 elimination_effects. */ | 3758 elimination_effects. */ |
3733 | 3759 |
3734 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) | 3760 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3735 { | 3761 { |
3736 if (ep->previous_offset != ep->offset && ep->ref_outside_mem) | 3762 if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) |
3737 ep->can_eliminate = 0; | 3763 ep->can_eliminate = 0; |
3738 | 3764 |
3739 ep->ref_outside_mem = 0; | 3765 ep->ref_outside_mem = 0; |
3740 } | 3766 } |
3741 | 3767 |
3756 | 3782 |
3757 num_not_at_initial_offset = 0; | 3783 num_not_at_initial_offset = 0; |
3758 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) | 3784 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3759 { | 3785 { |
3760 ep->previous_offset = ep->offset; | 3786 ep->previous_offset = ep->offset; |
3761 if (ep->can_eliminate && ep->offset != ep->initial_offset) | 3787 if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) |
3762 num_not_at_initial_offset++; | 3788 num_not_at_initial_offset++; |
3763 } | 3789 } |
3764 } | 3790 } |
3765 | 3791 |
3766 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register | 3792 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register |
3788 if (GET_CODE (dest) == SUBREG) | 3814 if (GET_CODE (dest) == SUBREG) |
3789 dest = SUBREG_REG (dest); | 3815 dest = SUBREG_REG (dest); |
3790 | 3816 |
3791 if (dest == hard_frame_pointer_rtx) | 3817 if (dest == hard_frame_pointer_rtx) |
3792 return; | 3818 return; |
3819 | |
3820 /* CLOBBER_HIGH is only supported for LRA. */ | |
3821 gcc_assert (GET_CODE (x) != CLOBBER_HIGH); | |
3793 | 3822 |
3794 for (i = 0; i < NUM_ELIMINABLE_REGS; i++) | 3823 for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
3795 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx | 3824 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx |
3796 && (GET_CODE (x) != SET | 3825 && (GET_CODE (x) != SET |
3797 || GET_CODE (SET_SRC (x)) != PLUS | 3826 || GET_CODE (SET_SRC (x)) != PLUS |
3810 cause incorrect code to be generated if we did not check for it. */ | 3839 cause incorrect code to be generated if we did not check for it. */ |
3811 | 3840 |
3812 static bool | 3841 static bool |
3813 verify_initial_elim_offsets (void) | 3842 verify_initial_elim_offsets (void) |
3814 { | 3843 { |
3815 HOST_WIDE_INT t; | 3844 poly_int64 t; |
3816 struct elim_table *ep; | 3845 struct elim_table *ep; |
3817 | 3846 |
3818 if (!num_eliminable) | 3847 if (!num_eliminable) |
3819 return true; | 3848 return true; |
3820 | 3849 |
3821 targetm.compute_frame_layout (); | 3850 targetm.compute_frame_layout (); |
3822 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) | 3851 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3823 { | 3852 { |
3824 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t); | 3853 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t); |
3825 if (t != ep->initial_offset) | 3854 if (maybe_ne (t, ep->initial_offset)) |
3826 return false; | 3855 return false; |
3827 } | 3856 } |
3828 | 3857 |
3829 return true; | 3858 return true; |
3830 } | 3859 } |
3891 num_not_at_initial_offset = 0; | 3920 num_not_at_initial_offset = 0; |
3892 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++) | 3921 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++) |
3893 { | 3922 { |
3894 ep->offset = ep->previous_offset | 3923 ep->offset = ep->previous_offset |
3895 = offsets_at[label_nr - first_label_num][i]; | 3924 = offsets_at[label_nr - first_label_num][i]; |
3896 if (ep->can_eliminate && ep->offset != ep->initial_offset) | 3925 if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) |
3897 num_not_at_initial_offset++; | 3926 num_not_at_initial_offset++; |
3898 } | 3927 } |
3899 } | 3928 } |
3900 | 3929 |
3901 /* See if anything that happened changes which eliminations are valid. | 3930 /* See if anything that happened changes which eliminations are valid. |
4093 first_label_num = get_first_label_num (); | 4122 first_label_num = get_first_label_num (); |
4094 num_labels = max_label_num () - first_label_num; | 4123 num_labels = max_label_num () - first_label_num; |
4095 | 4124 |
4096 /* Allocate the tables used to store offset information at labels. */ | 4125 /* Allocate the tables used to store offset information at labels. */ |
4097 offsets_known_at = XNEWVEC (char, num_labels); | 4126 offsets_known_at = XNEWVEC (char, num_labels); |
4098 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT)); | 4127 offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS]) |
4128 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64)); | |
4099 | 4129 |
4100 /* Look for REG_EQUIV notes; record what each pseudo is equivalent | 4130 /* Look for REG_EQUIV notes; record what each pseudo is equivalent |
4101 to. If DO_SUBREGS is true, also find all paradoxical subregs and | 4131 to. If DO_SUBREGS is true, also find all paradoxical subregs and |
4102 find largest such for each pseudo. FIRST is the head of the insn | 4132 find largest such for each pseudo. FIRST is the head of the insn |
4103 list. */ | 4133 list. */ |
4423 CASE_CONST_ANY: | 4453 CASE_CONST_ANY: |
4424 case CC0: | 4454 case CC0: |
4425 case PC: | 4455 case PC: |
4426 case USE: | 4456 case USE: |
4427 case CLOBBER: | 4457 case CLOBBER: |
4458 case CLOBBER_HIGH: | |
4428 return; | 4459 return; |
4429 | 4460 |
4430 case SUBREG: | 4461 case SUBREG: |
4431 if (REG_P (SUBREG_REG (x))) | 4462 if (REG_P (SUBREG_REG (x))) |
4432 { | 4463 { |
4643 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX); | 4674 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX); |
4644 if (p) | 4675 if (p) |
4645 { | 4676 { |
4646 remove_note (insn, p); | 4677 remove_note (insn, p); |
4647 fixup_args_size_notes (prev, PREV_INSN (next), | 4678 fixup_args_size_notes (prev, PREV_INSN (next), |
4648 INTVAL (XEXP (p, 0))); | 4679 get_args_size (p)); |
4649 } | 4680 } |
4650 | 4681 |
4651 /* If this was an ASM, make sure that all the reload insns | 4682 /* If this was an ASM, make sure that all the reload insns |
4652 we have generated are valid. If not, give an error | 4683 we have generated are valid. If not, give an error |
4653 and delete them. */ | 4684 and delete them. */ |
4877 | 4908 |
4878 When DATA is non-NULL just mark the registers in regset | 4909 When DATA is non-NULL just mark the registers in regset |
4879 to be forgotten later. */ | 4910 to be forgotten later. */ |
4880 | 4911 |
4881 static void | 4912 static void |
4882 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED, | 4913 forget_old_reloads_1 (rtx x, const_rtx setter, |
4883 void *data) | 4914 void *data) |
4884 { | 4915 { |
4885 unsigned int regno; | 4916 unsigned int regno; |
4886 unsigned int nr; | 4917 unsigned int nr; |
4887 regset regs = (regset) data; | 4918 regset regs = (regset) data; |
4896 x = SUBREG_REG (x); | 4927 x = SUBREG_REG (x); |
4897 } | 4928 } |
4898 | 4929 |
4899 if (!REG_P (x)) | 4930 if (!REG_P (x)) |
4900 return; | 4931 return; |
4932 | |
4933 /* CLOBBER_HIGH is only supported for LRA. */ | |
4934 gcc_assert (setter == NULL_RTX || GET_CODE (setter) != CLOBBER_HIGH); | |
4901 | 4935 |
4902 regno = REGNO (x); | 4936 regno = REGNO (x); |
4903 | 4937 |
4904 if (regno >= FIRST_PSEUDO_REGISTER) | 4938 if (regno >= FIRST_PSEUDO_REGISTER) |
4905 nr = 1; | 4939 nr = 1; |
6345 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of | 6379 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of |
6346 mode OUTERMODE that is available in a hard reg of mode INNERMODE. | 6380 mode OUTERMODE that is available in a hard reg of mode INNERMODE. |
6347 SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo, | 6381 SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo, |
6348 otherwise it is NULL. */ | 6382 otherwise it is NULL. */ |
6349 | 6383 |
6350 static int | 6384 static poly_int64 |
6351 compute_reload_subreg_offset (machine_mode outermode, | 6385 compute_reload_subreg_offset (machine_mode outermode, |
6352 rtx subreg, | 6386 rtx subreg, |
6353 machine_mode innermode) | 6387 machine_mode innermode) |
6354 { | 6388 { |
6355 int outer_offset; | 6389 poly_int64 outer_offset; |
6356 machine_mode middlemode; | 6390 machine_mode middlemode; |
6357 | 6391 |
6358 if (!subreg) | 6392 if (!subreg) |
6359 return subreg_lowpart_offset (outermode, innermode); | 6393 return subreg_lowpart_offset (outermode, innermode); |
6360 | 6394 |
6504 register be allocated here. In `emit_reload_insns' we suppress | 6538 register be allocated here. In `emit_reload_insns' we suppress |
6505 one of the loads in the case described above. */ | 6539 one of the loads in the case described above. */ |
6506 | 6540 |
6507 if (inheritance) | 6541 if (inheritance) |
6508 { | 6542 { |
6509 int byte = 0; | 6543 poly_int64 byte = 0; |
6510 int regno = -1; | 6544 int regno = -1; |
6511 machine_mode mode = VOIDmode; | 6545 machine_mode mode = VOIDmode; |
6512 rtx subreg = NULL_RTX; | 6546 rtx subreg = NULL_RTX; |
6513 | 6547 |
6514 if (rld[r].in == 0) | 6548 if (rld[r].in == 0) |
6554 regno = subreg_regno (rld[r].in); | 6588 regno = subreg_regno (rld[r].in); |
6555 #endif | 6589 #endif |
6556 | 6590 |
6557 if (regno >= 0 | 6591 if (regno >= 0 |
6558 && reg_last_reload_reg[regno] != 0 | 6592 && reg_last_reload_reg[regno] != 0 |
6559 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])) | 6593 && (known_ge |
6560 >= GET_MODE_SIZE (mode) + byte) | 6594 (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])), |
6595 GET_MODE_SIZE (mode) + byte)) | |
6561 /* Verify that the register it's in can be used in | 6596 /* Verify that the register it's in can be used in |
6562 mode MODE. */ | 6597 mode MODE. */ |
6563 && (REG_CAN_CHANGE_MODE_P | 6598 && (REG_CAN_CHANGE_MODE_P |
6564 (REGNO (reg_last_reload_reg[regno]), | 6599 (REGNO (reg_last_reload_reg[regno]), |
6565 GET_MODE (reg_last_reload_reg[regno]), | 6600 GET_MODE (reg_last_reload_reg[regno]), |
7343 } | 7378 } |
7344 special = 1; | 7379 special = 1; |
7345 | 7380 |
7346 /* Adjust any debug insns between temp and insn. */ | 7381 /* Adjust any debug insns between temp and insn. */ |
7347 while ((temp = NEXT_INSN (temp)) != insn) | 7382 while ((temp = NEXT_INSN (temp)) != insn) |
7348 if (DEBUG_INSN_P (temp)) | 7383 if (DEBUG_BIND_INSN_P (temp)) |
7349 INSN_VAR_LOCATION_LOC (temp) | 7384 INSN_VAR_LOCATION_LOC (temp) |
7350 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp), | 7385 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp), |
7351 old, reloadreg); | 7386 old, reloadreg); |
7352 else | 7387 else |
7353 gcc_assert (NOTE_P (temp)); | 7388 gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp)); |
7354 } | 7389 } |
7355 else | 7390 else |
7356 { | 7391 { |
7357 SET_DEST (PATTERN (temp)) = old; | 7392 SET_DEST (PATTERN (temp)) = old; |
7358 } | 7393 } |
8004 return; | 8039 return; |
8005 } | 8040 } |
8006 /* Likewise for a SUBREG of an operand that dies. */ | 8041 /* Likewise for a SUBREG of an operand that dies. */ |
8007 else if (GET_CODE (old) == SUBREG | 8042 else if (GET_CODE (old) == SUBREG |
8008 && REG_P (SUBREG_REG (old)) | 8043 && REG_P (SUBREG_REG (old)) |
8009 && 0 != (note = find_reg_note (insn, REG_UNUSED, | 8044 && (note = find_reg_note (insn, REG_UNUSED, |
8010 SUBREG_REG (old)))) | 8045 SUBREG_REG (old))) != 0) |
8011 { | 8046 { |
8012 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx); | 8047 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx); |
8013 return; | 8048 return; |
8014 } | 8049 } |
8015 else if (GET_CODE (old) == SCRATCH) | 8050 else if (GET_CODE (old) == SCRATCH) |
9072 | 9107 |
9073 INC_AMOUNT is the number to increment or decrement by (always positive). | 9108 INC_AMOUNT is the number to increment or decrement by (always positive). |
9074 This cannot be deduced from VALUE. */ | 9109 This cannot be deduced from VALUE. */ |
9075 | 9110 |
9076 static void | 9111 static void |
9077 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount) | 9112 inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount) |
9078 { | 9113 { |
9079 /* REG or MEM to be copied and incremented. */ | 9114 /* REG or MEM to be copied and incremented. */ |
9080 rtx incloc = find_replacement (&XEXP (value, 0)); | 9115 rtx incloc = find_replacement (&XEXP (value, 0)); |
9081 /* Nonzero if increment after copying. */ | 9116 /* Nonzero if increment after copying. */ |
9082 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC | 9117 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC |
9102 else | 9137 else |
9103 { | 9138 { |
9104 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC) | 9139 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC) |
9105 inc_amount = -inc_amount; | 9140 inc_amount = -inc_amount; |
9106 | 9141 |
9107 inc = GEN_INT (inc_amount); | 9142 inc = gen_int_mode (inc_amount, Pmode); |
9108 } | 9143 } |
9109 | 9144 |
9110 /* If this is post-increment, first copy the location to the reload reg. */ | 9145 /* If this is post-increment, first copy the location to the reload reg. */ |
9111 if (post && real_in != reloadreg) | 9146 if (post && real_in != reloadreg) |
9112 emit_insn (gen_move_insn (reloadreg, real_in)); | 9147 emit_insn (gen_move_insn (reloadreg, real_in)); |