Mercurial > hg > CbC > CbC_gcc
diff gcc/reload1.c @ 131:84e7813d76e9
gcc-8.2
author | mir3636 |
---|---|
date | Thu, 25 Oct 2018 07:37:49 +0900 |
parents | 04ced10e8804 |
children | 1830386684a0 |
line wrap: on
line diff
--- a/gcc/reload1.c Fri Oct 27 22:46:09 2017 +0900 +++ b/gcc/reload1.c Thu Oct 25 07:37:49 2018 +0900 @@ -1,5 +1,5 @@ /* Reload pseudo regs into hard regs for insns that require hard regs. - Copyright (C) 1987-2017 Free Software Foundation, Inc. + Copyright (C) 1987-2018 Free Software Foundation, Inc. This file is part of GCC. @@ -200,7 +200,7 @@ static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; /* Width allocated so far for that stack slot. */ -static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; +static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; /* Record which pseudos needed to be spilled. */ static regset_head spilled_pseudos; @@ -261,13 +261,13 @@ { int from; /* Register number to be eliminated. */ int to; /* Register number used as replacement. */ - HOST_WIDE_INT initial_offset; /* Initial difference between values. */ + poly_int64_pod initial_offset; /* Initial difference between values. */ int can_eliminate; /* Nonzero if this elimination can be done. */ int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE target hook in previous scan over insns made by reload. */ - HOST_WIDE_INT offset; /* Current offset between the two regs. */ - HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */ + poly_int64_pod offset; /* Current offset between the two regs. */ + poly_int64_pod previous_offset; /* Offset at end of previous insn. */ int ref_outside_mem; /* "to" has been referenced outside a MEM. */ rtx from_rtx; /* REG rtx for the register to be eliminated. 
We cannot simply compare the number since @@ -313,7 +313,7 @@ static int first_label_num; static char *offsets_known_at; -static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS]; +static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS]; vec<reg_equivs_t, va_gc> *reg_equivs; @@ -398,7 +398,7 @@ static void delete_output_reload (rtx_insn *, int, int, rtx); static void delete_address_reloads (rtx_insn *, rtx_insn *); static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *); -static void inc_for_reload (rtx, rtx, rtx, int); +static void inc_for_reload (rtx, rtx, rtx, poly_int64); static void add_auto_inc_notes (rtx_insn *, rtx); static void substitute (rtx *, const_rtx, rtx); static bool gen_reload_chain_without_interm_reg_p (int, int); @@ -887,7 +887,7 @@ for (;;) { int something_changed; - HOST_WIDE_INT starting_frame_size; + poly_int64 starting_frame_size; starting_frame_size = get_frame_size (); something_was_spilled = false; @@ -955,7 +955,7 @@ if (caller_save_needed) setup_save_areas (); - if (starting_frame_size && crtl->stack_alignment_needed) + if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed) { /* If we have a stack frame, we must align it now. The stack size may be a part of the offset computation for @@ -968,7 +968,8 @@ assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed); } /* If we allocated another stack slot, redo elimination bookkeeping. */ - if (something_was_spilled || starting_frame_size != get_frame_size ()) + if (something_was_spilled + || maybe_ne (starting_frame_size, get_frame_size ())) { if (update_eliminables_and_spill ()) finish_spills (0); @@ -994,7 +995,8 @@ /* If we allocated any new memory locations, make another pass since it might have changed elimination offsets. 
*/ - if (something_was_spilled || starting_frame_size != get_frame_size ()) + if (something_was_spilled + || maybe_ne (starting_frame_size, get_frame_size ())) something_changed = 1; /* Even if the frame size remained the same, we might still have @@ -1043,11 +1045,11 @@ if (insns_need_reload != 0 || something_needs_elimination || something_needs_operands_changed) { - HOST_WIDE_INT old_frame_size = get_frame_size (); + poly_int64 old_frame_size = get_frame_size (); reload_as_needed (global); - gcc_assert (old_frame_size == get_frame_size ()); + gcc_assert (known_eq (old_frame_size, get_frame_size ())); gcc_assert (verify_initial_elim_offsets ()); } @@ -1112,7 +1114,7 @@ /* We don't want complex addressing modes in debug insns if simpler ones will do, so delegitimize equivalences in debug insns. */ - if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0) + if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0) { rtx reg = regno_reg_rtx[i]; rtx equiv = 0; @@ -1140,7 +1142,7 @@ while (next && DF_REF_INSN (next) == insn) next = DF_REF_NEXT_REG (next); - if (DEBUG_INSN_P (insn)) + if (DEBUG_BIND_INSN_P (insn)) { if (!equiv) { @@ -1337,6 +1339,8 @@ rtx t = XVECEXP (pat, 0, i); if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0))) SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))); + /* CLOBBER_HIGH is only supported for LRA. */ + gcc_assert (GET_CODE (t) != CLOBBER_HIGH); } /* Get the operand values and constraints out of the insn. */ @@ -2140,12 +2144,16 @@ { rtx x = NULL_RTX; machine_mode mode = GET_MODE (regno_reg_rtx[i]); - unsigned int inherent_size = PSEUDO_REGNO_BYTES (i); + poly_uint64 inherent_size = GET_MODE_SIZE (mode); unsigned int inherent_align = GET_MODE_ALIGNMENT (mode); machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]); - unsigned int total_size = GET_MODE_SIZE (wider_mode); - unsigned int min_align = GET_MODE_BITSIZE (reg_max_ref_mode[i]); - int adjust = 0; + poly_uint64 total_size = GET_MODE_SIZE (wider_mode); + /* ??? 
Seems strange to derive the minimum alignment from the size, + but that's the traditional behavior. For polynomial-size modes, + the natural extension is to use the minimum possible size. */ + unsigned int min_align + = constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i])); + poly_int64 adjust = 0; something_was_spilled = true; @@ -2172,10 +2180,15 @@ { rtx stack_slot; + /* The sizes are taken from a subreg operation, which guarantees + that they're ordered. */ + gcc_checking_assert (ordered_p (total_size, inherent_size)); + /* No known place to spill from => no slot to reuse. */ x = assign_stack_local (mode, total_size, min_align > inherent_align - || total_size > inherent_size ? -1 : 0); + || maybe_gt (total_size, inherent_size) + ? -1 : 0); stack_slot = x; @@ -2185,9 +2198,9 @@ if (BYTES_BIG_ENDIAN) { adjust = inherent_size - total_size; - if (adjust) + if (maybe_ne (adjust, 0)) { - unsigned int total_bits = total_size * BITS_PER_UNIT; + poly_uint64 total_bits = total_size * BITS_PER_UNIT; machine_mode mem_mode = int_mode_for_size (total_bits, 1).else_blk (); stack_slot = adjust_address_nv (x, mem_mode, adjust); @@ -2201,9 +2214,10 @@ /* Reuse a stack slot if possible. 
*/ else if (spill_stack_slot[from_reg] != 0 - && spill_stack_slot_width[from_reg] >= total_size - && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg])) - >= inherent_size) + && known_ge (spill_stack_slot_width[from_reg], total_size) + && known_ge (GET_MODE_SIZE + (GET_MODE (spill_stack_slot[from_reg])), + inherent_size) && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align) x = spill_stack_slot[from_reg]; @@ -2219,16 +2233,21 @@ if (partial_subreg_p (mode, GET_MODE (spill_stack_slot[from_reg]))) mode = GET_MODE (spill_stack_slot[from_reg]); - if (spill_stack_slot_width[from_reg] > total_size) - total_size = spill_stack_slot_width[from_reg]; + total_size = ordered_max (total_size, + spill_stack_slot_width[from_reg]); if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align) min_align = MEM_ALIGN (spill_stack_slot[from_reg]); } + /* The sizes are taken from a subreg operation, which guarantees + that they're ordered. */ + gcc_checking_assert (ordered_p (total_size, inherent_size)); + /* Make a slot with that size. */ x = assign_stack_local (mode, total_size, min_align > inherent_align - || total_size > inherent_size ? -1 : 0); + || maybe_gt (total_size, inherent_size) + ? -1 : 0); stack_slot = x; /* Cancel the big-endian correction done in assign_stack_local. @@ -2237,9 +2256,9 @@ if (BYTES_BIG_ENDIAN) { adjust = GET_MODE_SIZE (mode) - total_size; - if (adjust) + if (maybe_ne (adjust, 0)) { - unsigned int total_bits = total_size * BITS_PER_UNIT; + poly_uint64 total_bits = total_size * BITS_PER_UNIT; machine_mode mem_mode = int_mode_for_size (total_bits, 1).else_blk (); stack_slot = adjust_address_nv (x, mem_mode, adjust); @@ -2351,9 +2370,9 @@ where the offsets disagree. */ for (i = 0; i < NUM_ELIMINABLE_REGS; i++) - if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i] - != (initial_p ? reg_eliminate[i].initial_offset - : reg_eliminate[i].offset)) + if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i], + (initial_p ? 
reg_eliminate[i].initial_offset + : reg_eliminate[i].offset))) reg_eliminate[i].can_eliminate = 0; return; @@ -2436,7 +2455,7 @@ /* If we reach here, all eliminations must be at their initial offset because we are doing a jump to a variable address. */ for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++) - if (p->offset != p->initial_offset) + if (maybe_ne (p->offset, p->initial_offset)) p->can_eliminate = 0; break; @@ -2593,8 +2612,9 @@ We special-case the commonest situation in eliminate_regs_in_insn, so just replace a PLUS with a PLUS here, unless inside a MEM. */ - if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1)) - && INTVAL (XEXP (x, 1)) == - ep->previous_offset) + if (mem_mode != 0 + && CONST_INT_P (XEXP (x, 1)) + && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset)) return ep->to_rtx; else return gen_rtx_PLUS (Pmode, ep->to_rtx, @@ -2811,8 +2831,8 @@ if (new_rtx != SUBREG_REG (x)) { - int x_size = GET_MODE_SIZE (GET_MODE (x)); - int new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); + poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x)); + poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); if (MEM_P (new_rtx) && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx)) @@ -2824,9 +2844,10 @@ So if the number of words is the same, preserve the subreg so that push_reload can see it. 
*/ && !(WORD_REGISTER_OPERATIONS - && (x_size - 1) / UNITS_PER_WORD - == (new_size -1 ) / UNITS_PER_WORD)) - || x_size == new_size) + && known_equal_after_align_down (x_size - 1, + new_size - 1, + UNITS_PER_WORD))) + || known_eq (x_size, new_size)) ) return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x)); else if (insn && GET_CODE (insn) == DEBUG_INSN) @@ -2860,6 +2881,7 @@ return x; case CLOBBER: + case CLOBBER_HIGH: case ASM_OPERANDS: gcc_assert (insn && DEBUG_INSN_P (insn)); break; @@ -2993,7 +3015,7 @@ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) if (ep->to_rtx == XEXP (x, 0)) { - int size = GET_MODE_SIZE (mem_mode); + poly_int64 size = GET_MODE_SIZE (mem_mode); /* If more bytes than MEM_MODE are pushed, account for them. */ #ifdef PUSH_ROUNDING @@ -3070,6 +3092,10 @@ elimination_effects (XEXP (x, 0), mem_mode); return; + case CLOBBER_HIGH: + /* CLOBBER_HIGH is only supported for LRA. */ + return; + case SET: /* Check for setting a register that we know about. */ if (REG_P (SET_DEST (x))) @@ -3202,7 +3228,7 @@ || GET_CODE (PATTERN (insn)) == USE || GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == ASM_INPUT); - if (DEBUG_INSN_P (insn)) + if (DEBUG_BIND_INSN_P (insn)) INSN_VAR_LOCATION_LOC (insn) = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn); return 0; @@ -3344,7 +3370,7 @@ if (plus_cst_src) { rtx reg = XEXP (plus_cst_src, 0); - HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1)); + poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1)); if (GET_CODE (reg) == SUBREG) reg = SUBREG_REG (reg); @@ -3364,7 +3390,7 @@ increase the cost of the insn by replacing a simple REG with (plus (reg sp) CST). So try only when we already had a PLUS before. 
*/ - if (offset == 0 || plus_src) + if (known_eq (offset, 0) || plus_src) { rtx new_src = plus_constant (GET_MODE (to_rtx), to_rtx, offset); @@ -3562,12 +3588,12 @@ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) { - if (ep->previous_offset != ep->offset && ep->ref_outside_mem) + if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) ep->can_eliminate = 0; ep->ref_outside_mem = 0; - if (ep->previous_offset != ep->offset) + if (maybe_ne (ep->previous_offset, ep->offset)) val = 1; } @@ -3733,7 +3759,7 @@ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) { - if (ep->previous_offset != ep->offset && ep->ref_outside_mem) + if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) ep->can_eliminate = 0; ep->ref_outside_mem = 0; @@ -3758,7 +3784,7 @@ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) { ep->previous_offset = ep->offset; - if (ep->can_eliminate && ep->offset != ep->initial_offset) + if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) num_not_at_initial_offset++; } } @@ -3791,6 +3817,9 @@ if (dest == hard_frame_pointer_rtx) return; + /* CLOBBER_HIGH is only supported for LRA. 
*/ + gcc_assert (GET_CODE (x) != CLOBBER_HIGH); + for (i = 0; i < NUM_ELIMINABLE_REGS; i++) if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx && (GET_CODE (x) != SET @@ -3812,7 +3841,7 @@ static bool verify_initial_elim_offsets (void) { - HOST_WIDE_INT t; + poly_int64 t; struct elim_table *ep; if (!num_eliminable) @@ -3822,7 +3851,7 @@ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) { INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t); - if (t != ep->initial_offset) + if (maybe_ne (t, ep->initial_offset)) return false; } @@ -3893,7 +3922,7 @@ { ep->offset = ep->previous_offset = offsets_at[label_nr - first_label_num][i]; - if (ep->can_eliminate && ep->offset != ep->initial_offset) + if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) num_not_at_initial_offset++; } } @@ -4095,7 +4124,8 @@ /* Allocate the tables used to store offset information at labels. */ offsets_known_at = XNEWVEC (char, num_labels); - offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT)); + offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS]) + xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64)); /* Look for REG_EQUIV notes; record what each pseudo is equivalent to. If DO_SUBREGS is true, also find all paradoxical subregs and @@ -4425,6 +4455,7 @@ case PC: case USE: case CLOBBER: + case CLOBBER_HIGH: return; case SUBREG: @@ -4645,7 +4676,7 @@ { remove_note (insn, p); fixup_args_size_notes (prev, PREV_INSN (next), - INTVAL (XEXP (p, 0))); + get_args_size (p)); } /* If this was an ASM, make sure that all the reload insns @@ -4879,7 +4910,7 @@ to be forgotten later. */ static void -forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED, +forget_old_reloads_1 (rtx x, const_rtx setter, void *data) { unsigned int regno; @@ -4899,6 +4930,9 @@ if (!REG_P (x)) return; + /* CLOBBER_HIGH is only supported for LRA. 
*/ + gcc_assert (setter == NULL_RTX || GET_CODE (setter) != CLOBBER_HIGH); + regno = REGNO (x); if (regno >= FIRST_PSEUDO_REGISTER) @@ -6347,12 +6381,12 @@ SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo, otherwise it is NULL. */ -static int +static poly_int64 compute_reload_subreg_offset (machine_mode outermode, rtx subreg, machine_mode innermode) { - int outer_offset; + poly_int64 outer_offset; machine_mode middlemode; if (!subreg) @@ -6506,7 +6540,7 @@ if (inheritance) { - int byte = 0; + poly_int64 byte = 0; int regno = -1; machine_mode mode = VOIDmode; rtx subreg = NULL_RTX; @@ -6556,8 +6590,9 @@ if (regno >= 0 && reg_last_reload_reg[regno] != 0 - && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])) - >= GET_MODE_SIZE (mode) + byte) + && (known_ge + (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])), + GET_MODE_SIZE (mode) + byte)) /* Verify that the register it's in can be used in mode MODE. */ && (REG_CAN_CHANGE_MODE_P @@ -7345,12 +7380,12 @@ /* Adjust any debug insns between temp and insn. */ while ((temp = NEXT_INSN (temp)) != insn) - if (DEBUG_INSN_P (temp)) + if (DEBUG_BIND_INSN_P (temp)) INSN_VAR_LOCATION_LOC (temp) = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp), old, reloadreg); else - gcc_assert (NOTE_P (temp)); + gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp)); } else { @@ -8006,8 +8041,8 @@ /* Likewise for a SUBREG of an operand that dies. */ else if (GET_CODE (old) == SUBREG && REG_P (SUBREG_REG (old)) - && 0 != (note = find_reg_note (insn, REG_UNUSED, - SUBREG_REG (old)))) + && (note = find_reg_note (insn, REG_UNUSED, + SUBREG_REG (old))) != 0) { XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx); return; @@ -9074,7 +9109,7 @@ This cannot be deduced from VALUE. */ static void -inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount) +inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount) { /* REG or MEM to be copied and incremented. 
*/ rtx incloc = find_replacement (&XEXP (value, 0)); @@ -9104,7 +9139,7 @@ if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC) inc_amount = -inc_amount; - inc = GEN_INT (inc_amount); + inc = gen_int_mode (inc_amount, Pmode); } /* If this is post-increment, first copy the location to the reload reg. */