diff gcc/function.c @ 132:d34655255c78
update gcc-8.2
| author   | mir3636                         |
|----------|---------------------------------|
| date     | Thu, 25 Oct 2018 10:21:07 +0900 |
| parents  | 4c56639505ff 84e7813d76e9       |
| children | 4e440907fcbf                    |
```diff
--- a/gcc/function.c	Thu Oct 25 08:08:40 2018 +0900
+++ b/gcc/function.c	Thu Oct 25 10:21:07 2018 +0900
@@ -1,5 +1,5 @@
 /* Expands front end tree to back end RTL for GCC.
-   Copyright (C) 1987-2017 Free Software Foundation, Inc.
+   Copyright (C) 1987-2018 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -73,12 +73,12 @@
 #include "shrink-wrap.h"
 #include "toplev.h"
 #include "rtl-iter.h"
-#include "tree-chkp.h"
-#include "rtl-chkp.h"
 #include "tree-dfa.h"
 #include "tree-ssa.h"
 #include "stringpool.h"
 #include "attribs.h"
+#include "gimple.h"
+#include "options.h"
@@ -220,7 +220,7 @@
    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
    the caller may have to do that.  */
 
-HOST_WIDE_INT
+poly_int64
 get_frame_size (void)
 {
   if (FRAME_GROWS_DOWNWARD)
@@ -234,20 +234,29 @@
    return FALSE.  */
 
 bool
-frame_offset_overflow (HOST_WIDE_INT offset, tree func)
-{
-  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
-
-  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
-	      /* Leave room for the fixed part of the frame.  */
-	      - 64 * UNITS_PER_WORD)
+frame_offset_overflow (poly_int64 offset, tree func)
+{
+  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
+  unsigned HOST_WIDE_INT limit
+    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
+       /* Leave room for the fixed part of the frame.  */
+       - 64 * UNITS_PER_WORD);
+
+  if (!coeffs_in_range_p (size, 0U, limit))
     {
-      error_at (DECL_SOURCE_LOCATION (func),
-		"total size of local objects too large");
-      return TRUE;
+      unsigned HOST_WIDE_INT hwisize;
+      if (size.is_constant (&hwisize))
+	error_at (DECL_SOURCE_LOCATION (func),
+		  "total size of local objects %wu exceeds maximum %wu",
+		  hwisize, limit);
+      else
+	error_at (DECL_SOURCE_LOCATION (func),
+		  "total size of local objects exceeds maximum %wu",
+		  limit);
+      return true;
     }
 
-  return FALSE;
+  return false;
 }
 
 /* Return the minimum spill slot alignment for a register of mode MODE.  */
@@ -286,11 +295,11 @@
    given a start/length pair that lies at the end of the frame.  */
 
 static bool
-try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
-		     HOST_WIDE_INT size, unsigned int alignment,
-		     HOST_WIDE_INT *poffset)
-{
-  HOST_WIDE_INT this_frame_offset;
+try_fit_stack_local (poly_int64 start, poly_int64 length,
+		     poly_int64 size, unsigned int alignment,
+		     poly_int64_pod *poffset)
+{
+  poly_int64 this_frame_offset;
   int frame_off, frame_alignment, frame_phase;
 
   /* Calculate how many bytes the start of local variables is off from
@@ -301,33 +310,31 @@
 
   /* Round the frame offset to the specified alignment.  */
 
-  /*  We must be careful here, since FRAME_OFFSET might be negative and
-      division with a negative dividend isn't as well defined as we might
-      like.  So we instead assume that ALIGNMENT is a power of two and
-      use logical operations which are unambiguous.  */
   if (FRAME_GROWS_DOWNWARD)
     this_frame_offset
-      = (FLOOR_ROUND (start + length - size - frame_phase,
-		      (unsigned HOST_WIDE_INT) alignment)
+      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
	 + frame_phase);
   else
     this_frame_offset
-      = (CEIL_ROUND (start - frame_phase,
-		     (unsigned HOST_WIDE_INT) alignment)
-	 + frame_phase);
+      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
 
   /* See if it fits.  If this space is at the edge of the frame,
      consider extending the frame to make it fit.  Our caller relies on
      this when allocating a new slot.  */
-  if (frame_offset == start && this_frame_offset < frame_offset)
-    frame_offset = this_frame_offset;
-  else if (this_frame_offset < start)
-    return false;
-  else if (start + length == frame_offset
-	   && this_frame_offset + size > start + length)
-    frame_offset = this_frame_offset + size;
-  else if (this_frame_offset + size > start + length)
-    return false;
+  if (maybe_lt (this_frame_offset, start))
+    {
+      if (known_eq (frame_offset, start))
+	frame_offset = this_frame_offset;
+      else
+	return false;
+    }
+  else if (maybe_gt (this_frame_offset + size, start + length))
+    {
+      if (known_eq (frame_offset, start + length))
+	frame_offset = this_frame_offset + size;
+      else
+	return false;
+    }
 
   *poffset = this_frame_offset;
   return true;
@@ -338,7 +345,7 @@
    function's frame_space_list.  */
 
 static void
-add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
+add_frame_space (poly_int64 start, poly_int64 end)
 {
   struct frame_space *space = ggc_alloc<frame_space> ();
   space->next = crtl->frame_space_list;
@@ -365,12 +372,12 @@
    We do not round to stack_boundary here.  */
 
 rtx
-assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
+assign_stack_local_1 (machine_mode mode, poly_int64 size,
		      int align, int kind)
 {
   rtx x, addr;
-  int bigend_correction = 0;
-  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
+  poly_int64 bigend_correction = 0;
+  poly_int64 slot_offset = 0, old_frame_offset;
   unsigned int alignment, alignment_in_bits;
 
   if (align == 0)
@@ -381,7 +388,7 @@
   else if (align == -1)
     {
       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
-      size = CEIL_ROUND (size, alignment);
+      size = aligned_upper_bound (size, alignment);
     }
   else if (align == -2)
     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
@@ -417,7 +424,7 @@
	 requested size is 0 or the estimated stack
	 alignment >= mode alignment.  */
       gcc_assert ((kind & ASLK_REDUCE_ALIGN)
-		  || size == 0
+		  || known_eq (size, 0)
		  || (crtl->stack_alignment_estimated
		      >= GET_MODE_ALIGNMENT (mode)));
       alignment_in_bits = crtl->stack_alignment_estimated;
@@ -432,7 +439,7 @@
   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
     crtl->max_used_stack_slot_alignment = alignment_in_bits;
 
-  if (mode != BLKmode || size != 0)
+  if (mode != BLKmode || maybe_ne (size, 0))
     {
       if (kind & ASLK_RECORD_PAD)
	 {
@@ -445,9 +452,9 @@
					alignment, &slot_offset))
	       continue;
	     *psp = space->next;
-	     if (slot_offset > space->start)
+	     if (known_gt (slot_offset, space->start))
	       add_frame_space (space->start, slot_offset);
-	     if (slot_offset + size < space->start + space->length)
+	     if (known_lt (slot_offset + size, space->start + space->length))
	       add_frame_space (slot_offset + size,
				space->start + space->length);
	     goto found_space;
@@ -469,9 +476,9 @@
 
       if (kind & ASLK_RECORD_PAD)
	 {
-	   if (slot_offset > frame_offset)
+	   if (known_gt (slot_offset, frame_offset))
	     add_frame_space (frame_offset, slot_offset);
-	   if (slot_offset + size < old_frame_offset)
+	   if (known_lt (slot_offset + size, old_frame_offset))
	     add_frame_space (slot_offset + size, old_frame_offset);
	 }
     }
@@ -482,9 +489,9 @@
 
       if (kind & ASLK_RECORD_PAD)
	 {
-	   if (slot_offset > old_frame_offset)
+	   if (known_gt (slot_offset, old_frame_offset))
	     add_frame_space (old_frame_offset, slot_offset);
-	   if (slot_offset + size < frame_offset)
+	   if (known_lt (slot_offset + size, frame_offset))
	     add_frame_space (slot_offset + size, frame_offset);
	 }
     }
@@ -492,8 +499,17 @@
  found_space:
   /* On a big-endian machine, if we are allocating more space than we will
      use, use the least significant bytes of those that are allocated.  */
-  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
-    bigend_correction = size - GET_MODE_SIZE (mode);
+  if (mode != BLKmode)
+    {
+      /* The slot size can sometimes be smaller than the mode size;
+	 e.g. the rs6000 port allocates slots with a vector mode
+	 that have the size of only one element.  However, the slot
+	 size must always be ordered wrt to the mode size, in the
+	 same way as for a subreg.  */
+      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
+      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
+	bigend_correction = size - GET_MODE_SIZE (mode);
+    }
 
   /* If we have already instantiated virtual registers, return the actual
      address relative to the frame pointer.  */
@@ -523,7 +539,7 @@
 /* Wrap up assign_stack_local_1 with last parameter as false.  */
 
 rtx
-assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
+assign_stack_local (machine_mode mode, poly_int64 size, int align)
 {
   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
 }
@@ -550,7 +566,7 @@
   /* The rtx to used to reference the slot.  */
   rtx slot;
   /* The size, in units, of the slot.  */
-  HOST_WIDE_INT size;
+  poly_int64 size;
   /* The type of the object in the slot, or zero if it doesn't correspond
      to a type.  We use this to determine whether a slot can be reused.
      It can be reused if objects of the type of the new slot will always
@@ -564,10 +580,10 @@
   int level;
   /* The offset of the slot from the frame_pointer, including extra space
      for alignment.  This info is for combine_temp_slots.  */
-  HOST_WIDE_INT base_offset;
+  poly_int64 base_offset;
   /* The size of the slot, including extra space for alignment.  This
      info is for combine_temp_slots.  */
-  HOST_WIDE_INT full_size;
+  poly_int64 full_size;
 };
 
 /* Entry for the below hash table.  */
@@ -745,18 +761,14 @@
     return p;
 
   /* Last resort: Address is a virtual stack var address.  */
-  if (GET_CODE (x) == PLUS
-      && XEXP (x, 0) == virtual_stack_vars_rtx
-      && CONST_INT_P (XEXP (x, 1)))
+  poly_int64 offset;
+  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
     {
       int i;
       for (i = max_slot_level (); i >= 0; i--)
	 for (p = *temp_slots_at_level (i); p; p = p->next)
-	   {
-	     if (INTVAL (XEXP (x, 1)) >= p->base_offset
-		 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
-	       return p;
-	   }
+	   if (known_in_range_p (offset, p->base_offset, p->full_size))
+	     return p;
     }
 
   return NULL;
@@ -773,16 +785,13 @@
    TYPE is the type that will be used for the stack slot.  */
 
 rtx
-assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
-			    tree type)
+assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
 {
   unsigned int align;
   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
   rtx slot;
 
-  /* If SIZE is -1 it means that somebody tried to allocate a temporary
-     of a variable size.  */
-  gcc_assert (size != -1);
+  gcc_assert (known_size_p (size));
 
   align = get_stack_local_alignment (type, mode);
 
@@ -797,13 +806,16 @@
     {
       for (p = avail_temp_slots; p; p = p->next)
	 {
-	   if (p->align >= align && p->size >= size
+	   if (p->align >= align
+	       && known_ge (p->size, size)
	       && GET_MODE (p->slot) == mode
	       && objects_must_conflict_p (p->type, type)
-	       && (best_p == 0 || best_p->size > p->size
-		   || (best_p->size == p->size && best_p->align > p->align)))
+	       && (best_p == 0
+		   || (known_eq (best_p->size, p->size)
+		       ? best_p->align > p->align
+		       : known_ge (best_p->size, p->size))))
	     {
-	       if (p->align == align && p->size == size)
+	       if (p->align == align && known_eq (p->size, size))
		 {
		   selected = p;
		   cut_slot_from_list (selected, &avail_temp_slots);
@@ -827,9 +839,9 @@
       if (GET_MODE (best_p->slot) == BLKmode)
	 {
	   int alignment = best_p->align / BITS_PER_UNIT;
-	   HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
-
-	   if (best_p->size - rounded_size >= alignment)
+	   poly_int64 rounded_size = aligned_upper_bound (size, alignment);
+
+	   if (known_ge (best_p->size - rounded_size, alignment))
	     {
	       p = ggc_alloc<temp_slot> ();
	       p->in_use = 0;
@@ -852,7 +864,7 @@
   /* If we still didn't find one, make a new temporary.  */
   if (selected == 0)
     {
-      HOST_WIDE_INT frame_offset_old = frame_offset;
+      poly_int64 frame_offset_old = frame_offset;
 
       p = ggc_alloc<temp_slot> ();
 
@@ -866,9 +878,9 @@
       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
       p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
-				       ? CEIL_ROUND (size,
-						     (int) align
-						     / BITS_PER_UNIT)
+				       ? aligned_upper_bound (size,
							      (int) align
							      / BITS_PER_UNIT)
				       : size),
				      align, 0);
@@ -933,7 +945,7 @@
    reuse.  First two arguments are same as in preceding function.  */
 
 rtx
-assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
+assign_stack_temp (machine_mode mode, poly_int64 size)
 {
   return assign_stack_temp_for_type (mode, size, NULL_TREE);
 }
@@ -973,25 +985,26 @@
 
   if (mode == BLKmode || memory_required)
     {
-      HOST_WIDE_INT size = int_size_in_bytes (type);
+      poly_int64 size;
       rtx tmp;
 
-      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
-	 problems with allocating the stack space.  */
-      if (size == 0)
-	size = 1;
-
       /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
-      else if (size == -1)
+      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
	 size = max_int_size_in_bytes (type);
 
+      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
+	 problems with allocating the stack space.  */
+      if (known_eq (size, 0))
+	size = 1;
+
       /* The size of the temporary may be too large to fit into an integer.  */
       /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
-      if (decl && size == -1
+      if (decl
+	  && !known_size_p (size)
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	 {
	   error ("size of variable %q+D is too large", decl);
@@ -1052,14 +1065,14 @@
       if (GET_MODE (q->slot) != BLKmode)
	 continue;
 
-      if (p->base_offset + p->full_size == q->base_offset)
+      if (known_eq (p->base_offset + p->full_size, q->base_offset))
	 {
	   /* Q comes after P; combine Q into P.  */
	   p->size += q->size;
	   p->full_size += q->full_size;
	   delete_q = 1;
	 }
-      else if (q->base_offset + q->full_size == p->base_offset)
+      else if (known_eq (q->base_offset + q->full_size, p->base_offset))
	 {
	   /* P comes after Q; combine P into Q.  */
	   q->size += p->size;
@@ -1364,11 +1377,11 @@
    routines.  They contain the offsets of the virtual registers from their
    respective hard registers.  */
 
-static int in_arg_offset;
-static int var_offset;
-static int dynamic_offset;
-static int out_arg_offset;
-static int cfa_offset;
+static poly_int64 in_arg_offset;
+static poly_int64 var_offset;
+static poly_int64 dynamic_offset;
+static poly_int64 out_arg_offset;
+static poly_int64 cfa_offset;
 
 /* In most machines, the stack pointer register is equivalent to the bottom
    of the stack.  */
@@ -1404,7 +1417,7 @@
			  : 0) + (STACK_POINTER_OFFSET))
 #else
 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
-((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)	      \
+  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
  + (STACK_POINTER_OFFSET))
 #endif
 #endif
 
@@ -1415,10 +1428,10 @@
    offset indirectly through the pointer.  Otherwise, return 0.  */
 
 static rtx
-instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
+instantiate_new_reg (rtx x, poly_int64_pod *poffset)
 {
   rtx new_rtx;
-  HOST_WIDE_INT offset;
+  poly_int64 offset;
 
   if (x == virtual_incoming_args_rtx)
     {
@@ -1477,7 +1490,7 @@
   if (rtx x = *loc)
     {
       rtx new_rtx;
-      HOST_WIDE_INT offset;
+      poly_int64 offset;
       switch (GET_CODE (x))
	 {
	 case REG:
@@ -1530,7 +1543,7 @@
 static void
 instantiate_virtual_regs_in_insn (rtx_insn *insn)
 {
-  HOST_WIDE_INT offset;
+  poly_int64 offset;
   int insn_code, i;
   bool any_change = false;
   rtx set, new_rtx, x;
@@ -1569,7 +1582,8 @@
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
-      if (new_rtx && offset != 0
+      if (new_rtx
+	  && maybe_ne (offset, 0)
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	 {
@@ -1595,17 +1609,18 @@
 
       /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
+      poly_int64 delta;
       if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
-	  && CONST_INT_P (recog_data.operand[2])
+	  && poly_int_rtx_p (recog_data.operand[2], &delta)
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	 {
-	   offset += INTVAL (recog_data.operand[2]);
+	   offset += delta;
 
	   /* If the sum is zero, then replace with a plain move.  */
-	   if (offset == 0
+	   if (known_eq (offset, 0)
	       && REG_P (SET_DEST (set))
	       && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	     {
@@ -1683,7 +1698,7 @@
	   new_rtx = instantiate_new_reg (x, &offset);
	   if (new_rtx == NULL)
	     continue;
-	   if (offset == 0)
+	   if (known_eq (offset, 0))
	     x = new_rtx;
	   else
	     {
@@ -1708,7 +1723,7 @@
	   new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	   if (new_rtx == NULL)
	     continue;
-	   if (offset != 0)
+	   if (maybe_ne (offset, 0))
	     {
	       start_sequence ();
	       new_rtx = expand_simple_binop
@@ -1953,10 +1968,11 @@
	 Fortunately, they shouldn't contain virtual registers either.  */
       if (GET_CODE (PATTERN (insn)) == USE
	   || GET_CODE (PATTERN (insn)) == CLOBBER
-	   || GET_CODE (PATTERN (insn)) == ASM_INPUT)
+	   || GET_CODE (PATTERN (insn)) == ASM_INPUT
+	   || DEBUG_MARKER_INSN_P (insn))
	 continue;
-      else if (DEBUG_INSN_P (insn))
-	instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
+      else if (DEBUG_BIND_INSN_P (insn))
+	instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
       else
	 instantiate_virtual_regs_in_insn (insn);
 
@@ -2086,6 +2102,9 @@
   if (TREE_ADDRESSABLE (type))
     return 1;
 
+  if (TYPE_EMPTY_P (type))
+    return 0;
+
   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
     return 1;
 
@@ -2189,14 +2208,6 @@
       return false;
     }
 
-  /* Decl is implicitly addressible by bound stores and loads
-     if it is an aggregate holding bounds.  */
-  if (chkp_function_instrumented_p (current_function_decl)
-      && TREE_TYPE (decl)
-      && !BOUNDED_P (decl)
-      && chkp_type_has_pointer (TREE_TYPE (decl)))
-    return false;
-
   /* Only register-like things go in registers.  */
   if (DECL_MODE (decl) == BLKmode)
     return false;
@@ -2268,15 +2279,6 @@
   BOOL_BITFIELD loaded_in_reg : 1;
 };
 
-struct bounds_parm_data
-{
-  assign_parm_data_one parm_data;
-  tree bounds_parm;
-  tree ptr_parm;
-  rtx ptr_entry;
-  int bound_no;
-};
-
 /* A subroutine of assign_parms.  Initialize ALL.  */
 
 static void
@@ -2391,23 +2393,6 @@
 
       fnargs.safe_insert (0, decl);
       all->function_result_decl = decl;
-
-      /* If function is instrumented then bounds of the
-	 passed structure address is the second argument.  */
-      if (chkp_function_instrumented_p (fndecl))
-	{
-	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
-			     PARM_DECL, get_identifier (".result_bnd"),
-			     pointer_bounds_type_node);
-	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
-	  DECL_ARTIFICIAL (decl) = 1;
-	  DECL_NAMELESS (decl) = 1;
-	  TREE_CONSTANT (decl) = 1;
-
-	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
-	  DECL_CHAIN (all->orig_fnargs) = decl;
-	  fnargs.safe_insert (1, decl);
-	}
     }
 
   /* If the target wants to split complex arguments into scalars, do so.  */
@@ -2530,6 +2515,9 @@
       return;
     }
 
+  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
+					    data->passed_type);
+
   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type,
@@ -2548,7 +2536,7 @@
      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
      as it was the previous time.  */
-  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
+  in_regs = (entry_parm != 0);
 #ifdef STACK_PARMS_IN_REG_PARM_AREA
   in_regs = true;
 #endif
@@ -2637,12 +2625,8 @@
 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
 {
-  /* Bounds are never passed on the stack to keep compatibility
-     with not instrumented code.  */
-  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
-    return false;
   /* Trivially true if we've no incoming register.  */
-  else if (data->entry_parm == NULL)
+  if (data->entry_parm == NULL)
     ;
   /* Also true if we're partially in registers and partially not,
      since we've arranged to drop the entire argument on the stack.  */
@@ -2700,9 +2684,9 @@
     set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
   if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
     {
-      int offset = subreg_lowpart_offset (DECL_MODE (parm),
-					  data->promoted_mode);
-      if (offset)
+      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
						 data->promoted_mode);
+      if (maybe_ne (offset, 0))
	 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
     }
 }
@@ -2715,12 +2699,15 @@
      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
      intentionally forcing upward padding.  Otherwise we have to come
      up with a guess at the alignment based on OFFSET_RTX.  */
+  poly_int64 offset;
   if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
     align = boundary;
-  else if (CONST_INT_P (offset_rtx))
+  else if (poly_int_rtx_p (offset_rtx, &offset))
     {
-      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
-      align = least_bit_hwi (align);
+      align = least_bit_hwi (boundary);
+      unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
+      if (offset_align != 0)
+	align = MIN (align, offset_align);
     }
   set_mem_align (stack_parm, align);
@@ -2867,7 +2854,7 @@
   /* Only assign_parm_setup_block knows how to deal with register arguments
      that are padded at the least significant end.  */
   if (REG_P (data->entry_parm)
-      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
+      && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
     return true;
@@ -2930,7 +2917,7 @@
	 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
       stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
-      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
+      if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
	 PUT_MODE (stack_parm, GET_MODE (entry_parm));
       set_mem_attributes (stack_parm, parm, 1);
     }
@@ -3426,12 +3413,13 @@
 
   if (data->stack_parm)
     {
-      int offset = subreg_lowpart_offset (data->nominal_mode,
-					  GET_MODE (data->stack_parm));
+      poly_int64 offset
+	= subreg_lowpart_offset (data->nominal_mode,
+				 GET_MODE (data->stack_parm));
       /* ??? This may need a big-endian conversion on sparc64.  */
       data->stack_parm
	 = adjust_address (data->stack_parm, data->nominal_mode, 0);
-      if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
+      if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
	 set_mem_offset (data->stack_parm,
			 MEM_OFFSET (data->stack_parm) + offset);
     }
@@ -3553,121 +3541,6 @@
     }
 }
 
-/* Load bounds of PARM from bounds table.  */
-static void
-assign_parm_load_bounds (struct assign_parm_data_one *data,
-			 tree parm,
-			 rtx entry,
-			 unsigned bound_no)
-{
-  bitmap_iterator bi;
-  unsigned i, offs = 0;
-  int bnd_no = -1;
-  rtx slot = NULL, ptr = NULL;
-
-  if (parm)
-    {
-      bitmap slots;
-      bitmap_obstack_initialize (NULL);
-      slots = BITMAP_ALLOC (NULL);
-      chkp_find_bound_slots (TREE_TYPE (parm), slots);
-      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
-	{
-	  if (bound_no)
-	    bound_no--;
-	  else
-	    {
-	      bnd_no = i;
-	      break;
-	    }
-	}
-      BITMAP_FREE (slots);
-      bitmap_obstack_release (NULL);
-    }
-
-  /* We may have bounds not associated with any pointer.  */
-  if (bnd_no != -1)
-    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
-
-  /* Find associated pointer.  */
-  if (bnd_no == -1)
-    {
-      /* If bounds are not associated with any bounds,
-	 then it is passed in a register or special slot.  */
-      gcc_assert (data->entry_parm);
-      ptr = const0_rtx;
-    }
-  else if (MEM_P (entry))
-    slot = adjust_address (entry, Pmode, offs);
-  else if (REG_P (entry))
-    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
-  else if (GET_CODE (entry) == PARALLEL)
-    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
-  else
-    gcc_unreachable ();
-  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
-							data->entry_parm);
-}
-
-/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
-
-static void
-assign_bounds (vec<bounds_parm_data> &bndargs,
-	       struct assign_parm_data_all &all,
-	       bool assign_regs, bool assign_special,
-	       bool assign_bt)
-{
-  unsigned i, pass;
-  bounds_parm_data *pbdata;
-
-  if (!bndargs.exists ())
-    return;
-
-  /* We make few passes to store input bounds.  Firstly handle bounds
-     passed in registers.  After that we load bounds passed in special
-     slots.  Finally we load bounds from Bounds Table.  */
-  for (pass = 0; pass < 3; pass++)
-    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
-      {
-	/* Pass 0 => regs only.  */
-	if (pass == 0
-	    && (!assign_regs
-		||(!pbdata->parm_data.entry_parm
-		   || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
-	  continue;
-	/* Pass 1 => slots only.  */
-	else if (pass == 1
-		 && (!assign_special
-		     || (!pbdata->parm_data.entry_parm
-			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
-	  continue;
-	/* Pass 2 => BT only.  */
-	else if (pass == 2
-		 && (!assign_bt
-		     || pbdata->parm_data.entry_parm))
-	  continue;
-
-	if (!pbdata->parm_data.entry_parm
-	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
-	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
-				   pbdata->ptr_entry, pbdata->bound_no);
-
-	set_decl_incoming_rtl (pbdata->bounds_parm,
-			       pbdata->parm_data.entry_parm, false);
-
-	if (assign_parm_setup_block_p (&pbdata->parm_data))
-	  assign_parm_setup_block (&all, pbdata->bounds_parm,
-				   &pbdata->parm_data);
-	else if (pbdata->parm_data.passed_pointer
-		 || use_register_for_decl (pbdata->bounds_parm))
-	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
-				 &pbdata->parm_data);
-	else
-	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
-				   &pbdata->parm_data);
-      }
-}
-
 #ifndef noCbC
 #include "c/cbc-tree.h"
 #endif
@@ -3681,11 +3554,7 @@
   struct assign_parm_data_all all;
   tree parm;
   vec<tree> fnargs;
-  unsigned i, bound_no = 0;
-  tree last_arg = NULL;
-  rtx last_arg_entry = NULL;
-  vec<bounds_parm_data> bndargs = vNULL;
-  bounds_parm_data bdata;
+  unsigned i;
 
   crtl->args.internal_arg_pointer = targetm.calls.internal_arg_pointer ();
 
@@ -3736,15 +3605,6 @@
	   assign_parm_find_stack_rtl (parm, &data);
	   assign_parm_adjust_entry_rtl (&data);
	 }
-      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
-	{
-	  /* Remember where last non bounds arg was passed in case
-	     we have to load associated bounds for it from Bounds
-	     Table.  */
-	  last_arg = parm;
-	  last_arg_entry = data.entry_parm;
-	  bound_no = 0;
-	}
 
       /* Record permanently how this parm was passed.  */
       if (data.passed_pointer)
	 {
@@ -3758,64 +3618,21 @@
 
       assign_parm_adjust_stack_rtl (&data);
 
-      /* Bounds should be loaded in the particular order to
-	 have registers allocated correctly.  Collect info about
-	 input bounds and load them later.  */
-      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
-	{
-	  /* Expect bounds in instrumented functions only.  */
-	  gcc_assert (chkp_function_instrumented_p (fndecl));
-
-	  bdata.parm_data = data;
-	  bdata.bounds_parm = parm;
-	  bdata.ptr_parm = last_arg;
-	  bdata.ptr_entry = last_arg_entry;
-	  bdata.bound_no = bound_no;
-	  bndargs.safe_push (bdata);
-	}
+      if (assign_parm_setup_block_p (&data))
+	assign_parm_setup_block (&all, parm, &data);
+      else if (data.passed_pointer || use_register_for_decl (parm))
+	assign_parm_setup_reg (&all, parm, &data);
       else
-	{
-	  if (assign_parm_setup_block_p (&data))
-	    assign_parm_setup_block (&all, parm, &data);
-	  else if (data.passed_pointer || use_register_for_decl (parm))
-	    assign_parm_setup_reg (&all, parm, &data);
-	  else
-	    assign_parm_setup_stack (&all, parm, &data);
-	}
+	assign_parm_setup_stack (&all, parm, &data);
 
       if (cfun->stdarg && !DECL_CHAIN (parm))
-	{
-	  int pretend_bytes = 0;
-
-	  assign_parms_setup_varargs (&all, &data, false);
-
-	  if (chkp_function_instrumented_p (fndecl))
-	    {
-	      /* We expect this is the last parm.  Otherwise it is wrong
-		 to assign bounds right now.  */
-	      gcc_assert (i == (fnargs.length () - 1));
-	      assign_bounds (bndargs, all, true, false, false);
-	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
-	      assign_bounds (bndargs, all, false, true, true);
-	      bndargs.release ();
-	    }
-	}
+	assign_parms_setup_varargs (&all, &data, false);
 
       /* Update info on where next arg arrives in registers.  */
       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);
-
-      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
-	bound_no++;
     }
 
-  assign_bounds (bndargs, all, true, true, true);
-  bndargs.release ();
-
   if (targetm.calls.split_complex_arg)
     assign_parms_unsplit_complex (&all, fnargs);
 
@@ -3890,14 +3707,15 @@
 
   /* Adjust function incoming argument size for alignment and
      minimum length.  */
 
-  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
-  crtl->args.size = CEIL_ROUND (crtl->args.size,
-				PARM_BOUNDARY / BITS_PER_UNIT);
+  crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
+  crtl->args.size = aligned_upper_bound (crtl->args.size,
					 PARM_BOUNDARY / BITS_PER_UNIT);
 
   if (ARGS_GROW_DOWNWARD)
     {
       crtl->args.arg_offset_rtx
-	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
+	= (all.stack_args_size.var == 0
+	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
@@ -3942,10 +3760,6 @@
 
       real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
						    fndecl, true);
-      if (chkp_function_instrumented_p (fndecl))
-	crtl->return_bnd
-	  = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
-						      fndecl, true);
       REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
       /* The delay slot scheduler assumes that crtl->return_rtx
	 holds the hard register containing the return value, not a
@@ -3985,7 +3799,7 @@
    statements to add to the beginning of the function.  */
 
 gimple_seq
-gimplify_parameters (void)
+gimplify_parameters (gimple_seq *cleanup)
 {
   struct assign_parm_data_all all;
   tree parm;
@@ -4050,6 +3864,16 @@
	       else if (TREE_CODE (type) == COMPLEX_TYPE
			|| TREE_CODE (type) == VECTOR_TYPE)
		 DECL_GIMPLE_REG_P (local) = 1;
+
+	      if (!is_gimple_reg (local)
+		  && flag_stack_reuse != SR_NONE)
+		{
+		  tree clobber = build_constructor (type, NULL);
+		  gimple *clobber_stmt;
+		  TREE_THIS_VOLATILE (clobber) = 1;
+		  clobber_stmt = gimple_build_assign (local, clobber);
+		  gimple_seq_add_stmt (cleanup, clobber_stmt);
+		}
	     }
	   else
	     {
@@ -4137,22 +3961,27 @@
 {
   if (reg_parm_stack_space > 0)
     {
-      if (initial_offset_ptr->var)
+      if (initial_offset_ptr->var
+	  || !ordered_p (initial_offset_ptr->constant,
			 reg_parm_stack_space))
	 {
	   initial_offset_ptr->var
	     = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			   ssize_int (reg_parm_stack_space));
	   initial_offset_ptr->constant = 0;
	 }
-      else if (initial_offset_ptr->constant < reg_parm_stack_space)
-	initial_offset_ptr->constant = reg_parm_stack_space;
+      else
+	initial_offset_ptr->constant
+	  = ordered_max (initial_offset_ptr->constant,
			 reg_parm_stack_space);
     }
 
   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
 
-  sizetree
-    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
+  sizetree = (type
	      ? arg_size_in_bytes (type)
	      : size_int (GET_MODE_SIZE (passed_mode)));
   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							       type);
@@ -4271,9 +4100,9 @@
		struct args_size *alignment_pad)
 {
   tree save_var = NULL_TREE;
-  HOST_WIDE_INT save_constant = 0;
+  poly_int64 save_constant = 0;
   int boundary_in_bytes = boundary / BITS_PER_UNIT;
-  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
+  poly_int64 sp_offset = STACK_POINTER_OFFSET;
 
 #ifdef SPARC_STACK_BOUNDARY_HACK
   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
@@ -4294,7 +4123,10 @@
 
   if (boundary > BITS_PER_UNIT)
     {
+      int misalign;
       if (offset_ptr->var
+	  || !known_misalignment (offset_ptr->constant + sp_offset,
				  boundary_in_bytes, &misalign))
	 {
	   tree sp_offset_tree = ssize_int (sp_offset);
	   tree offset = size_binop (PLUS_EXPR,
@@ -4315,13 +4147,13 @@
	 }
       else
	 {
-	  offset_ptr->constant = -sp_offset +
-	    (ARGS_GROW_DOWNWARD
-	    ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
-	    : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
-
-	  if (boundary > PARM_BOUNDARY)
-	    alignment_pad->constant = offset_ptr->constant - save_constant;
+	  if (ARGS_GROW_DOWNWARD)
+	    offset_ptr->constant -= misalign;
+	  else
+	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
+
+	  if (boundary > PARM_BOUNDARY)
+	    alignment_pad->constant = offset_ptr->constant - save_constant;
	 }
     }
 }
@@ -4330,8 +4162,10 @@
 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
 {
   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
-  if (passed_mode != BLKmode)
-    offset_ptr->constant += -GET_MODE_SIZE (passed_mode) & (align - 1);
+  int misalign;
+  if (passed_mode != BLKmode
+      && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
+    offset_ptr->constant += -misalign & (align - 1);
   else
     {
       if (TREE_CODE (sizetree) != INTEGER_CST
@@ -4720,11 +4554,11 @@
   int n_blocks;
   tree *block_vector;
 
-  /* For SDB and XCOFF debugging output, we start numbering the blocks
+  /* For XCOFF debugging output, we start numbering the blocks
      from 1 within each function, rather than keeping a running count.  */
-#if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
-  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
+#if defined (XCOFF_DEBUGGING_INFO)
+  if (write_symbols == XCOFF_DEBUG)
     next_block_index = 1;
 #endif
@@ -4793,6 +4627,9 @@
   targetm.set_current_function (fndecl);
   this_fn_optabs = this_target_optabs;
 
+  /* Initialize global alignment variables after op.  */
+  parse_alignment_opts ();
+
   if (opts != optimization_default_node)
     {
       init_tree_optimization_optabs (opts);
@@ -4942,6 +4779,12 @@
       if (!profile_flag && !flag_instrument_function_entry_exit)
	 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
     }
+
+  /* Don't enable begin stmt markers if var-tracking at assignments is
+     disabled.  The markers make little sense without the variable
+     binding annotations among them.  */
+  cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
+    && MAY_HAVE_DEBUG_MARKER_STMTS;
 }
 
 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
@@ -5201,14 +5044,6 @@
       /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
       DECL_REGISTER (res) = 1;
-
-      if (chkp_function_instrumented_p (current_function_decl))
-	{
-	  tree return_type = TREE_TYPE (res);
-	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
								 subr, 1);
-	  SET_DECL_BOUNDS_RTL (res, bounds);
-	}
     }
 
   /* Initialize rtx for parameters and local variables.
@@ -5259,7 +5094,7 @@
     }
 
   /* The following was moved from init_function_start.
-     The move is supposed to make sdb output more accurate.  */
+     The move was supposed to make sdb output more accurate.  */
   /* Indicate the beginning of the function body,
      as opposed to parm setup.  */
   emit_note (NOTE_INSN_FUNCTION_BEG);
@@ -5356,7 +5191,6 @@
 void
 diddle_return_value (void (*doit) (rtx, void *), void *arg)
 {
-  diddle_return_value_1 (doit, arg, crtl->return_bnd);
   diddle_return_value_1 (doit, arg, crtl->return_rtx);
 }
 
@@ -5450,7 +5284,7 @@
   do_pending_stack_adjust ();
 
   /* Output a linenumber for the end of the function.
-     SDB depends on this.  */
+     SDB depended on this.  */
   set_curr_insn_location (input_location);
 
   /* Before the return label (if any), clobber the return
@@ -6625,8 +6459,9 @@
       /* Only do the transformation for pseudos.  */
       if (! REG_P (output)
	   || rtx_equal_p (output, input)
-	   || (GET_MODE (input) != VOIDmode
-	       && GET_MODE (input) != GET_MODE (output)))
+	   || !(REG_P (input) || SUBREG_P (input)
+		|| MEM_P (input) || CONSTANT_P (input))
+	   || !general_operand (input, GET_MODE (output)))
	 continue;
 
       /* We can't do anything if the output is also used as input,
```