comparison gcc/function.c @ 131:84e7813d76e9 (branch gcc-8.2)

author    mir3636
date      Thu, 25 Oct 2018 07:37:49 +0900
parents   04ced10e8804
children  d34655255c78 1830386684a0

comparing 111:04ced10e8804 with 131:84e7813d76e9
@@ -1,7 +1,7 @@
 /* Expands front end tree to back end RTL for GCC.
-   Copyright (C) 1987-2017 Free Software Foundation, Inc.
+   Copyright (C) 1987-2018 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -71,16 +71,16 @@
 #include "cfgcleanup.h"
 #include "cfgexpand.h"
 #include "shrink-wrap.h"
 #include "toplev.h"
 #include "rtl-iter.h"
-#include "tree-chkp.h"
-#include "rtl-chkp.h"
 #include "tree-dfa.h"
 #include "tree-ssa.h"
 #include "stringpool.h"
 #include "attribs.h"
+#include "gimple.h"
+#include "options.h"
 
 /* So we can assign to cfun in this file.  */
 #undef cfun
 
 #ifndef STACK_ALIGNMENT_NEEDED
@@ -216,11 +216,11 @@
 
 /* Return size needed for stack frame based on slots so far allocated.
    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
    the caller may have to do that.  */
 
-HOST_WIDE_INT
+poly_int64
 get_frame_size (void)
 {
   if (FRAME_GROWS_DOWNWARD)
     return -frame_offset;
   else
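The two threads running through this changeset are the removal of the MPX pointer-bounds (chkp) machinery further down, and the conversion of frame sizes and offsets from HOST_WIDE_INT to poly_int64, of which get_frame_size above is the first instance. A poly_int64 is a compile-time polynomial c0 + c1*x whose indeterminate x is known only at run time (SVE vector lengths being the motivating case), so single comparisons split into must/maybe predicate pairs. The toy model below is purely illustrative (a hypothetical toy_poly, not GCC's poly-int.h) and shows the semantics the rewritten code relies on.

    // toy_poly: a 2-coefficient model of poly_int64.  Value = c0 + c1 * x,
    // where x is an unknown nonnegative run-time quantity.
    #include <cassert>
    #include <cstdint>

    struct toy_poly { int64_t c0, c1; };

    // Equal for every possible x.
    static bool known_eq (toy_poly a, toy_poly b)
    { return a.c0 == b.c0 && a.c1 == b.c1; }

    // Different for at least one possible x.
    static bool maybe_ne (toy_poly a, toy_poly b)
    { return !known_eq (a, b); }

    // a < b for at least one possible x.
    static bool maybe_lt (toy_poly a, toy_poly b)
    { return a.c1 < b.c1 || a.c0 < b.c0; }

    // a < b for every possible x.
    static bool known_lt (toy_poly a, toy_poly b)
    { return a.c0 < b.c0 && a.c1 <= b.c1; }

    int main ()
    {
      toy_poly four = { 4, 0 };   // the constant 4
      toy_poly vl16 = { 0, 16 };  // 16 bytes per unit of x
      assert (maybe_lt (four, vl16));   // true: e.g. x == 1
      assert (!known_lt (four, vl16));  // false: at x == 0, vl16 is 0
      assert (maybe_ne (four, vl16));
      // ordered_p (a, b), used later in this patch, asks whether one of
      // a <= b or b <= a holds for every x; four and vl16 are unordered
      // because the comparison flips direction as x grows.
    }

known_gt, known_ge, maybe_gt and friends follow the same pattern with the roles swapped.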
@@ -230,24 +230,33 @@
 /* Issue an error message and return TRUE if frame OFFSET overflows in
    the signed target pointer arithmetics for function FUNC.  Otherwise
    return FALSE.  */
 
 bool
-frame_offset_overflow (HOST_WIDE_INT offset, tree func)
+frame_offset_overflow (poly_int64 offset, tree func)
 {
-  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
-
-  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
-             /* Leave room for the fixed part of the frame.  */
-             - 64 * UNITS_PER_WORD)
+  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
+  unsigned HOST_WIDE_INT limit
+    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
+       /* Leave room for the fixed part of the frame.  */
+       - 64 * UNITS_PER_WORD);
+
+  if (!coeffs_in_range_p (size, 0U, limit))
     {
-      error_at (DECL_SOURCE_LOCATION (func),
-                "total size of local objects too large");
-      return TRUE;
+      unsigned HOST_WIDE_INT hwisize;
+      if (size.is_constant (&hwisize))
+        error_at (DECL_SOURCE_LOCATION (func),
+                  "total size of local objects %wu exceeds maximum %wu",
+                  hwisize, limit);
+      else
+        error_at (DECL_SOURCE_LOCATION (func),
+                  "total size of local objects exceeds maximum %wu",
+                  limit);
+      return true;
     }
 
-  return FALSE;
+  return false;
 }
 
 /* Return the minimum spill slot alignment for a register of mode MODE.  */
 
 unsigned int
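To make the new limit computation concrete: for a hypothetical 32-bit Pmode target with 4-byte words, the check rejects any frame whose size polynomial has a coefficient at or above 2^31 minus 64 words of headroom. A sketch of the arithmetic (illustrative values, not tied to any real target):

    #include <cassert>
    #include <cstdint>

    int main ()
    {
      const unsigned pmode_bits = 32;      // GET_MODE_BITSIZE (Pmode), assumed
      const uint64_t units_per_word = 4;   // UNITS_PER_WORD, assumed
      uint64_t limit = (UINT64_C (1) << (pmode_bits - 1)) - 64 * units_per_word;
      assert (limit == 2147483392);        // 2^31 - 256
      // coeffs_in_range_p (size, 0U, limit) then requires every coefficient
      // of the poly_int64 size to lie in [0, limit]; is_constant lets the
      // diagnostic print the exact size when there is no runtime term.
    }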
@@ -282,63 +291,61 @@
    offset to be used for the stack slot in *POFFSET and return true;
    return false otherwise.  This function will extend the frame size when
    given a start/length pair that lies at the end of the frame.  */
 
 static bool
-try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
-                     HOST_WIDE_INT size, unsigned int alignment,
-                     HOST_WIDE_INT *poffset)
+try_fit_stack_local (poly_int64 start, poly_int64 length,
+                     poly_int64 size, unsigned int alignment,
+                     poly_int64_pod *poffset)
 {
-  HOST_WIDE_INT this_frame_offset;
+  poly_int64 this_frame_offset;
   int frame_off, frame_alignment, frame_phase;
 
   /* Calculate how many bytes the start of local variables is off from
      stack alignment.  */
   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
   frame_off = targetm.starting_frame_offset () % frame_alignment;
   frame_phase = frame_off ? frame_alignment - frame_off : 0;
 
   /* Round the frame offset to the specified alignment.  */
 
-  /* We must be careful here, since FRAME_OFFSET might be negative and
-     division with a negative dividend isn't as well defined as we might
-     like.  So we instead assume that ALIGNMENT is a power of two and
-     use logical operations which are unambiguous.  */
   if (FRAME_GROWS_DOWNWARD)
     this_frame_offset
-      = (FLOOR_ROUND (start + length - size - frame_phase,
-                      (unsigned HOST_WIDE_INT) alignment)
+      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
         + frame_phase);
   else
     this_frame_offset
-      = (CEIL_ROUND (start - frame_phase,
-                     (unsigned HOST_WIDE_INT) alignment)
-        + frame_phase);
+      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
 
   /* See if it fits.  If this space is at the edge of the frame,
      consider extending the frame to make it fit.  Our caller relies on
      this when allocating a new slot.  */
-  if (frame_offset == start && this_frame_offset < frame_offset)
-    frame_offset = this_frame_offset;
-  else if (this_frame_offset < start)
-    return false;
-  else if (start + length == frame_offset
-          && this_frame_offset + size > start + length)
-    frame_offset = this_frame_offset + size;
-  else if (this_frame_offset + size > start + length)
-    return false;
+  if (maybe_lt (this_frame_offset, start))
+    {
+      if (known_eq (frame_offset, start))
+       frame_offset = this_frame_offset;
+      else
+       return false;
+    }
+  else if (maybe_gt (this_frame_offset + size, start + length))
+    {
+      if (known_eq (frame_offset, start + length))
+       frame_offset = this_frame_offset + size;
+      else
+       return false;
+    }
 
   *poffset = this_frame_offset;
   return true;
 }
 
 /* Create a new frame_space structure describing free space in the stack
    frame beginning at START and ending at END, and chain it into the
    function's frame_space_list.  */
 
 static void
-add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
+add_frame_space (poly_int64 start, poly_int64 end)
 {
   struct frame_space *space = ggc_alloc<frame_space> ();
   space->next = crtl->frame_space_list;
   crtl->frame_space_list = space;
   space->start = start;
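aligned_lower_bound and aligned_upper_bound are the poly_int replacements for FLOOR_ROUND and CEIL_ROUND. The deleted comment explained why the rounding is done with bit masks rather than division; the following plain-integer sketch (assuming, as the old code did, a power-of-two alignment) shows the identities involved:

    // Plain-integer model of the rounding helpers used above.  Illustrative
    // only, not GCC's code; assumes ALIGN is a power of two, which lets us
    // mask instead of dividing (division on negative dividends was the
    // hazard the deleted comment warned about).
    #include <cassert>
    #include <cstdint>

    int64_t aligned_lower_bound (int64_t x, int64_t align) // FLOOR_ROUND
    { return x & -align; }

    int64_t aligned_upper_bound (int64_t x, int64_t align) // CEIL_ROUND
    { return (x + align - 1) & -align; }

    int main ()
    {
      assert (aligned_lower_bound (-37, 16) == -48); // rounds toward -inf
      assert (aligned_upper_bound (-37, 16) == -32); // rounds toward +inf
      assert (aligned_upper_bound (37, 16) == 48);
      // GCC's poly_int versions generalize the same rounding to
      // polynomial values coefficient-wise.
    }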
@@ -361,27 +368,27 @@
    track the same stack slot in two independent lists.
 
    We do not round to stack_boundary here.  */
 
 rtx
-assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
+assign_stack_local_1 (machine_mode mode, poly_int64 size,
                      int align, int kind)
 {
   rtx x, addr;
-  int bigend_correction = 0;
-  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
+  poly_int64 bigend_correction = 0;
+  poly_int64 slot_offset = 0, old_frame_offset;
   unsigned int alignment, alignment_in_bits;
 
   if (align == 0)
     {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
-      size = CEIL_ROUND (size, alignment);
+      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;
@@ -413,11 +420,11 @@
    {
      /* It is OK to reduce the alignment as long as the
         requested size is 0 or the estimated stack
         alignment >= mode alignment.  */
      gcc_assert ((kind & ASLK_REDUCE_ALIGN)
-                 || size == 0
+                 || known_eq (size, 0)
                  || (crtl->stack_alignment_estimated
                      >= GET_MODE_ALIGNMENT (mode)));
      alignment_in_bits = crtl->stack_alignment_estimated;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }
@@ -428,11 +435,11 @@
  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;
 
-  if (mode != BLKmode || size != 0)
+  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;
 
@@ -441,13 +448,13 @@
          struct frame_space *space = *psp;
          if (!try_fit_stack_local (space->start, space->length, size,
                                    alignment, &slot_offset))
            continue;
          *psp = space->next;
-         if (slot_offset > space->start)
+         if (known_gt (slot_offset, space->start))
            add_frame_space (space->start, slot_offset);
-         if (slot_offset + size < space->start + space->length)
+         if (known_lt (slot_offset + size, space->start + space->length))
            add_frame_space (slot_offset + size,
                             space->start + space->length);
          goto found_space;
        }
    }
@@ -465,35 +472,44 @@
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
 
      if (kind & ASLK_RECORD_PAD)
        {
-         if (slot_offset > frame_offset)
+         if (known_gt (slot_offset, frame_offset))
            add_frame_space (frame_offset, slot_offset);
-         if (slot_offset + size < old_frame_offset)
+         if (known_lt (slot_offset + size, old_frame_offset))
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
 
      if (kind & ASLK_RECORD_PAD)
        {
-         if (slot_offset > old_frame_offset)
+         if (known_gt (slot_offset, old_frame_offset))
            add_frame_space (old_frame_offset, slot_offset);
-         if (slot_offset + size < frame_offset)
+         if (known_lt (slot_offset + size, frame_offset))
            add_frame_space (slot_offset + size, frame_offset);
        }
    }
 
 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
- if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
-   bigend_correction = size - GET_MODE_SIZE (mode);
+ if (mode != BLKmode)
+   {
+     /* The slot size can sometimes be smaller than the mode size;
+        e.g. the rs6000 port allocates slots with a vector mode
+        that have the size of only one element.  However, the slot
+        size must always be ordered wrt to the mode size, in the
+        same way as for a subreg.  */
+     gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
+     if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
+       bigend_correction = size - GET_MODE_SIZE (mode);
+   }
 
  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
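The big-endian correction is unchanged in spirit but now guarded: with poly sizes the slot may exceed the mode size only if the two are ordered for every runtime value, hence the new gcc_checking_assert. Concretely (plain integers; hypothetical 8-byte slot holding a 4-byte value on a big-endian target):

    #include <cassert>

    int main ()
    {
      long slot_size = 8, mode_size = 4;
      long bigend_correction = 0;
      bool bytes_big_endian = true;       // assumed target property
      if (bytes_big_endian && mode_size < slot_size) // maybe_lt in the poly world
        bigend_correction = slot_size - mode_size;
      // On big-endian, the value occupies the *last* bytes of the slot,
      // so its address is biased by the excess.
      assert (bigend_correction == 4);
    }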
@@ -519,11 +535,11 @@
 }
 
 /* Wrap up assign_stack_local_1 with last parameter as false.  */
 
 rtx
-assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
+assign_stack_local (machine_mode mode, poly_int64 size, int align)
 {
   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
 }
 
 /* In order to evaluate some expressions, such as function calls returning
@@ -546,11 +562,11 @@
   /* Points to previous temporary slot.  */
   struct temp_slot *prev;
   /* The rtx to used to reference the slot.  */
   rtx slot;
   /* The size, in units, of the slot.  */
-  HOST_WIDE_INT size;
+  poly_int64 size;
   /* The type of the object in the slot, or zero if it doesn't correspond
      to a type.  We use this to determine whether a slot can be reused.
      It can be reused if objects of the type of the new slot will always
      conflict with objects of the type of the old slot.  */
   tree type;
@@ -560,14 +576,14 @@
   char in_use;
   /* Nesting level at which this slot is being used.  */
   int level;
   /* The offset of the slot from the frame_pointer, including extra space
      for alignment.  This info is for combine_temp_slots.  */
-  HOST_WIDE_INT base_offset;
+  poly_int64 base_offset;
   /* The size of the slot, including extra space for alignment.  This
      info is for combine_temp_slots.  */
-  HOST_WIDE_INT full_size;
+  poly_int64 full_size;
 };
 
 /* Entry for the below hash table.  */
 struct GTY((for_user)) temp_slot_address_entry {
   hashval_t hash;
@@ -741,22 +757,18 @@
   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
     return p;
 
   /* Last resort: Address is a virtual stack var address.  */
-  if (GET_CODE (x) == PLUS
-      && XEXP (x, 0) == virtual_stack_vars_rtx
-      && CONST_INT_P (XEXP (x, 1)))
+  poly_int64 offset;
+  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
     {
       int i;
       for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
-         {
-           if (INTVAL (XEXP (x, 1)) >= p->base_offset
-               && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
-             return p;
-         }
+         if (known_in_range_p (offset, p->base_offset, p->full_size))
+           return p;
    }
 
  return NULL;
 }
 
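strip_offset generalizes the old PLUS/CONST_INT pattern match: it peels any constant (possibly polynomial) offset off an address and returns the base, so a bare virtual_stack_vars_rtx (offset 0) goes down the same path. A plain-integer sketch with stand-in types (hypothetical addr struct, not RTL):

    #include <cassert>

    struct addr { const void *base; long offset; };  // stand-in for an RTL address

    const void *strip_offset (addr a, long *off)     // toy: peel base + offset
    { *off = a.offset; return a.base; }

    bool known_in_range_p (long val, long pos, long size)
    { return val >= pos && val < pos + size; }       // poly version: must hold for all x

    int main ()
    {
      int frame_base;                                // stand-in for virtual_stack_vars_rtx
      addr a = { &frame_base, 24 };
      long off;
      if (strip_offset (a, &off) == &frame_base)
        assert (known_in_range_p (off, 16, 16));     // slot [16, 32) contains 24
    }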
@@ -769,20 +781,17 @@
    since assign_stack_local will do any required rounding.
 
    TYPE is the type that will be used for the stack slot.  */
 
 rtx
-assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
-                           tree type)
+assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
 {
   unsigned int align;
   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
   rtx slot;
 
-  /* If SIZE is -1 it means that somebody tried to allocate a temporary
-     of a variable size.  */
-  gcc_assert (size != -1);
+  gcc_assert (known_size_p (size));
 
   align = get_stack_local_alignment (type, mode);
 
   /* Try to find an available, already-allocated temporary of the proper
      mode which meets the size and alignment requirements.  Choose the
@@ -793,17 +802,20 @@
      VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
-         if (p->align >= align && p->size >= size
+         if (p->align >= align
+             && known_ge (p->size, size)
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
-             && (best_p == 0 || best_p->size > p->size
-                 || (best_p->size == p->size && best_p->align > p->align)))
+             && (best_p == 0
+                 || (known_eq (best_p->size, p->size)
+                     ? best_p->align > p->align
+                     : known_ge (best_p->size, p->size))))
            {
-             if (p->align == align && p->size == size)
+             if (p->align == align && known_eq (p->size, size))
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
@@ -823,13 +835,13 @@
     temp_slot so that the extra bytes don't get wasted.  Do this only
     for BLKmode slots, so that we can be sure of the alignment.  */
  if (GET_MODE (best_p->slot) == BLKmode)
    {
      int alignment = best_p->align / BITS_PER_UNIT;
-     HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
+     poly_int64 rounded_size = aligned_upper_bound (size, alignment);
 
-     if (best_p->size - rounded_size >= alignment)
+     if (known_ge (best_p->size - rounded_size, alignment))
        {
          p = ggc_alloc<temp_slot> ();
          p->in_use = 0;
          p->size = best_p->size - rounded_size;
          p->base_offset = best_p->base_offset + rounded_size;
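The splitting arithmetic above, with concrete numbers (plain integers; the poly_int version applies the same operations coefficient-wise):

    #include <cassert>

    int main ()
    {
      long best_size = 64, size = 20, alignment = 16;
      long rounded_size = (size + alignment - 1) & -alignment; // aligned_upper_bound
      assert (rounded_size == 32);
      if (best_size - rounded_size >= alignment)   // known_ge in the poly world
        {
          long leftover_size = best_size - rounded_size;       // new free slot
          long leftover_offset = /* base_offset + */ rounded_size;
          assert (leftover_size == 32 && leftover_offset == 32);
        }
    }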
@@ -848,11 +860,11 @@
     }
 
   /* If we still didn't find one, make a new temporary.  */
   if (selected == 0)
     {
-      HOST_WIDE_INT frame_offset_old = frame_offset;
+      poly_int64 frame_offset_old = frame_offset;
 
       p = ggc_alloc<temp_slot> ();
 
       /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
@@ -862,13 +874,13 @@
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
-                                      ? CEIL_ROUND (size,
-                                                    (int) align
-                                                    / BITS_PER_UNIT)
+                                      ? aligned_upper_bound (size,
+                                                             (int) align
+                                                             / BITS_PER_UNIT)
                                       : size),
                                      align, 0);
 
      p->align = align;
 
@@ -929,11 +941,11 @@
 
 /* Allocate a temporary stack slot and record it for possible later
    reuse.  First two arguments are same as in preceding function.  */
 
 rtx
-assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
+assign_stack_temp (machine_mode mode, poly_int64 size)
 {
   return assign_stack_temp_for_type (mode, size, NULL_TREE);
 }
 
 /* Assign a temporary.
@@ -969,29 +981,30 @@
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
 
  if (mode == BLKmode || memory_required)
    {
-     HOST_WIDE_INT size = int_size_in_bytes (type);
+     poly_int64 size;
      rtx tmp;
-
-     /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
-        problems with allocating the stack space.  */
-     if (size == 0)
-       size = 1;
 
      /* Unfortunately, we don't yet know how to allocate variable-sized
        temporaries.  However, sometimes we can find a fixed upper limit on
        the size, so try that instead.  */
-     else if (size == -1)
+     if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
       size = max_int_size_in_bytes (type);
+
+     /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
+       problems with allocating the stack space.  */
+     if (known_eq (size, 0))
+      size = 1;
 
      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
        this to things that aren't compiler-generated temporaries.  The
       rest of the time we'll die in assign_stack_temp_for_type.  */
-     if (decl && size == -1
+     if (decl
+        && !known_size_p (size)
         && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
       {
        error ("size of variable %q+D is too large", decl);
        size = 1;
      }
@@ -1048,18 +1061,18 @@
      next_q = q->next;
 
      if (GET_MODE (q->slot) != BLKmode)
       continue;
 
-     if (p->base_offset + p->full_size == q->base_offset)
+     if (known_eq (p->base_offset + p->full_size, q->base_offset))
       {
        /* Q comes after P; combine Q into P.  */
        p->size += q->size;
        p->full_size += q->full_size;
        delete_q = 1;
      }
-     else if (q->base_offset + q->full_size == p->base_offset)
+     else if (known_eq (q->base_offset + q->full_size, p->base_offset))
      {
        /* P comes after Q; combine P into Q.  */
        q->size += p->size;
        q->full_size += p->full_size;
        delete_p = 1;
@@ -1360,15 +1373,15 @@
 
   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */
 
-static int in_arg_offset;
-static int var_offset;
-static int dynamic_offset;
-static int out_arg_offset;
-static int cfa_offset;
+static poly_int64 in_arg_offset;
+static poly_int64 var_offset;
+static poly_int64 dynamic_offset;
+static poly_int64 out_arg_offset;
+static poly_int64 cfa_offset;
 
 /* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */
 
 #ifndef STACK_POINTER_OFFSET
@@ -1400,11 +1413,11 @@
   + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
      : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
   : 0) + (STACK_POINTER_OFFSET))
 #else
 #define STACK_DYNAMIC_OFFSET(FNDECL) \
-((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
+((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
 + (STACK_POINTER_OFFSET))
 #endif
 #endif
 
 
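The only change to STACK_DYNAMIC_OFFSET is spelling the zero as poly_int64 (0). Once crtl->outgoing_args_size is a poly type, the two arms of the conditional must share a type the compiler can form; the sketch below reproduces the general shape of the problem with a hypothetical wrapper class (the exact rules for poly_int involve its template constructors and POD variants, so this is an analogy, not the literal reason):

    #include <cassert>

    struct wrapper {
      long v;
      explicit wrapper (long x) : v (x) {}  // explicit: int won't convert silently
    };

    wrapper pick (bool accumulate, wrapper args_size)
    {
      // return accumulate ? args_size : 0;        // ill-formed: no common type
      return accumulate ? args_size : wrapper (0); // OK: both arms are wrapper
    }

    int main ()
    { assert (pick (false, wrapper (48)).v == 0); }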
@@ -1411,14 +1424,14 @@
 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
    is a virtual register, return the equivalent hard register and set the
    offset indirectly through the pointer.  Otherwise, return 0.  */
 
 static rtx
-instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
+instantiate_new_reg (rtx x, poly_int64_pod *poffset)
 {
   rtx new_rtx;
-  HOST_WIDE_INT offset;
+  poly_int64 offset;
 
   if (x == virtual_incoming_args_rtx)
     {
       if (stack_realign_drap)
        {
@@ -1473,11 +1486,11 @@
     {
       rtx *loc = *iter;
       if (rtx x = *loc)
        {
          rtx new_rtx;
-         HOST_WIDE_INT offset;
+         poly_int64 offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
@@ -1526,11 +1539,11 @@
    registers present inside of insn.  The result will be a valid insn.  */
 
 static void
 instantiate_virtual_regs_in_insn (rtx_insn *insn)
 {
-  HOST_WIDE_INT offset;
+  poly_int64 offset;
   int insn_code, i;
   bool any_change = false;
   rtx set, new_rtx, x;
   rtx_insn *seq;
 
@@ -1565,11 +1578,12 @@
  /* Handle a straight copy from a virtual register by generating a
     new add insn.  The difference between this and falling through
     to the generic case is avoiding a new pseudo and eliminating a
     move insn in the initial rtl stream.  */
  new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
- if (new_rtx && offset != 0
+ if (new_rtx
+     && maybe_ne (offset, 0)
     && REG_P (SET_DEST (set))
     && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
   {
     start_sequence ();
 
@@ -1591,21 +1605,22 @@
  extract_insn (insn);
  insn_code = INSN_CODE (insn);
 
  /* Handle a plus involving a virtual register by determining if the
    operands remain valid if they're modified in place.  */
+ poly_int64 delta;
  if (GET_CODE (SET_SRC (set)) == PLUS
     && recog_data.n_operands >= 3
     && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
     && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
-    && CONST_INT_P (recog_data.operand[2])
+    && poly_int_rtx_p (recog_data.operand[2], &delta)
     && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
   {
-     offset += INTVAL (recog_data.operand[2]);
+     offset += delta;
 
     /* If the sum is zero, then replace with a plain move.  */
-    if (offset == 0
+    if (known_eq (offset, 0)
        && REG_P (SET_DEST (set))
        && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
      {
        start_sequence ();
        emit_move_insn (SET_DEST (set), new_rtx);
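For orientation, what instantiate_virtual_regs_in_insn does with these offsets: each virtual register stands for a hard register plus a pass-computed displacement, and any constant in the insn folds into that displacement. Toy version of the zero-offset special case (illustrative only, no RTL):

    #include <cstdio>

    int main ()
    {
      long reg_offset = -16;  // virtual reg replaced by, say, fp + (-16)
      long insn_const = 16;   // insn was (plus (reg virtual) (const_int 16))
      long offset = reg_offset + insn_const;
      if (offset == 0)        // known_eq (offset, 0) in the poly world
        puts ("emit: (set (reg dest) (reg fp))");              // plain move
      else
        printf ("emit: (set (reg dest) (plus (reg fp) (const_int %ld)))\n",
                offset);                                       // re-recognized add
    }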
@@ -1679,11 +1694,11 @@
 
        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
-         if (offset == 0)
+         if (known_eq (offset, 0))
            x = new_rtx;
          else
            {
              start_sequence ();
 
@@ -1704,11 +1719,11 @@
 
        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
-         if (offset != 0)
+         if (maybe_ne (offset, 0))
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
@@ -1949,14 +1964,15 @@
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
-           || GET_CODE (PATTERN (insn)) == ASM_INPUT)
+           || GET_CODE (PATTERN (insn)) == ASM_INPUT
+           || DEBUG_MARKER_INSN_P (insn))
          continue;
-       else if (DEBUG_INSN_P (insn))
-         instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
+       else if (DEBUG_BIND_INSN_P (insn))
+         instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
        else
          instantiate_virtual_regs_in_insn (insn);
 
        if (insn->deleted ())
          continue;
@@ -2081,10 +2097,13 @@
 
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
    and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
+
+ if (TYPE_EMPTY_P (type))
+   return 0;
 
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
 
  if (targetm.calls.return_in_memory (type, fntype))
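TYPE_EMPTY_P is new in GCC 8 and marks types that occupy no argument-passing resources (empty C++ classes, following the x86-64 psABI clarification); such a type can never need a memory return, hence the early 0. The companion warn_parameter_passing_abi hook appears later in this diff. For example (ordinary C++, nothing GCC-internal):

    struct empty {};                 // no members: TYPE_EMPTY_P would be set

    empty make_empty () { return empty (); }  // aggregate_value_p -> 0:
                                              // no hidden return pointer needed
    int main () { make_empty (); }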
@@ -2185,18 +2204,10 @@
        return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }
 
-  /* Decl is implicitly addressible by bound stores and loads
-     if it is an aggregate holding bounds.  */
-  if (chkp_function_instrumented_p (current_function_decl)
-      && TREE_TYPE (decl)
-      && !BOUNDED_P (decl)
-      && chkp_type_has_pointer (TREE_TYPE (decl)))
-    return false;
-
  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;
 
  /* If -ffloat-store specified, don't put explicit float variables
@@ -2262,19 +2273,10 @@
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
-};
-
-struct bounds_parm_data
-{
-  assign_parm_data_one parm_data;
-  tree bounds_parm;
-  tree ptr_parm;
-  rtx ptr_entry;
-  int bound_no;
 };
 
 /* A subroutine of assign_parms.  Initialize ALL.  */
 
 static void
@@ -2387,27 +2389,10 @@
      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);
 
      all->function_result_decl = decl;
-
-      /* If function is instrumented then bounds of the
-        passed structure address is the second argument.  */
-      if (chkp_function_instrumented_p (fndecl))
-       {
-         decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
-                            PARM_DECL, get_identifier (".result_bnd"),
-                            pointer_bounds_type_node);
-         DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
-         DECL_ARTIFICIAL (decl) = 1;
-         DECL_NAMELESS (decl) = 1;
-         TREE_CONSTANT (decl) = 1;
-
-         DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
-         DECL_CHAIN (all->orig_fnargs) = decl;
-         fnargs.safe_insert (1, decl);
-       }
    }
 
  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);
@@ -2526,10 +2511,13 @@
     {
       data->entry_parm = data->stack_parm = const0_rtx;
       return;
     }
 
+  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
+                                            data->passed_type);
+
   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
                                                     data->promoted_mode,
                                                     data->passed_type,
                                                     data->named_arg);
 
@@ -2544,11 +2532,11 @@
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
-  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
+  in_regs = (entry_parm != 0);
 #ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
 #endif
  if (!in_regs && !data->named_arg)
    {
@@ -2633,16 +2621,12 @@
 
 static bool
 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
 {
-  /* Bounds are never passed on the stack to keep compatibility
-     with not instrumented code.  */
-  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
-    return false;
  /* Trivially true if we've no incoming register.  */
-  else if (data->entry_parm == NULL)
+  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
    since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
@@ -2696,13 +2680,13 @@
      && data->promoted_mode != DECL_MODE (parm))
    {
      set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
      if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
       {
-         int offset = subreg_lowpart_offset (DECL_MODE (parm),
-                                             data->promoted_mode);
-         if (offset)
+         poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
+                                                    data->promoted_mode);
+         if (maybe_ne (offset, 0))
           set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
       }
    }
 }
 
@@ -2711,16 +2695,19 @@
 
  /* If we're padding upward, we know that the alignment of the slot
    is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
    intentionally forcing upward padding.  Otherwise we have to come
    up with a guess at the alignment based on OFFSET_RTX.  */
+ poly_int64 offset;
  if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
    align = boundary;
- else if (CONST_INT_P (offset_rtx))
+ else if (poly_int_rtx_p (offset_rtx, &offset))
   {
-     align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
-     align = least_bit_hwi (align);
+     align = least_bit_hwi (boundary);
+     unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
+     if (offset_align != 0)
+       align = MIN (align, offset_align);
   }
  set_mem_align (stack_parm, align);
 
  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);
@@ -2863,11 +2850,11 @@
 
 #ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
    that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
-     && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
+     && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
     && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
        == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
   return true;
 #endif
 
@@ -2926,11 +2913,11 @@
  if (stack_parm == 0)
    {
      SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                      DECL_ALIGN (parm));
-      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
+      if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
       PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }
 
  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
@@ -3422,16 +3409,17 @@
      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                         TYPE_UNSIGNED (TREE_TYPE (parm)));
 
      if (data->stack_parm)
       {
-         int offset = subreg_lowpart_offset (data->nominal_mode,
-                                             GET_MODE (data->stack_parm));
+         poly_int64 offset
+           = subreg_lowpart_offset (data->nominal_mode,
+                                    GET_MODE (data->stack_parm));
         /* ??? This may need a big-endian conversion on sparc64.  */
         data->stack_parm
           = adjust_address (data->stack_parm, data->nominal_mode, 0);
-         if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
+         if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
           set_mem_offset (data->stack_parm,
                           MEM_OFFSET (data->stack_parm) + offset);
       }
    }
 
@@ -3549,139 +3537,20 @@
          i++;
        }
    }
 }
 
-/* Load bounds of PARM from bounds table.  */
-static void
-assign_parm_load_bounds (struct assign_parm_data_one *data,
-                        tree parm,
-                        rtx entry,
-                        unsigned bound_no)
-{
-  bitmap_iterator bi;
-  unsigned i, offs = 0;
-  int bnd_no = -1;
-  rtx slot = NULL, ptr = NULL;
-
-  if (parm)
-    {
-      bitmap slots;
-      bitmap_obstack_initialize (NULL);
-      slots = BITMAP_ALLOC (NULL);
-      chkp_find_bound_slots (TREE_TYPE (parm), slots);
-      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
-       {
-         if (bound_no)
-           bound_no--;
-         else
-           {
-             bnd_no = i;
-             break;
-           }
-       }
-      BITMAP_FREE (slots);
-      bitmap_obstack_release (NULL);
-    }
-
-  /* We may have bounds not associated with any pointer.  */
-  if (bnd_no != -1)
-    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
-
-  /* Find associated pointer.  */
-  if (bnd_no == -1)
-    {
-      /* If bounds are not associated with any bounds,
-        then it is passed in a register or special slot.  */
-      gcc_assert (data->entry_parm);
-      ptr = const0_rtx;
-    }
-  else if (MEM_P (entry))
-    slot = adjust_address (entry, Pmode, offs);
-  else if (REG_P (entry))
-    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
-  else if (GET_CODE (entry) == PARALLEL)
-    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
-  else
-    gcc_unreachable ();
-  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
-                                                        data->entry_parm);
-}
-
-/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
-
-static void
-assign_bounds (vec<bounds_parm_data> &bndargs,
-              struct assign_parm_data_all &all,
-              bool assign_regs, bool assign_special,
-              bool assign_bt)
-{
-  unsigned i, pass;
-  bounds_parm_data *pbdata;
-
-  if (!bndargs.exists ())
-    return;
-
-  /* We make few passes to store input bounds.  Firstly handle bounds
-     passed in registers.  After that we load bounds passed in special
-     slots.  Finally we load bounds from Bounds Table.  */
-  for (pass = 0; pass < 3; pass++)
-    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
-      {
-       /* Pass 0 => regs only.  */
-       if (pass == 0
-           && (!assign_regs
-               || (!pbdata->parm_data.entry_parm
-                   || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
-         continue;
-       /* Pass 1 => slots only.  */
-       else if (pass == 1
-                && (!assign_special
-                    || (!pbdata->parm_data.entry_parm
-                        || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
-         continue;
-       /* Pass 2 => BT only.  */
-       else if (pass == 2
-                && (!assign_bt
-                    || pbdata->parm_data.entry_parm))
-         continue;
-
-       if (!pbdata->parm_data.entry_parm
-           || GET_CODE (pbdata->parm_data.entry_parm) != REG)
-         assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
-                                  pbdata->ptr_entry, pbdata->bound_no);
-
-       set_decl_incoming_rtl (pbdata->bounds_parm,
-                              pbdata->parm_data.entry_parm, false);
-
-       if (assign_parm_setup_block_p (&pbdata->parm_data))
-         assign_parm_setup_block (&all, pbdata->bounds_parm,
-                                  &pbdata->parm_data);
-       else if (pbdata->parm_data.passed_pointer
-                || use_register_for_decl (pbdata->bounds_parm))
-         assign_parm_setup_reg (&all, pbdata->bounds_parm,
-                                &pbdata->parm_data);
-       else
-         assign_parm_setup_stack (&all, pbdata->bounds_parm,
-                                  &pbdata->parm_data);
-      }
-}
-
 /* Assign RTL expressions to the function's parameters.  This may involve
    copying them into registers and using those registers as the DECL_RTL.  */
 
 static void
 assign_parms (tree fndecl)
 {
   struct assign_parm_data_all all;
   tree parm;
   vec<tree> fnargs;
-  unsigned i, bound_no = 0;
-  tree last_arg = NULL;
-  rtx last_arg_entry = NULL;
-  vec<bounds_parm_data> bndargs = vNULL;
-  bounds_parm_data bdata;
+  unsigned i;
 
   crtl->args.internal_arg_pointer
     = targetm.calls.internal_arg_pointer ();
 
   assign_parms_initialize_all (&all);
@@ -3728,19 +3597,10 @@
      if (assign_parm_is_stack_parm (&all, &data))
       {
         assign_parm_find_stack_rtl (parm, &data);
         assign_parm_adjust_entry_rtl (&data);
       }
-     if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
-       {
-         /* Remember where last non bounds arg was passed in case
-            we have to load associated bounds for it from Bounds
-            Table.  */
-         last_arg = parm;
-         last_arg_entry = data.entry_parm;
-         bound_no = 0;
-       }
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
       {
         rtx incoming_rtl
           = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
@@ -3750,67 +3610,24 @@
      else
       set_decl_incoming_rtl (parm, data.entry_parm, false);
 
      assign_parm_adjust_stack_rtl (&data);
 
-     /* Bounds should be loaded in the particular order to
-        have registers allocated correctly.  Collect info about
-        input bounds and load them later.  */
-     if (POINTER_BOUNDS_TYPE_P (data.passed_type))
-       {
-         /* Expect bounds in instrumented functions only.  */
-         gcc_assert (chkp_function_instrumented_p (fndecl));
-
-         bdata.parm_data = data;
-         bdata.bounds_parm = parm;
-         bdata.ptr_parm = last_arg;
-         bdata.ptr_entry = last_arg_entry;
-         bdata.bound_no = bound_no;
-         bndargs.safe_push (bdata);
-       }
+     if (assign_parm_setup_block_p (&data))
+       assign_parm_setup_block (&all, parm, &data);
+     else if (data.passed_pointer || use_register_for_decl (parm))
+       assign_parm_setup_reg (&all, parm, &data);
      else
-       {
-         if (assign_parm_setup_block_p (&data))
-           assign_parm_setup_block (&all, parm, &data);
-         else if (data.passed_pointer || use_register_for_decl (parm))
-           assign_parm_setup_reg (&all, parm, &data);
-         else
-           assign_parm_setup_stack (&all, parm, &data);
-       }
+       assign_parm_setup_stack (&all, parm, &data);
 
      if (cfun->stdarg && !DECL_CHAIN (parm))
-       {
-         int pretend_bytes = 0;
-
-         assign_parms_setup_varargs (&all, &data, false);
-
-         if (chkp_function_instrumented_p (fndecl))
-           {
-             /* We expect this is the last parm.  Otherwise it is wrong
-                to assign bounds right now.  */
-             gcc_assert (i == (fnargs.length () - 1));
-             assign_bounds (bndargs, all, true, false, false);
-             targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
-                                                         data.promoted_mode,
-                                                         data.passed_type,
-                                                         &pretend_bytes,
-                                                         false);
-             assign_bounds (bndargs, all, false, true, true);
-             bndargs.release ();
-           }
-       }
+       assign_parms_setup_varargs (&all, &data, false);
 
      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                         data.passed_type, data.named_arg);
-
-     if (POINTER_BOUNDS_TYPE_P (data.passed_type))
-       bound_no++;
    }
-
-  assign_bounds (bndargs, all, true, true, true);
-  bndargs.release ();
 
  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);
 
  fnargs.release ();
3877 crtl->args.size = all.stack_args_size.constant; 3694 crtl->args.size = all.stack_args_size.constant;
3878 3695
3879 /* Adjust function incoming argument size for alignment and 3696 /* Adjust function incoming argument size for alignment and
3880 minimum length. */ 3697 minimum length. */
3881 3698
3882 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space); 3699 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3883 crtl->args.size = CEIL_ROUND (crtl->args.size, 3700 crtl->args.size = aligned_upper_bound (crtl->args.size,
3884 PARM_BOUNDARY / BITS_PER_UNIT); 3701 PARM_BOUNDARY / BITS_PER_UNIT);
3885 3702
3886 if (ARGS_GROW_DOWNWARD) 3703 if (ARGS_GROW_DOWNWARD)
3887 { 3704 {
3888 crtl->args.arg_offset_rtx 3705 crtl->args.arg_offset_rtx
3889 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant) 3706 = (all.stack_args_size.var == 0
3707 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3890 : expand_expr (size_diffop (all.stack_args_size.var, 3708 : expand_expr (size_diffop (all.stack_args_size.var,
3891 size_int (-all.stack_args_size.constant)), 3709 size_int (-all.stack_args_size.constant)),
3892 NULL_RTX, VOIDmode, EXPAND_NORMAL)); 3710 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3893 } 3711 }
3894 else 3712 else
3929 { 3747 {
3930 rtx real_decl_rtl; 3748 rtx real_decl_rtl;
3931 3749
3932 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), 3750 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3933 fndecl, true); 3751 fndecl, true);
3934 if (chkp_function_instrumented_p (fndecl))
3935 crtl->return_bnd
3936 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3937 fndecl, true);
3938 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; 3752 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3939 /* The delay slot scheduler assumes that crtl->return_rtx 3753 /* The delay slot scheduler assumes that crtl->return_rtx
3940 holds the hard register containing the return value, not a 3754 holds the hard register containing the return value, not a
3941 temporary pseudo. */ 3755 temporary pseudo. */
3942 crtl->return_rtx = real_decl_rtl; 3756 crtl->return_rtx = real_decl_rtl;
3972 evaluating SAVE_EXPRs of variable sized parameters and generating code 3786 evaluating SAVE_EXPRs of variable sized parameters and generating code
3973 to implement callee-copies reference parameters. Returns a sequence of 3787 to implement callee-copies reference parameters. Returns a sequence of
3974 statements to add to the beginning of the function. */ 3788 statements to add to the beginning of the function. */
3975 3789
3976 gimple_seq 3790 gimple_seq
3977 gimplify_parameters (void) 3791 gimplify_parameters (gimple_seq *cleanup)
3978 { 3792 {
3979 struct assign_parm_data_all all; 3793 struct assign_parm_data_all all;
3980 tree parm; 3794 tree parm;
3981 gimple_seq stmts = NULL; 3795 gimple_seq stmts = NULL;
3982 vec<tree> fnargs; 3796 vec<tree> fnargs;
4037 if (TREE_ADDRESSABLE (parm)) 3851 if (TREE_ADDRESSABLE (parm))
4038 TREE_ADDRESSABLE (local) = 1; 3852 TREE_ADDRESSABLE (local) = 1;
4039 else if (TREE_CODE (type) == COMPLEX_TYPE 3853 else if (TREE_CODE (type) == COMPLEX_TYPE
4040 || TREE_CODE (type) == VECTOR_TYPE) 3854 || TREE_CODE (type) == VECTOR_TYPE)
4041 DECL_GIMPLE_REG_P (local) = 1; 3855 DECL_GIMPLE_REG_P (local) = 1;
3856
3857 if (!is_gimple_reg (local)
3858 && flag_stack_reuse != SR_NONE)
3859 {
3860 tree clobber = build_constructor (type, NULL);
3861 gimple *clobber_stmt;
3862 TREE_THIS_VOLATILE (clobber) = 1;
3863 clobber_stmt = gimple_build_assign (local, clobber);
3864 gimple_seq_add_stmt (cleanup, clobber_stmt);
3865 }
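The added hunk implements stack-slot reuse for gimplified parameter copies: when the local copy is not a gimple register and -fstack-reuse allows it, a volatile empty constructor (GIMPLE's clobber marker) is queued on the new *cleanup sequence, telling later passes the slot is dead once the body finishes. A hedged sketch of the effect, assuming an ABI where the aggregate is callee-copied into such a local:

  /* Hypothetical example: 'b' gets a local copy during
     gimplification; the cleanup sequence then ends with
     "b = {CLOBBER};" (GIMPLE notation, not C), so the copy's
     stack slot can be shared with later locals.  */
  struct big { char bytes[256]; };

  int
  first_byte (struct big b)
  {
    return b.bytes[0];
  }  /* cleanup point: b = {CLOBBER};  */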
4042 } 3866 }
4043 else 3867 else
4044 { 3868 {
4045 tree ptr_type, addr; 3869 tree ptr_type, addr;
4046 3870
4124 area reserved for registers, skip that area. */ 3948 area reserved for registers, skip that area. */
4125 if (! in_regs) 3949 if (! in_regs)
4126 { 3950 {
4127 if (reg_parm_stack_space > 0) 3951 if (reg_parm_stack_space > 0)
4128 { 3952 {
4129 if (initial_offset_ptr->var) 3953 if (initial_offset_ptr->var
3954 || !ordered_p (initial_offset_ptr->constant,
3955 reg_parm_stack_space))
4130 { 3956 {
4131 initial_offset_ptr->var 3957 initial_offset_ptr->var
4132 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), 3958 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4133 ssize_int (reg_parm_stack_space)); 3959 ssize_int (reg_parm_stack_space));
4134 initial_offset_ptr->constant = 0; 3960 initial_offset_ptr->constant = 0;
4135 } 3961 }
4136 else if (initial_offset_ptr->constant < reg_parm_stack_space) 3962 else
4137 initial_offset_ptr->constant = reg_parm_stack_space; 3963 initial_offset_ptr->constant
3964 = ordered_max (initial_offset_ptr->constant,
3965 reg_parm_stack_space);
4138 } 3966 }
4139 } 3967 }
4140 3968
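ordered_p/ordered_max replace the raw '<' because two poly_int64 values need not be comparable at compile time: 2*x bytes (x an unknown runtime factor) versus 8 bytes has no fixed ordering, and in that case the code must fall back to the MAX_EXPR tree built above. A toy two-coefficient model of the predicates, assuming the indeterminate is >= 0; the _c names are hypothetical, the real definitions live in gcc/poly-int.h:

  /* A value c0 + c1*x with unknown x >= 0 (illustration only).  */
  struct poly2 { long long c0, c1; };

  /* known_le: A <= B for every x iff it holds per coefficient.  */
  static int known_le_c (struct poly2 a, struct poly2 b)
  { return a.c0 <= b.c0 && a.c1 <= b.c1; }

  /* ordered_p: the comparison is known, one way or the other.  */
  static int ordered_p_c (struct poly2 a, struct poly2 b)
  { return known_le_c (a, b) || known_le_c (b, a); }

  /* ordered_max: only meaningful when ordered_p holds.  */
  static struct poly2 ordered_max_c (struct poly2 a, struct poly2 b)
  { return known_le_c (a, b) ? b : a; }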
4141 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); 3969 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4142 3970
4143 sizetree 3971 sizetree = (type
4144 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); 3972 ? arg_size_in_bytes (type)
3973 : size_int (GET_MODE_SIZE (passed_mode)));
4145 where_pad = targetm.calls.function_arg_padding (passed_mode, type); 3974 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4146 boundary = targetm.calls.function_arg_boundary (passed_mode, type); 3975 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4147 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode, 3976 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4148 type); 3977 type);
4149 locate->where_pad = where_pad; 3978 locate->where_pad = where_pad;
4258 static void 4087 static void
4259 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, 4088 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4260 struct args_size *alignment_pad) 4089 struct args_size *alignment_pad)
4261 { 4090 {
4262 tree save_var = NULL_TREE; 4091 tree save_var = NULL_TREE;
4263 HOST_WIDE_INT save_constant = 0; 4092 poly_int64 save_constant = 0;
4264 int boundary_in_bytes = boundary / BITS_PER_UNIT; 4093 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4265 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; 4094 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4266 4095
4267 #ifdef SPARC_STACK_BOUNDARY_HACK 4096 #ifdef SPARC_STACK_BOUNDARY_HACK
4268 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than 4097 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4269 the real alignment of %sp. However, when it does this, the 4098 the real alignment of %sp. However, when it does this, the
4270 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ 4099 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4281 alignment_pad->var = NULL_TREE; 4110 alignment_pad->var = NULL_TREE;
4282 alignment_pad->constant = 0; 4111 alignment_pad->constant = 0;
4283 4112
4284 if (boundary > BITS_PER_UNIT) 4113 if (boundary > BITS_PER_UNIT)
4285 { 4114 {
4286 if (offset_ptr->var) 4115 int misalign;
4116 if (offset_ptr->var
4117 || !known_misalignment (offset_ptr->constant + sp_offset,
4118 boundary_in_bytes, &misalign))
4287 { 4119 {
4288 tree sp_offset_tree = ssize_int (sp_offset); 4120 tree sp_offset_tree = ssize_int (sp_offset);
4289 tree offset = size_binop (PLUS_EXPR, 4121 tree offset = size_binop (PLUS_EXPR,
4290 ARGS_SIZE_TREE (*offset_ptr), 4122 ARGS_SIZE_TREE (*offset_ptr),
4291 sp_offset_tree); 4123 sp_offset_tree);
4302 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, 4134 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4303 save_var); 4135 save_var);
4304 } 4136 }
4305 else 4137 else
4306 { 4138 {
4307 offset_ptr->constant = -sp_offset + 4139 if (ARGS_GROW_DOWNWARD)
4308 (ARGS_GROW_DOWNWARD 4140 offset_ptr->constant -= misalign;
4309 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes) 4141 else
4310 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)); 4142 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4311 4143
4312 if (boundary > PARM_BOUNDARY) 4144 if (boundary > PARM_BOUNDARY)
4313 alignment_pad->constant = offset_ptr->constant - save_constant; 4145 alignment_pad->constant = offset_ptr->constant - save_constant;
4314 } 4146 }
4315 } 4147 }
4316 } 4148 }
4317 4149
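known_misalignment succeeds when VALUE's residue modulo ALIGN is a compile-time constant, storing it in *misalign; the new "-= misalign" and "+= -misalign & (boundary - 1)" forms then reproduce the old FLOOR_ROUND and CEIL_ROUND results exactly (pad_below, next, relies on the same identity). A scalar self-check for power-of-two boundaries, illustration only:

  #include <assert.h>

  #define FLOOR_ROUND(v, a) ((v) & ~((a) - 1))
  #define CEIL_ROUND(v, a)  (((v) + (a) - 1) & ~((a) - 1))

  int
  main (void)
  {
    for (long long off = -64; off <= 64; off++)
      for (long long align = 2; align <= 32; align <<= 1)
        {
          /* For constants, known_misalignment yields off mod align.  */
          long long misalign = off & (align - 1);
          assert (off - misalign == FLOOR_ROUND (off, align));
          assert (off + (-misalign & (align - 1))
                  == CEIL_ROUND (off, align));
        }
    return 0;
  }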
4318 static void 4150 static void
4319 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree) 4151 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4320 { 4152 {
4321 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT; 4153 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4322 if (passed_mode != BLKmode) 4154 int misalign;
4323 offset_ptr->constant += -GET_MODE_SIZE (passed_mode) & (align - 1); 4155 if (passed_mode != BLKmode
4156 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4157 offset_ptr->constant += -misalign & (align - 1);
4324 else 4158 else
4325 { 4159 {
4326 if (TREE_CODE (sizetree) != INTEGER_CST 4160 if (TREE_CODE (sizetree) != INTEGER_CST
4327 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0) 4161 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4328 { 4162 {
4707 { 4541 {
4708 int i; 4542 int i;
4709 int n_blocks; 4543 int n_blocks;
4710 tree *block_vector; 4544 tree *block_vector;
4711 4545
4712 /* For SDB and XCOFF debugging output, we start numbering the blocks 4546 /* For XCOFF debugging output, we start numbering the blocks
4713 from 1 within each function, rather than keeping a running 4547 from 1 within each function, rather than keeping a running
4714 count. */ 4548 count. */
4715 #if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO) 4549 #if defined (XCOFF_DEBUGGING_INFO)
4716 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG) 4550 if (write_symbols == XCOFF_DEBUG)
4717 next_block_index = 1; 4551 next_block_index = 1;
4718 #endif 4552 #endif
4719 4553
4720 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); 4554 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4721 4555
4779 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); 4613 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4780 } 4614 }
4781 4615
4782 targetm.set_current_function (fndecl); 4616 targetm.set_current_function (fndecl);
4783 this_fn_optabs = this_target_optabs; 4617 this_fn_optabs = this_target_optabs;
4618
4619 /* Initialize global alignment variables after options have been processed. */
4620 parse_alignment_opts ();
4784 4621
4785 if (opts != optimization_default_node) 4622 if (opts != optimization_default_node)
4786 { 4623 {
4787 init_tree_optimization_optabs (opts); 4624 init_tree_optimization_optabs (opts);
4788 if (TREE_OPTIMIZATION_OPTABS (opts)) 4625 if (TREE_OPTIMIZATION_OPTABS (opts))
4929 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions; 4766 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4930 4767
4931 if (!profile_flag && !flag_instrument_function_entry_exit) 4768 if (!profile_flag && !flag_instrument_function_entry_exit)
4932 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1; 4769 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4933 } 4770 }
4771
4772 /* Don't enable begin stmt markers if var-tracking at assignments is
4773 disabled. The markers make little sense without the variable
4774 binding annotations among them. */
4775 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4776 && MAY_HAVE_DEBUG_MARKER_STMTS;
4934 } 4777 }
4935 4778
4936 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL 4779 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4937 instead of just setting it. */ 4780 instead of just setting it. */
4938 4781
5188 } 5031 }
5189 5032
5190 /* Set DECL_REGISTER flag so that expand_function_end will copy the 5033 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5191 result to the real return register(s). */ 5034 result to the real return register(s). */
5192 DECL_REGISTER (res) = 1; 5035 DECL_REGISTER (res) = 1;
5193
5194 if (chkp_function_instrumented_p (current_function_decl))
5195 {
5196 tree return_type = TREE_TYPE (res);
5197 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5198 subr, 1);
5199 SET_DECL_BOUNDS_RTL (res, bounds);
5200 }
5201 } 5036 }
5202 5037
5203 /* Initialize rtx for parameters and local variables. 5038 /* Initialize rtx for parameters and local variables.
5204 In some cases this requires emitting insns. */ 5039 In some cases this requires emitting insns. */
5205 assign_parms (subr); 5040 assign_parms (subr);
5246 DECL_HAS_VALUE_EXPR_P (parm) = 1; 5081 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5247 } 5082 }
5248 } 5083 }
5249 5084
5250 /* The following was moved from init_function_start. 5085 /* The following was moved from init_function_start.
5251 The move is supposed to make sdb output more accurate. */ 5086 The move was supposed to make sdb output more accurate. */
5252 /* Indicate the beginning of the function body, 5087 /* Indicate the beginning of the function body,
5253 as opposed to parm setup. */ 5088 as opposed to parm setup. */
5254 emit_note (NOTE_INSN_FUNCTION_BEG); 5089 emit_note (NOTE_INSN_FUNCTION_BEG);
5255 5090
5256 gcc_assert (NOTE_P (get_last_insn ())); 5091 gcc_assert (NOTE_P (get_last_insn ()));
5343 the current function. */ 5178 the current function. */
5344 5179
5345 void 5180 void
5346 diddle_return_value (void (*doit) (rtx, void *), void *arg) 5181 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5347 { 5182 {
5348 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5349 diddle_return_value_1 (doit, arg, crtl->return_rtx); 5183 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5350 } 5184 }
5351 5185
5352 static void 5186 static void
5353 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) 5187 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5437 5271
5438 clear_pending_stack_adjust (); 5272 clear_pending_stack_adjust ();
5439 do_pending_stack_adjust (); 5273 do_pending_stack_adjust ();
5440 5274
5441 /* Output a linenumber for the end of the function. 5275 /* Output a linenumber for the end of the function.
5442 SDB depends on this. */ 5276 SDB depended on this. */
5443 set_curr_insn_location (input_location); 5277 set_curr_insn_location (input_location);
5444 5278
5445 /* Before the return label (if any), clobber the return 5279 /* Before the return label (if any), clobber the return
5446 registers so that they are not propagated live to the rest of 5280 registers so that they are not propagated live to the rest of
5447 the function. This can only happen with functions that drop 5281 the function. This can only happen with functions that drop
6612 output = SET_DEST (p_sets[match]); 6446 output = SET_DEST (p_sets[match]);
6613 input = RTVEC_ELT (inputs, i); 6447 input = RTVEC_ELT (inputs, i);
6614 /* Only do the transformation for pseudos. */ 6448 /* Only do the transformation for pseudos. */
6615 if (! REG_P (output) 6449 if (! REG_P (output)
6616 || rtx_equal_p (output, input) 6450 || rtx_equal_p (output, input)
6617 || (GET_MODE (input) != VOIDmode 6451 || !(REG_P (input) || SUBREG_P (input)
6618 && GET_MODE (input) != GET_MODE (output))) 6452 || MEM_P (input) || CONSTANT_P (input))
6453 || !general_operand (input, GET_MODE (output)))
6619 continue; 6454 continue;
6620 6455
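The old guard only compared modes, so a VOIDmode CONST_INT input always slipped through even when the copy emitted below could not represent it; the new guard restricts inputs to the RTL shapes the transformation handles and lets general_operand validate them in the output's mode. A hypothetical asm (x86 syntax, illustration only) whose matching constraint reaches this code:

  /* The "0" constraint ties the input to output operand 0, so this
     pass pre-copies the input, here a VOIDmode CONST_INT, into the
     output pseudo.  general_operand (input, GET_MODE (output))
     checks that such a copy is legitimate; comparing modes alone
     could not.  */
  int
  forty_three (void)
  {
    int x;
    asm ("incl %0" : "=r" (x) : "0" (42));
    return x;
  }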
6621 /* We can't do anything if the output is also used as input, 6456 /* We can't do anything if the output is also used as input,
6622 as we're going to overwrite it. */ 6457 as we're going to overwrite it. */
6623 for (j = 0; j < ninputs; j++) 6458 for (j = 0; j < ninputs; j++)