comparison gcc/function.c @ 132:d34655255c78
update gcc-8.2
author | mir3636 |
date | Thu, 25 Oct 2018 10:21:07 +0900 |
parents | 4c56639505ff 84e7813d76e9 |
children | 4e440907fcbf |
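This changeset tracks the rebase onto upstream gcc-8.2: in the hunks below, frame and argument offsets move from HOST_WIDE_INT to poly_int64, the CEIL_ROUND/FLOOR_ROUND macros are replaced by aligned_upper_bound/aligned_lower_bound, plain comparisons become the known_eq/maybe_ne/maybe_lt predicates, and the MPX/chkp bounds-checking paths (tree-chkp.h, assign_bounds, and friends) are removed, while the `#ifndef noCbC` block around c/cbc-tree.h is kept. As a rough, hand-written sketch only (not GCC source, and ignoring the polynomial case that poly_int64 adds), the rounding helpers reduce for constant offsets to the power-of-two bit-masking idiom that the deleted comment in try_fit_stack_local describes; the *_c names below are invented for the example.

```c
#include <assert.h>
#include <stdint.h>

/* Round OFFSET down to a multiple of ALIGN, which must be a power of
   two.  Masking keeps working for negative offsets, which is why the
   old FLOOR_ROUND avoided division.  */
static int64_t
aligned_lower_bound_c (int64_t offset, uint64_t align)
{
  return offset & -(int64_t) align;
}

/* Round OFFSET up to a multiple of ALIGN (again a power of two).  */
static int64_t
aligned_upper_bound_c (int64_t offset, uint64_t align)
{
  return (offset + (int64_t) align - 1) & -(int64_t) align;
}

int
main (void)
{
  assert (aligned_lower_bound_c (-13, 8) == -16); /* FRAME_GROWS_DOWNWARD case */
  assert (aligned_upper_bound_c (-13, 8) == -8);
  assert (aligned_upper_bound_c (13, 8) == 16);
  return 0;
}
```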
130:e108057fa461 | 132:d34655255c78 |
---|---|
1 /* Expands front end tree to back end RTL for GCC. | 1 /* Expands front end tree to back end RTL for GCC. |
2 Copyright (C) 1987-2017 Free Software Foundation, Inc. | 2 Copyright (C) 1987-2018 Free Software Foundation, Inc. |
3 | 3 |
4 This file is part of GCC. | 4 This file is part of GCC. |
5 | 5 |
6 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
7 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
71 #include "cfgcleanup.h" | 71 #include "cfgcleanup.h" |
72 #include "cfgexpand.h" | 72 #include "cfgexpand.h" |
73 #include "shrink-wrap.h" | 73 #include "shrink-wrap.h" |
74 #include "toplev.h" | 74 #include "toplev.h" |
75 #include "rtl-iter.h" | 75 #include "rtl-iter.h" |
76 #include "tree-chkp.h" | |
77 #include "rtl-chkp.h" | |
78 #include "tree-dfa.h" | 76 #include "tree-dfa.h" |
79 #include "tree-ssa.h" | 77 #include "tree-ssa.h" |
80 #include "stringpool.h" | 78 #include "stringpool.h" |
81 #include "attribs.h" | 79 #include "attribs.h" |
80 #include "gimple.h" | |
81 #include "options.h" | |
82 | 82 |
83 | 83 |
84 | 84 |
85 /* So we can assign to cfun in this file. */ | 85 /* So we can assign to cfun in this file. */ |
86 #undef cfun | 86 #undef cfun |
218 | 218 |
219 /* Return size needed for stack frame based on slots so far allocated. | 219 /* Return size needed for stack frame based on slots so far allocated. |
220 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY; | 220 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY; |
221 the caller may have to do that. */ | 221 the caller may have to do that. */ |
222 | 222 |
223 HOST_WIDE_INT | 223 poly_int64 |
224 get_frame_size (void) | 224 get_frame_size (void) |
225 { | 225 { |
226 if (FRAME_GROWS_DOWNWARD) | 226 if (FRAME_GROWS_DOWNWARD) |
227 return -frame_offset; | 227 return -frame_offset; |
228 else | 228 else |
232 /* Issue an error message and return TRUE if frame OFFSET overflows in | 232 /* Issue an error message and return TRUE if frame OFFSET overflows in |
233 the signed target pointer arithmetics for function FUNC. Otherwise | 233 the signed target pointer arithmetics for function FUNC. Otherwise |
234 return FALSE. */ | 234 return FALSE. */ |
235 | 235 |
236 bool | 236 bool |
237 frame_offset_overflow (HOST_WIDE_INT offset, tree func) | 237 frame_offset_overflow (poly_int64 offset, tree func) |
238 { | 238 { |
239 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset; | 239 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset; |
240 | 240 unsigned HOST_WIDE_INT limit |
241 if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1)) | 241 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1)) |
242 /* Leave room for the fixed part of the frame. */ | 242 /* Leave room for the fixed part of the frame. */ |
243 - 64 * UNITS_PER_WORD) | 243 - 64 * UNITS_PER_WORD); |
244 { | 244 |
245 error_at (DECL_SOURCE_LOCATION (func), | 245 if (!coeffs_in_range_p (size, 0U, limit)) |
246 "total size of local objects too large"); | 246 { |
247 return TRUE; | 247 unsigned HOST_WIDE_INT hwisize; |
248 } | 248 if (size.is_constant (&hwisize)) |
249 | 249 error_at (DECL_SOURCE_LOCATION (func), |
250 return FALSE; | 250 "total size of local objects %wu exceeds maximum %wu", |
| 251 hwisize, limit); |
| 252 else |
| 253 error_at (DECL_SOURCE_LOCATION (func), |
| 254 "total size of local objects exceeds maximum %wu", |
| 255 limit); |
| 256 return true; |
| 257 } |
| 258 |
| 259 return false; |
251 } | 260 } |
252 | 261 |
253 /* Return the minimum spill slot alignment for a register of mode MODE. */ | 262 /* Return the minimum spill slot alignment for a register of mode MODE. */ |
254 | 263 |
255 unsigned int | 264 unsigned int |
284 offset to be used for the stack slot in *POFFSET and return true; | 293 offset to be used for the stack slot in *POFFSET and return true; |
285 return false otherwise. This function will extend the frame size when | 294 return false otherwise. This function will extend the frame size when |
286 given a start/length pair that lies at the end of the frame. */ | 295 given a start/length pair that lies at the end of the frame. */ |
287 | 296 |
288 static bool | 297 static bool |
289 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length, | 298 try_fit_stack_local (poly_int64 start, poly_int64 length, |
290 HOST_WIDE_INT size, unsigned int alignment, | 299 poly_int64 size, unsigned int alignment, |
291 HOST_WIDE_INT *poffset) | 300 poly_int64_pod *poffset) |
292 { | 301 { |
293 HOST_WIDE_INT this_frame_offset; | 302 poly_int64 this_frame_offset; |
294 int frame_off, frame_alignment, frame_phase; | 303 int frame_off, frame_alignment, frame_phase; |
295 | 304 |
296 /* Calculate how many bytes the start of local variables is off from | 305 /* Calculate how many bytes the start of local variables is off from |
297 stack alignment. */ | 306 stack alignment. */ |
298 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 307 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
299 frame_off = targetm.starting_frame_offset () % frame_alignment; | 308 frame_off = targetm.starting_frame_offset () % frame_alignment; |
300 frame_phase = frame_off ? frame_alignment - frame_off : 0; | 309 frame_phase = frame_off ? frame_alignment - frame_off : 0; |
301 | 310 |
302 /* Round the frame offset to the specified alignment. */ | 311 /* Round the frame offset to the specified alignment. */ |
303 | 312 |
304 /* We must be careful here, since FRAME_OFFSET might be negative and | |
305 division with a negative dividend isn't as well defined as we might | |
306 like. So we instead assume that ALIGNMENT is a power of two and | |
307 use logical operations which are unambiguous. */ | |
308 if (FRAME_GROWS_DOWNWARD) | 313 if (FRAME_GROWS_DOWNWARD) |
309 this_frame_offset | 314 this_frame_offset |
310 = (FLOOR_ROUND (start + length - size - frame_phase, | 315 = (aligned_lower_bound (start + length - size - frame_phase, alignment) |
311 (unsigned HOST_WIDE_INT) alignment) | |
312 + frame_phase); | 316 + frame_phase); |
313 else | 317 else |
314 this_frame_offset | 318 this_frame_offset |
315 = (CEIL_ROUND (start - frame_phase, | 319 = aligned_upper_bound (start - frame_phase, alignment) + frame_phase; |
316 (unsigned HOST_WIDE_INT) alignment) | |
317 + frame_phase); | |
318 | 320 |
319 /* See if it fits. If this space is at the edge of the frame, | 321 /* See if it fits. If this space is at the edge of the frame, |
320 consider extending the frame to make it fit. Our caller relies on | 322 consider extending the frame to make it fit. Our caller relies on |
321 this when allocating a new slot. */ | 323 this when allocating a new slot. */ |
322 if (frame_offset == start && this_frame_offset < frame_offset) | 324 if (maybe_lt (this_frame_offset, start)) |
323 frame_offset = this_frame_offset; | 325 { |
324 else if (this_frame_offset < start) | 326 if (known_eq (frame_offset, start)) |
325 return false; | 327 frame_offset = this_frame_offset; |
326 else if (start + length == frame_offset | 328 else |
327 && this_frame_offset + size > start + length) | 329 return false; |
328 frame_offset = this_frame_offset + size; | 330 } |
329 else if (this_frame_offset + size > start + length) | 331 else if (maybe_gt (this_frame_offset + size, start + length)) |
330 return false; | 332 { |
| 333 if (known_eq (frame_offset, start + length)) |
| 334 frame_offset = this_frame_offset + size; |
| 335 else |
| 336 return false; |
| 337 } |
331 | 338 |
332 *poffset = this_frame_offset; | 339 *poffset = this_frame_offset; |
333 return true; | 340 return true; |
334 } | 341 } |
335 | 342 |
336 /* Create a new frame_space structure describing free space in the stack | 343 /* Create a new frame_space structure describing free space in the stack |
337 frame beginning at START and ending at END, and chain it into the | 344 frame beginning at START and ending at END, and chain it into the |
338 function's frame_space_list. */ | 345 function's frame_space_list. */ |
339 | 346 |
340 static void | 347 static void |
341 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end) | 348 add_frame_space (poly_int64 start, poly_int64 end) |
342 { | 349 { |
343 struct frame_space *space = ggc_alloc<frame_space> (); | 350 struct frame_space *space = ggc_alloc<frame_space> (); |
344 space->next = crtl->frame_space_list; | 351 space->next = crtl->frame_space_list; |
345 crtl->frame_space_list = space; | 352 crtl->frame_space_list = space; |
346 space->start = start; | 353 space->start = start; |
363 track the same stack slot in two independent lists. | 370 track the same stack slot in two independent lists. |
364 | 371 |
365 We do not round to stack_boundary here. */ | 372 We do not round to stack_boundary here. */ |
366 | 373 |
367 rtx | 374 rtx |
368 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size, | 375 assign_stack_local_1 (machine_mode mode, poly_int64 size, |
369 int align, int kind) | 376 int align, int kind) |
370 { | 377 { |
371 rtx x, addr; | 378 rtx x, addr; |
372 int bigend_correction = 0; | 379 poly_int64 bigend_correction = 0; |
373 HOST_WIDE_INT slot_offset = 0, old_frame_offset; | 380 poly_int64 slot_offset = 0, old_frame_offset; |
374 unsigned int alignment, alignment_in_bits; | 381 unsigned int alignment, alignment_in_bits; |
375 | 382 |
376 if (align == 0) | 383 if (align == 0) |
377 { | 384 { |
378 alignment = get_stack_local_alignment (NULL, mode); | 385 alignment = get_stack_local_alignment (NULL, mode); |
379 alignment /= BITS_PER_UNIT; | 386 alignment /= BITS_PER_UNIT; |
380 } | 387 } |
381 else if (align == -1) | 388 else if (align == -1) |
382 { | 389 { |
383 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | 390 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
384 size = CEIL_ROUND (size, alignment); | 391 size = aligned_upper_bound (size, alignment); |
385 } | 392 } |
386 else if (align == -2) | 393 else if (align == -2) |
387 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */ | 394 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */ |
388 else | 395 else |
389 alignment = align / BITS_PER_UNIT; | 396 alignment = align / BITS_PER_UNIT; |
415 { | 422 { |
416 /* It is OK to reduce the alignment as long as the | 423 /* It is OK to reduce the alignment as long as the |
417 requested size is 0 or the estimated stack | 424 requested size is 0 or the estimated stack |
418 alignment >= mode alignment. */ | 425 alignment >= mode alignment. */ |
419 gcc_assert ((kind & ASLK_REDUCE_ALIGN) | 426 gcc_assert ((kind & ASLK_REDUCE_ALIGN) |
420 || size == 0 | 427 || known_eq (size, 0) |
421 || (crtl->stack_alignment_estimated | 428 || (crtl->stack_alignment_estimated |
422 >= GET_MODE_ALIGNMENT (mode))); | 429 >= GET_MODE_ALIGNMENT (mode))); |
423 alignment_in_bits = crtl->stack_alignment_estimated; | 430 alignment_in_bits = crtl->stack_alignment_estimated; |
424 alignment = alignment_in_bits / BITS_PER_UNIT; | 431 alignment = alignment_in_bits / BITS_PER_UNIT; |
425 } | 432 } |
430 if (crtl->stack_alignment_needed < alignment_in_bits) | 437 if (crtl->stack_alignment_needed < alignment_in_bits) |
431 crtl->stack_alignment_needed = alignment_in_bits; | 438 crtl->stack_alignment_needed = alignment_in_bits; |
432 if (crtl->max_used_stack_slot_alignment < alignment_in_bits) | 439 if (crtl->max_used_stack_slot_alignment < alignment_in_bits) |
433 crtl->max_used_stack_slot_alignment = alignment_in_bits; | 440 crtl->max_used_stack_slot_alignment = alignment_in_bits; |
434 | 441 |
435 if (mode != BLKmode || size != 0) | 442 if (mode != BLKmode || maybe_ne (size, 0)) |
436 { | 443 { |
437 if (kind & ASLK_RECORD_PAD) | 444 if (kind & ASLK_RECORD_PAD) |
438 { | 445 { |
439 struct frame_space **psp; | 446 struct frame_space **psp; |
440 | 447 |
443 struct frame_space *space = *psp; | 450 struct frame_space *space = *psp; |
444 if (!try_fit_stack_local (space->start, space->length, size, | 451 if (!try_fit_stack_local (space->start, space->length, size, |
445 alignment, &slot_offset)) | 452 alignment, &slot_offset)) |
446 continue; | 453 continue; |
447 *psp = space->next; | 454 *psp = space->next; |
448 if (slot_offset > space->start) | 455 if (known_gt (slot_offset, space->start)) |
449 add_frame_space (space->start, slot_offset); | 456 add_frame_space (space->start, slot_offset); |
450 if (slot_offset + size < space->start + space->length) | 457 if (known_lt (slot_offset + size, space->start + space->length)) |
451 add_frame_space (slot_offset + size, | 458 add_frame_space (slot_offset + size, |
452 space->start + space->length); | 459 space->start + space->length); |
453 goto found_space; | 460 goto found_space; |
454 } | 461 } |
455 } | 462 } |
467 frame_offset -= size; | 474 frame_offset -= size; |
468 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset); | 475 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset); |
469 | 476 |
470 if (kind & ASLK_RECORD_PAD) | 477 if (kind & ASLK_RECORD_PAD) |
471 { | 478 { |
472 if (slot_offset > frame_offset) | 479 if (known_gt (slot_offset, frame_offset)) |
473 add_frame_space (frame_offset, slot_offset); | 480 add_frame_space (frame_offset, slot_offset); |
474 if (slot_offset + size < old_frame_offset) | 481 if (known_lt (slot_offset + size, old_frame_offset)) |
475 add_frame_space (slot_offset + size, old_frame_offset); | 482 add_frame_space (slot_offset + size, old_frame_offset); |
476 } | 483 } |
477 } | 484 } |
478 else | 485 else |
479 { | 486 { |
480 frame_offset += size; | 487 frame_offset += size; |
481 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset); | 488 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset); |
482 | 489 |
483 if (kind & ASLK_RECORD_PAD) | 490 if (kind & ASLK_RECORD_PAD) |
484 { | 491 { |
485 if (slot_offset > old_frame_offset) | 492 if (known_gt (slot_offset, old_frame_offset)) |
486 add_frame_space (old_frame_offset, slot_offset); | 493 add_frame_space (old_frame_offset, slot_offset); |
487 if (slot_offset + size < frame_offset) | 494 if (known_lt (slot_offset + size, frame_offset)) |
488 add_frame_space (slot_offset + size, frame_offset); | 495 add_frame_space (slot_offset + size, frame_offset); |
489 } | 496 } |
490 } | 497 } |
491 | 498 |
492 found_space: | 499 found_space: |
493 /* On a big-endian machine, if we are allocating more space than we will use, | 500 /* On a big-endian machine, if we are allocating more space than we will use, |
494 use the least significant bytes of those that are allocated. */ | 501 use the least significant bytes of those that are allocated. */ |
495 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size) | 502 if (mode != BLKmode) |
496 bigend_correction = size - GET_MODE_SIZE (mode); | 503 { |
| 504 /* The slot size can sometimes be smaller than the mode size; |
| 505 e.g. the rs6000 port allocates slots with a vector mode |
| 506 that have the size of only one element. However, the slot |
| 507 size must always be ordered wrt to the mode size, in the |
| 508 same way as for a subreg. */ |
| 509 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size)); |
| 510 if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size)) |
| 511 bigend_correction = size - GET_MODE_SIZE (mode); |
| 512 } |
497 | 513 |
498 /* If we have already instantiated virtual registers, return the actual | 514 /* If we have already instantiated virtual registers, return the actual |
499 address relative to the frame pointer. */ | 515 address relative to the frame pointer. */ |
500 if (virtuals_instantiated) | 516 if (virtuals_instantiated) |
501 addr = plus_constant (Pmode, frame_pointer_rtx, | 517 addr = plus_constant (Pmode, frame_pointer_rtx, |
521 } | 537 } |
522 | 538 |
523 /* Wrap up assign_stack_local_1 with last parameter as false. */ | 539 /* Wrap up assign_stack_local_1 with last parameter as false. */ |
524 | 540 |
525 rtx | 541 rtx |
526 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align) | 542 assign_stack_local (machine_mode mode, poly_int64 size, int align) |
527 { | 543 { |
528 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD); | 544 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD); |
529 } | 545 } |
530 | 546 |
531 /* In order to evaluate some expressions, such as function calls returning | 547 /* In order to evaluate some expressions, such as function calls returning |
548 /* Points to previous temporary slot. */ | 564 /* Points to previous temporary slot. */ |
549 struct temp_slot *prev; | 565 struct temp_slot *prev; |
550 /* The rtx to used to reference the slot. */ | 566 /* The rtx to used to reference the slot. */ |
551 rtx slot; | 567 rtx slot; |
552 /* The size, in units, of the slot. */ | 568 /* The size, in units, of the slot. */ |
553 HOST_WIDE_INT size; | 569 poly_int64 size; |
554 /* The type of the object in the slot, or zero if it doesn't correspond | 570 /* The type of the object in the slot, or zero if it doesn't correspond |
555 to a type. We use this to determine whether a slot can be reused. | 571 to a type. We use this to determine whether a slot can be reused. |
556 It can be reused if objects of the type of the new slot will always | 572 It can be reused if objects of the type of the new slot will always |
557 conflict with objects of the type of the old slot. */ | 573 conflict with objects of the type of the old slot. */ |
558 tree type; | 574 tree type; |
562 char in_use; | 578 char in_use; |
563 /* Nesting level at which this slot is being used. */ | 579 /* Nesting level at which this slot is being used. */ |
564 int level; | 580 int level; |
565 /* The offset of the slot from the frame_pointer, including extra space | 581 /* The offset of the slot from the frame_pointer, including extra space |
566 for alignment. This info is for combine_temp_slots. */ | 582 for alignment. This info is for combine_temp_slots. */ |
567 HOST_WIDE_INT base_offset; | 583 poly_int64 base_offset; |
568 /* The size of the slot, including extra space for alignment. This | 584 /* The size of the slot, including extra space for alignment. This |
569 info is for combine_temp_slots. */ | 585 info is for combine_temp_slots. */ |
570 HOST_WIDE_INT full_size; | 586 poly_int64 full_size; |
571 }; | 587 }; |
572 | 588 |
573 /* Entry for the below hash table. */ | 589 /* Entry for the below hash table. */ |
574 struct GTY((for_user)) temp_slot_address_entry { | 590 struct GTY((for_user)) temp_slot_address_entry { |
575 hashval_t hash; | 591 hashval_t hash; |
743 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1)) | 759 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1)) |
744 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0) | 760 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0) |
745 return p; | 761 return p; |
746 | 762 |
747 /* Last resort: Address is a virtual stack var address. */ | 763 /* Last resort: Address is a virtual stack var address. */ |
748 if (GET_CODE (x) == PLUS | 764 poly_int64 offset; |
749 && XEXP (x, 0) == virtual_stack_vars_rtx | 765 if (strip_offset (x, &offset) == virtual_stack_vars_rtx) |
750 && CONST_INT_P (XEXP (x, 1))) | |
751 { | 766 { |
752 int i; | 767 int i; |
753 for (i = max_slot_level (); i >= 0; i--) | 768 for (i = max_slot_level (); i >= 0; i--) |
754 for (p = *temp_slots_at_level (i); p; p = p->next) | 769 for (p = *temp_slots_at_level (i); p; p = p->next) |
755 { | 770 if (known_in_range_p (offset, p->base_offset, p->full_size)) |
756 if (INTVAL (XEXP (x, 1)) >= p->base_offset | 771 return p; |
757 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size) | |
758 return p; | |
759 } | |
760 } | 772 } |
761 | 773 |
762 return NULL; | 774 return NULL; |
763 } | 775 } |
764 | 776 |
771 since assign_stack_local will do any required rounding. | 783 since assign_stack_local will do any required rounding. |
772 | 784 |
773 TYPE is the type that will be used for the stack slot. */ | 785 TYPE is the type that will be used for the stack slot. */ |
774 | 786 |
775 rtx | 787 rtx |
776 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size, | 788 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type) |
777 tree type) | |
778 { | 789 { |
779 unsigned int align; | 790 unsigned int align; |
780 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp; | 791 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp; |
781 rtx slot; | 792 rtx slot; |
782 | 793 |
783 /* If SIZE is -1 it means that somebody tried to allocate a temporary | 794 gcc_assert (known_size_p (size)); |
784 of a variable size. */ | |
785 gcc_assert (size != -1); | |
786 | 795 |
787 align = get_stack_local_alignment (type, mode); | 796 align = get_stack_local_alignment (type, mode); |
788 | 797 |
789 /* Try to find an available, already-allocated temporary of the proper | 798 /* Try to find an available, already-allocated temporary of the proper |
790 mode which meets the size and alignment requirements. Choose the | 799 mode which meets the size and alignment requirements. Choose the |
795 VIRTUAL_STACK_VARS_REGNUM). */ | 804 VIRTUAL_STACK_VARS_REGNUM). */ |
796 if (!virtuals_instantiated) | 805 if (!virtuals_instantiated) |
797 { | 806 { |
798 for (p = avail_temp_slots; p; p = p->next) | 807 for (p = avail_temp_slots; p; p = p->next) |
799 { | 808 { |
800 if (p->align >= align && p->size >= size | 809 if (p->align >= align |
| 810 && known_ge (p->size, size) |
801 && GET_MODE (p->slot) == mode | 811 && GET_MODE (p->slot) == mode |
802 && objects_must_conflict_p (p->type, type) | 812 && objects_must_conflict_p (p->type, type) |
803 && (best_p == 0 || best_p->size > p->size | 813 && (best_p == 0 |
804 || (best_p->size == p->size && best_p->align > p->align))) | 814 || (known_eq (best_p->size, p->size) |
| 815 ? best_p->align > p->align |
| 816 : known_ge (best_p->size, p->size))) |
805 { | 817 { |
806 if (p->align == align && p->size == size) | 818 if (p->align == align && known_eq (p->size, size)) |
807 { | 819 { |
808 selected = p; | 820 selected = p; |
809 cut_slot_from_list (selected, &avail_temp_slots); | 821 cut_slot_from_list (selected, &avail_temp_slots); |
810 best_p = 0; | 822 best_p = 0; |
811 break; | 823 break; |
825 temp_slot so that the extra bytes don't get wasted. Do this only | 837 temp_slot so that the extra bytes don't get wasted. Do this only |
826 for BLKmode slots, so that we can be sure of the alignment. */ | 838 for BLKmode slots, so that we can be sure of the alignment. */ |
827 if (GET_MODE (best_p->slot) == BLKmode) | 839 if (GET_MODE (best_p->slot) == BLKmode) |
828 { | 840 { |
829 int alignment = best_p->align / BITS_PER_UNIT; | 841 int alignment = best_p->align / BITS_PER_UNIT; |
830 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment); | 842 poly_int64 rounded_size = aligned_upper_bound (size, alignment); |
831 | 843 |
832 if (best_p->size - rounded_size >= alignment) | 844 if (known_ge (best_p->size - rounded_size, alignment)) |
833 { | 845 { |
834 p = ggc_alloc<temp_slot> (); | 846 p = ggc_alloc<temp_slot> (); |
835 p->in_use = 0; | 847 p->in_use = 0; |
836 p->size = best_p->size - rounded_size; | 848 p->size = best_p->size - rounded_size; |
837 p->base_offset = best_p->base_offset + rounded_size; | 849 p->base_offset = best_p->base_offset + rounded_size; |
850 } | 862 } |
851 | 863 |
852 /* If we still didn't find one, make a new temporary. */ | 864 /* If we still didn't find one, make a new temporary. */ |
853 if (selected == 0) | 865 if (selected == 0) |
854 { | 866 { |
855 HOST_WIDE_INT frame_offset_old = frame_offset; | 867 poly_int64 frame_offset_old = frame_offset; |
856 | 868 |
857 p = ggc_alloc<temp_slot> (); | 869 p = ggc_alloc<temp_slot> (); |
858 | 870 |
859 /* We are passing an explicit alignment request to assign_stack_local. | 871 /* We are passing an explicit alignment request to assign_stack_local. |
860 One side effect of that is assign_stack_local will not round SIZE | 872 One side effect of that is assign_stack_local will not round SIZE |
864 and round it now. We also make sure ALIGNMENT is at least | 876 and round it now. We also make sure ALIGNMENT is at least |
865 BIGGEST_ALIGNMENT. */ | 877 BIGGEST_ALIGNMENT. */ |
866 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT); | 878 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT); |
867 p->slot = assign_stack_local_1 (mode, | 879 p->slot = assign_stack_local_1 (mode, |
868 (mode == BLKmode | 880 (mode == BLKmode |
869 ? CEIL_ROUND (size, | 881 ? aligned_upper_bound (size, |
870 (int) align | 882 (int) align |
871 / BITS_PER_UNIT) | 883 / BITS_PER_UNIT) |
872 : size), | 884 : size), |
873 align, 0); | 885 align, 0); |
874 | 886 |
875 p->align = align; | 887 p->align = align; |
876 | 888 |
931 | 943 |
932 /* Allocate a temporary stack slot and record it for possible later | 944 /* Allocate a temporary stack slot and record it for possible later |
933 reuse. First two arguments are same as in preceding function. */ | 945 reuse. First two arguments are same as in preceding function. */ |
934 | 946 |
935 rtx | 947 rtx |
936 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size) | 948 assign_stack_temp (machine_mode mode, poly_int64 size) |
937 { | 949 { |
938 return assign_stack_temp_for_type (mode, size, NULL_TREE); | 950 return assign_stack_temp_for_type (mode, size, NULL_TREE); |
939 } | 951 } |
940 | 952 |
941 /* Assign a temporary. | 953 /* Assign a temporary. |
971 end. See also create_tmp_var for the gimplification-time check. */ | 983 end. See also create_tmp_var for the gimplification-time check. */ |
972 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); | 984 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); |
973 | 985 |
974 if (mode == BLKmode || memory_required) | 986 if (mode == BLKmode || memory_required) |
975 { | 987 { |
976 HOST_WIDE_INT size = int_size_in_bytes (type); | 988 poly_int64 size; |
977 rtx tmp; | 989 rtx tmp; |
978 | |
979 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid | |
980 problems with allocating the stack space. */ | |
981 if (size == 0) | |
982 size = 1; | |
983 | 990 |
984 /* Unfortunately, we don't yet know how to allocate variable-sized | 991 /* Unfortunately, we don't yet know how to allocate variable-sized |
985 temporaries. However, sometimes we can find a fixed upper limit on | 992 temporaries. However, sometimes we can find a fixed upper limit on |
986 the size, so try that instead. */ | 993 the size, so try that instead. */ |
987 else if (size == -1) | 994 if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size)) |
988 size = max_int_size_in_bytes (type); | 995 size = max_int_size_in_bytes (type); |
| 996 |
| 997 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid |
| 998 problems with allocating the stack space. */ |
| 999 if (known_eq (size, 0)) |
| 1000 size = 1; |
989 | 1001 |
990 /* The size of the temporary may be too large to fit into an integer. */ | 1002 /* The size of the temporary may be too large to fit into an integer. */ |
991 /* ??? Not sure this should happen except for user silliness, so limit | 1003 /* ??? Not sure this should happen except for user silliness, so limit |
992 this to things that aren't compiler-generated temporaries. The | 1004 this to things that aren't compiler-generated temporaries. The |
993 rest of the time we'll die in assign_stack_temp_for_type. */ | 1005 rest of the time we'll die in assign_stack_temp_for_type. */ |
994 if (decl && size == -1 | 1006 if (decl |
| 1007 && !known_size_p (size) |
995 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST) | 1008 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST) |
996 { | 1009 { |
997 error ("size of variable %q+D is too large", decl); | 1010 error ("size of variable %q+D is too large", decl); |
998 size = 1; | 1011 size = 1; |
999 } | 1012 } |
1050 next_q = q->next; | 1063 next_q = q->next; |
1051 | 1064 |
1052 if (GET_MODE (q->slot) != BLKmode) | 1065 if (GET_MODE (q->slot) != BLKmode) |
1053 continue; | 1066 continue; |
1054 | 1067 |
1055 if (p->base_offset + p->full_size == q->base_offset) | 1068 if (known_eq (p->base_offset + p->full_size, q->base_offset)) |
1056 { | 1069 { |
1057 /* Q comes after P; combine Q into P. */ | 1070 /* Q comes after P; combine Q into P. */ |
1058 p->size += q->size; | 1071 p->size += q->size; |
1059 p->full_size += q->full_size; | 1072 p->full_size += q->full_size; |
1060 delete_q = 1; | 1073 delete_q = 1; |
1061 } | 1074 } |
1062 else if (q->base_offset + q->full_size == p->base_offset) | 1075 else if (known_eq (q->base_offset + q->full_size, p->base_offset)) |
1063 { | 1076 { |
1064 /* P comes after Q; combine P into Q. */ | 1077 /* P comes after Q; combine P into Q. */ |
1065 q->size += p->size; | 1078 q->size += p->size; |
1066 q->full_size += p->full_size; | 1079 q->full_size += p->full_size; |
1067 delete_p = 1; | 1080 delete_p = 1; |
1362 | 1375 |
1363 The following four variables are used for communication between the | 1376 The following four variables are used for communication between the |
1364 routines. They contain the offsets of the virtual registers from their | 1377 routines. They contain the offsets of the virtual registers from their |
1365 respective hard registers. */ | 1378 respective hard registers. */ |
1366 | 1379 |
1367 static int in_arg_offset; | 1380 static poly_int64 in_arg_offset; |
1368 static int var_offset; | 1381 static poly_int64 var_offset; |
1369 static int dynamic_offset; | 1382 static poly_int64 dynamic_offset; |
1370 static int out_arg_offset; | 1383 static poly_int64 out_arg_offset; |
1371 static int cfa_offset; | 1384 static poly_int64 cfa_offset; |
1372 | 1385 |
1373 /* In most machines, the stack pointer register is equivalent to the bottom | 1386 /* In most machines, the stack pointer register is equivalent to the bottom |
1374 of the stack. */ | 1387 of the stack. */ |
1375 | 1388 |
1376 #ifndef STACK_POINTER_OFFSET | 1389 #ifndef STACK_POINTER_OFFSET |
1402 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \ | 1415 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \ |
1403 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \ | 1416 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \ |
1404 : 0) + (STACK_POINTER_OFFSET)) | 1417 : 0) + (STACK_POINTER_OFFSET)) |
1405 #else | 1418 #else |
1406 #define STACK_DYNAMIC_OFFSET(FNDECL) \ | 1419 #define STACK_DYNAMIC_OFFSET(FNDECL) \ |
1407 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \ | 1420 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \ |
1408 + (STACK_POINTER_OFFSET)) | 1421 + (STACK_POINTER_OFFSET)) |
1409 #endif | 1422 #endif |
1410 #endif | 1423 #endif |
1411 | 1424 |
1412 | 1425 |
1413 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX | 1426 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX |
1414 is a virtual register, return the equivalent hard register and set the | 1427 is a virtual register, return the equivalent hard register and set the |
1415 offset indirectly through the pointer. Otherwise, return 0. */ | 1428 offset indirectly through the pointer. Otherwise, return 0. */ |
1416 | 1429 |
1417 static rtx | 1430 static rtx |
1418 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset) | 1431 instantiate_new_reg (rtx x, poly_int64_pod *poffset) |
1419 { | 1432 { |
1420 rtx new_rtx; | 1433 rtx new_rtx; |
1421 HOST_WIDE_INT offset; | 1434 poly_int64 offset; |
1422 | 1435 |
1423 if (x == virtual_incoming_args_rtx) | 1436 if (x == virtual_incoming_args_rtx) |
1424 { | 1437 { |
1425 if (stack_realign_drap) | 1438 if (stack_realign_drap) |
1426 { | 1439 { |
1475 { | 1488 { |
1476 rtx *loc = *iter; | 1489 rtx *loc = *iter; |
1477 if (rtx x = *loc) | 1490 if (rtx x = *loc) |
1478 { | 1491 { |
1479 rtx new_rtx; | 1492 rtx new_rtx; |
1480 HOST_WIDE_INT offset; | 1493 poly_int64 offset; |
1481 switch (GET_CODE (x)) | 1494 switch (GET_CODE (x)) |
1482 { | 1495 { |
1483 case REG: | 1496 case REG: |
1484 new_rtx = instantiate_new_reg (x, &offset); | 1497 new_rtx = instantiate_new_reg (x, &offset); |
1485 if (new_rtx) | 1498 if (new_rtx) |
1528 registers present inside of insn. The result will be a valid insn. */ | 1541 registers present inside of insn. The result will be a valid insn. */ |
1529 | 1542 |
1530 static void | 1543 static void |
1531 instantiate_virtual_regs_in_insn (rtx_insn *insn) | 1544 instantiate_virtual_regs_in_insn (rtx_insn *insn) |
1532 { | 1545 { |
1533 HOST_WIDE_INT offset; | 1546 poly_int64 offset; |
1534 int insn_code, i; | 1547 int insn_code, i; |
1535 bool any_change = false; | 1548 bool any_change = false; |
1536 rtx set, new_rtx, x; | 1549 rtx set, new_rtx, x; |
1537 rtx_insn *seq; | 1550 rtx_insn *seq; |
1538 | 1551 |
1567 /* Handle a straight copy from a virtual register by generating a | 1580 /* Handle a straight copy from a virtual register by generating a |
1568 new add insn. The difference between this and falling through | 1581 new add insn. The difference between this and falling through |
1569 to the generic case is avoiding a new pseudo and eliminating a | 1582 to the generic case is avoiding a new pseudo and eliminating a |
1570 move insn in the initial rtl stream. */ | 1583 move insn in the initial rtl stream. */ |
1571 new_rtx = instantiate_new_reg (SET_SRC (set), &offset); | 1584 new_rtx = instantiate_new_reg (SET_SRC (set), &offset); |
1572 if (new_rtx && offset != 0 | 1585 if (new_rtx |
| 1586 && maybe_ne (offset, 0) |
1573 && REG_P (SET_DEST (set)) | 1587 && REG_P (SET_DEST (set)) |
1574 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) | 1588 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) |
1575 { | 1589 { |
1576 start_sequence (); | 1590 start_sequence (); |
1577 | 1591 |
1593 extract_insn (insn); | 1607 extract_insn (insn); |
1594 insn_code = INSN_CODE (insn); | 1608 insn_code = INSN_CODE (insn); |
1595 | 1609 |
1596 /* Handle a plus involving a virtual register by determining if the | 1610 /* Handle a plus involving a virtual register by determining if the |
1597 operands remain valid if they're modified in place. */ | 1611 operands remain valid if they're modified in place. */ |
| 1612 poly_int64 delta; |
1598 if (GET_CODE (SET_SRC (set)) == PLUS | 1613 if (GET_CODE (SET_SRC (set)) == PLUS |
1599 && recog_data.n_operands >= 3 | 1614 && recog_data.n_operands >= 3 |
1600 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0) | 1615 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0) |
1601 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1) | 1616 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1) |
1602 && CONST_INT_P (recog_data.operand[2]) | 1617 && poly_int_rtx_p (recog_data.operand[2], &delta) |
1603 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset))) | 1618 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset))) |
1604 { | 1619 { |
1605 offset += INTVAL (recog_data.operand[2]); | 1620 offset += delta; |
1606 | 1621 |
1607 /* If the sum is zero, then replace with a plain move. */ | 1622 /* If the sum is zero, then replace with a plain move. */ |
1608 if (offset == 0 | 1623 if (known_eq (offset, 0) |
1609 && REG_P (SET_DEST (set)) | 1624 && REG_P (SET_DEST (set)) |
1610 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) | 1625 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) |
1611 { | 1626 { |
1612 start_sequence (); | 1627 start_sequence (); |
1613 emit_move_insn (SET_DEST (set), new_rtx); | 1628 emit_move_insn (SET_DEST (set), new_rtx); |
1681 | 1696 |
1682 case REG: | 1697 case REG: |
1683 new_rtx = instantiate_new_reg (x, &offset); | 1698 new_rtx = instantiate_new_reg (x, &offset); |
1684 if (new_rtx == NULL) | 1699 if (new_rtx == NULL) |
1685 continue; | 1700 continue; |
1686 if (offset == 0) | 1701 if (known_eq (offset, 0)) |
1687 x = new_rtx; | 1702 x = new_rtx; |
1688 else | 1703 else |
1689 { | 1704 { |
1690 start_sequence (); | 1705 start_sequence (); |
1691 | 1706 |
1706 | 1721 |
1707 case SUBREG: | 1722 case SUBREG: |
1708 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset); | 1723 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset); |
1709 if (new_rtx == NULL) | 1724 if (new_rtx == NULL) |
1710 continue; | 1725 continue; |
1711 if (offset != 0) | 1726 if (maybe_ne (offset, 0)) |
1712 { | 1727 { |
1713 start_sequence (); | 1728 start_sequence (); |
1714 new_rtx = expand_simple_binop | 1729 new_rtx = expand_simple_binop |
1715 (GET_MODE (new_rtx), PLUS, new_rtx, | 1730 (GET_MODE (new_rtx), PLUS, new_rtx, |
1716 gen_int_mode (offset, GET_MODE (new_rtx)), | 1731 gen_int_mode (offset, GET_MODE (new_rtx)), |
1951 { | 1966 { |
1952 /* These patterns in the instruction stream can never be recognized. | 1967 /* These patterns in the instruction stream can never be recognized. |
1953 Fortunately, they shouldn't contain virtual registers either. */ | 1968 Fortunately, they shouldn't contain virtual registers either. */ |
1954 if (GET_CODE (PATTERN (insn)) == USE | 1969 if (GET_CODE (PATTERN (insn)) == USE |
1955 || GET_CODE (PATTERN (insn)) == CLOBBER | 1970 || GET_CODE (PATTERN (insn)) == CLOBBER |
1956 || GET_CODE (PATTERN (insn)) == ASM_INPUT) | 1971 || GET_CODE (PATTERN (insn)) == ASM_INPUT |
| 1972 || DEBUG_MARKER_INSN_P (insn)) |
1957 continue; | 1973 continue; |
1958 else if (DEBUG_INSN_P (insn)) | 1974 else if (DEBUG_BIND_INSN_P (insn)) |
1959 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn)); | 1975 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn)); |
1960 else | 1976 else |
1961 instantiate_virtual_regs_in_insn (insn); | 1977 instantiate_virtual_regs_in_insn (insn); |
1962 | 1978 |
1963 if (insn->deleted ()) | 1979 if (insn->deleted ()) |
1964 continue; | 1980 continue; |
2083 | 2099 |
2084 /* Types that are TREE_ADDRESSABLE must be constructed in memory, | 2100 /* Types that are TREE_ADDRESSABLE must be constructed in memory, |
2085 and thus can't be returned in registers. */ | 2101 and thus can't be returned in registers. */ |
2086 if (TREE_ADDRESSABLE (type)) | 2102 if (TREE_ADDRESSABLE (type)) |
2087 return 1; | 2103 return 1; |
| 2104 |
| 2105 if (TYPE_EMPTY_P (type)) |
| 2106 return 0; |
2088 | 2107 |
2089 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) | 2108 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) |
2090 return 1; | 2109 return 1; |
2091 | 2110 |
2092 if (targetm.calls.return_in_memory (type, fntype)) | 2111 if (targetm.calls.return_in_memory (type, fntype)) |
2187 return true; | 2206 return true; |
2188 /* We don't set DECL_REGISTER for the function_result_decl. */ | 2207 /* We don't set DECL_REGISTER for the function_result_decl. */ |
2189 return false; | 2208 return false; |
2190 } | 2209 } |
2191 | 2210 |
2192 /* Decl is implicitly addressible by bound stores and loads | |
2193 if it is an aggregate holding bounds. */ | |
2194 if (chkp_function_instrumented_p (current_function_decl) | |
2195 && TREE_TYPE (decl) | |
2196 && !BOUNDED_P (decl) | |
2197 && chkp_type_has_pointer (TREE_TYPE (decl))) | |
2198 return false; | |
2199 | |
2200 /* Only register-like things go in registers. */ | 2211 /* Only register-like things go in registers. */ |
2201 if (DECL_MODE (decl) == BLKmode) | 2212 if (DECL_MODE (decl) == BLKmode) |
2202 return false; | 2213 return false; |
2203 | 2214 |
2204 /* If -ffloat-store specified, don't put explicit float variables | 2215 /* If -ffloat-store specified, don't put explicit float variables |
2264 int partial; | 2275 int partial; |
2265 BOOL_BITFIELD named_arg : 1; | 2276 BOOL_BITFIELD named_arg : 1; |
2266 BOOL_BITFIELD passed_pointer : 1; | 2277 BOOL_BITFIELD passed_pointer : 1; |
2267 BOOL_BITFIELD on_stack : 1; | 2278 BOOL_BITFIELD on_stack : 1; |
2268 BOOL_BITFIELD loaded_in_reg : 1; | 2279 BOOL_BITFIELD loaded_in_reg : 1; |
2269 }; | |
2270 | |
2271 struct bounds_parm_data | |
2272 { | |
2273 assign_parm_data_one parm_data; | |
2274 tree bounds_parm; | |
2275 tree ptr_parm; | |
2276 rtx ptr_entry; | |
2277 int bound_no; | |
2278 }; | 2280 }; |
2279 | 2281 |
2280 /* A subroutine of assign_parms. Initialize ALL. */ | 2282 /* A subroutine of assign_parms. Initialize ALL. */ |
2281 | 2283 |
2282 static void | 2284 static void |
2389 DECL_CHAIN (decl) = all->orig_fnargs; | 2391 DECL_CHAIN (decl) = all->orig_fnargs; |
2390 all->orig_fnargs = decl; | 2392 all->orig_fnargs = decl; |
2391 fnargs.safe_insert (0, decl); | 2393 fnargs.safe_insert (0, decl); |
2392 | 2394 |
2393 all->function_result_decl = decl; | 2395 all->function_result_decl = decl; |
2394 | |
2395 /* If function is instrumented then bounds of the | |
2396 passed structure address is the second argument. */ | |
2397 if (chkp_function_instrumented_p (fndecl)) | |
2398 { | |
2399 decl = build_decl (DECL_SOURCE_LOCATION (fndecl), | |
2400 PARM_DECL, get_identifier (".result_bnd"), | |
2401 pointer_bounds_type_node); | |
2402 DECL_ARG_TYPE (decl) = pointer_bounds_type_node; | |
2403 DECL_ARTIFICIAL (decl) = 1; | |
2404 DECL_NAMELESS (decl) = 1; | |
2405 TREE_CONSTANT (decl) = 1; | |
2406 | |
2407 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs); | |
2408 DECL_CHAIN (all->orig_fnargs) = decl; | |
2409 fnargs.safe_insert (1, decl); | |
2410 } | |
2411 } | 2396 } |
2412 | 2397 |
2413 /* If the target wants to split complex arguments into scalars, do so. */ | 2398 /* If the target wants to split complex arguments into scalars, do so. */ |
2414 if (targetm.calls.split_complex_arg) | 2399 if (targetm.calls.split_complex_arg) |
2415 split_complex_args (&fnargs); | 2400 split_complex_args (&fnargs); |
2528 { | 2513 { |
2529 data->entry_parm = data->stack_parm = const0_rtx; | 2514 data->entry_parm = data->stack_parm = const0_rtx; |
2530 return; | 2515 return; |
2531 } | 2516 } |
2532 | 2517 |
| 2518 targetm.calls.warn_parameter_passing_abi (all->args_so_far, |
| 2519 data->passed_type); |
| 2520 |
2533 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far, | 2521 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far, |
2534 data->promoted_mode, | 2522 data->promoted_mode, |
2535 data->passed_type, | 2523 data->passed_type, |
2536 data->named_arg); | 2524 data->named_arg); |
2537 | 2525 |
2546 have been passed in registers, but wasn't only because it is | 2534 have been passed in registers, but wasn't only because it is |
2547 __builtin_va_alist, we want locate_and_pad_parm to treat it as if | 2535 __builtin_va_alist, we want locate_and_pad_parm to treat it as if |
2548 it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | 2536 it came in a register so that REG_PARM_STACK_SPACE isn't skipped. |
2549 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0 | 2537 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0 |
2550 as it was the previous time. */ | 2538 as it was the previous time. */ |
2551 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type); | 2539 in_regs = (entry_parm != 0); |
2552 #ifdef STACK_PARMS_IN_REG_PARM_AREA | 2540 #ifdef STACK_PARMS_IN_REG_PARM_AREA |
2553 in_regs = true; | 2541 in_regs = true; |
2554 #endif | 2542 #endif |
2555 if (!in_regs && !data->named_arg) | 2543 if (!in_regs && !data->named_arg) |
2556 { | 2544 { |
2635 | 2623 |
2636 static bool | 2624 static bool |
2637 assign_parm_is_stack_parm (struct assign_parm_data_all *all, | 2625 assign_parm_is_stack_parm (struct assign_parm_data_all *all, |
2638 struct assign_parm_data_one *data) | 2626 struct assign_parm_data_one *data) |
2639 { | 2627 { |
2640 /* Bounds are never passed on the stack to keep compatibility | |
2641 with not instrumented code. */ | |
2642 if (POINTER_BOUNDS_TYPE_P (data->passed_type)) | |
2643 return false; | |
2644 /* Trivially true if we've no incoming register. */ | 2628 /* Trivially true if we've no incoming register. */ |
2645 else if (data->entry_parm == NULL) | 2629 if (data->entry_parm == NULL) |
2646 ; | 2630 ; |
2647 /* Also true if we're partially in registers and partially not, | 2631 /* Also true if we're partially in registers and partially not, |
2648 since we've arranged to drop the entire argument on the stack. */ | 2632 since we've arranged to drop the entire argument on the stack. */ |
2649 else if (data->partial != 0) | 2633 else if (data->partial != 0) |
2650 ; | 2634 ; |
2698 && data->promoted_mode != DECL_MODE (parm)) | 2682 && data->promoted_mode != DECL_MODE (parm)) |
2699 { | 2683 { |
2700 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode)); | 2684 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode)); |
2701 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm)) | 2685 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm)) |
2702 { | 2686 { |
2703 int offset = subreg_lowpart_offset (DECL_MODE (parm), | 2687 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm), |
2704 data->promoted_mode); | 2688 data->promoted_mode); |
2705 if (offset) | 2689 if (maybe_ne (offset, 0)) |
2706 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset); | 2690 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset); |
2707 } | 2691 } |
2708 } | 2692 } |
2709 } | 2693 } |
2710 | 2694 |
2713 | 2697 |
2714 /* If we're padding upward, we know that the alignment of the slot | 2698 /* If we're padding upward, we know that the alignment of the slot |
2715 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're | 2699 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're |
2716 intentionally forcing upward padding. Otherwise we have to come | 2700 intentionally forcing upward padding. Otherwise we have to come |
2717 up with a guess at the alignment based on OFFSET_RTX. */ | 2701 up with a guess at the alignment based on OFFSET_RTX. */ |
| 2702 poly_int64 offset; |
2718 if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm) | 2703 if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm) |
2719 align = boundary; | 2704 align = boundary; |
2720 else if (CONST_INT_P (offset_rtx)) | 2705 else if (poly_int_rtx_p (offset_rtx, &offset)) |
2721 { | 2706 { |
2722 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary; | 2707 align = least_bit_hwi (boundary); |
2723 align = least_bit_hwi (align); | 2708 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT; |
| 2709 if (offset_align != 0) |
| 2710 align = MIN (align, offset_align); |
2724 } | 2711 } |
2725 set_mem_align (stack_parm, align); | 2712 set_mem_align (stack_parm, align); |
2726 | 2713 |
2727 if (data->entry_parm) | 2714 if (data->entry_parm) |
2728 set_reg_attrs_for_parm (data->entry_parm, stack_parm); | 2715 set_reg_attrs_for_parm (data->entry_parm, stack_parm); |
2865 | 2852 |
2866 #ifdef BLOCK_REG_PADDING | 2853 #ifdef BLOCK_REG_PADDING |
2867 /* Only assign_parm_setup_block knows how to deal with register arguments | 2854 /* Only assign_parm_setup_block knows how to deal with register arguments |
2868 that are padded at the least significant end. */ | 2855 that are padded at the least significant end. */ |
2869 if (REG_P (data->entry_parm) | 2856 if (REG_P (data->entry_parm) |
2870 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD | 2857 && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD) |
2871 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1) | 2858 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1) |
2872 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) | 2859 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
2873 return true; | 2860 return true; |
2874 #endif | 2861 #endif |
2875 | 2862 |
2928 if (stack_parm == 0) | 2915 if (stack_parm == 0) |
2929 { | 2916 { |
2930 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD)); | 2917 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD)); |
2931 stack_parm = assign_stack_local (BLKmode, size_stored, | 2918 stack_parm = assign_stack_local (BLKmode, size_stored, |
2932 DECL_ALIGN (parm)); | 2919 DECL_ALIGN (parm)); |
2933 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size) | 2920 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size)) |
2934 PUT_MODE (stack_parm, GET_MODE (entry_parm)); | 2921 PUT_MODE (stack_parm, GET_MODE (entry_parm)); |
2935 set_mem_attributes (stack_parm, parm, 1); | 2922 set_mem_attributes (stack_parm, parm, 1); |
2936 } | 2923 } |
2937 | 2924 |
2938 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle | 2925 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle |
3424 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg, | 3411 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg, |
3425 TYPE_UNSIGNED (TREE_TYPE (parm))); | 3412 TYPE_UNSIGNED (TREE_TYPE (parm))); |
3426 | 3413 |
3427 if (data->stack_parm) | 3414 if (data->stack_parm) |
3428 { | 3415 { |
3429 int offset = subreg_lowpart_offset (data->nominal_mode, | 3416 poly_int64 offset |
3430 GET_MODE (data->stack_parm)); | 3417 = subreg_lowpart_offset (data->nominal_mode, |
| 3418 GET_MODE (data->stack_parm)); |
3431 /* ??? This may need a big-endian conversion on sparc64. */ | 3419 /* ??? This may need a big-endian conversion on sparc64. */ |
3432 data->stack_parm | 3420 data->stack_parm |
3433 = adjust_address (data->stack_parm, data->nominal_mode, 0); | 3421 = adjust_address (data->stack_parm, data->nominal_mode, 0); |
3434 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm)) | 3422 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm)) |
3435 set_mem_offset (data->stack_parm, | 3423 set_mem_offset (data->stack_parm, |
3436 MEM_OFFSET (data->stack_parm) + offset); | 3424 MEM_OFFSET (data->stack_parm) + offset); |
3437 } | 3425 } |
3438 } | 3426 } |
3439 | 3427 |
3551 i++; | 3539 i++; |
3552 } | 3540 } |
3553 } | 3541 } |
3554 } | 3542 } |
3555 | 3543 |
3556 /* Load bounds of PARM from bounds table. */ | |
3557 static void | |
3558 assign_parm_load_bounds (struct assign_parm_data_one *data, | |
3559 tree parm, | |
3560 rtx entry, | |
3561 unsigned bound_no) | |
3562 { | |
3563 bitmap_iterator bi; | |
3564 unsigned i, offs = 0; | |
3565 int bnd_no = -1; | |
3566 rtx slot = NULL, ptr = NULL; | |
3567 | |
3568 if (parm) | |
3569 { | |
3570 bitmap slots; | |
3571 bitmap_obstack_initialize (NULL); | |
3572 slots = BITMAP_ALLOC (NULL); | |
3573 chkp_find_bound_slots (TREE_TYPE (parm), slots); | |
3574 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi) | |
3575 { | |
3576 if (bound_no) | |
3577 bound_no--; | |
3578 else | |
3579 { | |
3580 bnd_no = i; | |
3581 break; | |
3582 } | |
3583 } | |
3584 BITMAP_FREE (slots); | |
3585 bitmap_obstack_release (NULL); | |
3586 } | |
3587 | |
3588 /* We may have bounds not associated with any pointer. */ | |
3589 if (bnd_no != -1) | |
3590 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT; | |
3591 | |
3592 /* Find associated pointer. */ | |
3593 if (bnd_no == -1) | |
3594 { | |
3595 /* If bounds are not associated with any bounds, | |
3596 then it is passed in a register or special slot. */ | |
3597 gcc_assert (data->entry_parm); | |
3598 ptr = const0_rtx; | |
3599 } | |
3600 else if (MEM_P (entry)) | |
3601 slot = adjust_address (entry, Pmode, offs); | |
3602 else if (REG_P (entry)) | |
3603 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no); | |
3604 else if (GET_CODE (entry) == PARALLEL) | |
3605 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs)); | |
3606 else | |
3607 gcc_unreachable (); | |
3608 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr, | |
3609 data->entry_parm); | |
3610 } | |
3611 | |
3612 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */ | |
3613 | |
3614 static void | |
3615 assign_bounds (vec<bounds_parm_data> &bndargs, | |
3616 struct assign_parm_data_all &all, | |
3617 bool assign_regs, bool assign_special, | |
3618 bool assign_bt) | |
3619 { | |
3620 unsigned i, pass; | |
3621 bounds_parm_data *pbdata; | |
3622 | |
3623 if (!bndargs.exists ()) | |
3624 return; | |
3625 | |
3626 /* We make few passes to store input bounds. Firstly handle bounds | |
3627 passed in registers. After that we load bounds passed in special | |
3628 slots. Finally we load bounds from Bounds Table. */ | |
3629 for (pass = 0; pass < 3; pass++) | |
3630 FOR_EACH_VEC_ELT (bndargs, i, pbdata) | |
3631 { | |
3632 /* Pass 0 => regs only. */ | |
3633 if (pass == 0 | |
3634 && (!assign_regs | |
3635 ||(!pbdata->parm_data.entry_parm | |
3636 || GET_CODE (pbdata->parm_data.entry_parm) != REG))) | |
3637 continue; | |
3638 /* Pass 1 => slots only. */ | |
3639 else if (pass == 1 | |
3640 && (!assign_special | |
3641 || (!pbdata->parm_data.entry_parm | |
3642 || GET_CODE (pbdata->parm_data.entry_parm) == REG))) | |
3643 continue; | |
3644 /* Pass 2 => BT only. */ | |
3645 else if (pass == 2 | |
3646 && (!assign_bt | |
3647 || pbdata->parm_data.entry_parm)) | |
3648 continue; | |
3649 | |
3650 if (!pbdata->parm_data.entry_parm | |
3651 || GET_CODE (pbdata->parm_data.entry_parm) != REG) | |
3652 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm, | |
3653 pbdata->ptr_entry, pbdata->bound_no); | |
3654 | |
3655 set_decl_incoming_rtl (pbdata->bounds_parm, | |
3656 pbdata->parm_data.entry_parm, false); | |
3657 | |
3658 if (assign_parm_setup_block_p (&pbdata->parm_data)) | |
3659 assign_parm_setup_block (&all, pbdata->bounds_parm, | |
3660 &pbdata->parm_data); | |
3661 else if (pbdata->parm_data.passed_pointer | |
3662 || use_register_for_decl (pbdata->bounds_parm)) | |
3663 assign_parm_setup_reg (&all, pbdata->bounds_parm, | |
3664 &pbdata->parm_data); | |
3665 else | |
3666 assign_parm_setup_stack (&all, pbdata->bounds_parm, | |
3667 &pbdata->parm_data); | |
3668 } | |
3669 } | |
3670 | |
3671 #ifndef noCbC | 3544 #ifndef noCbC |
3672 #include "c/cbc-tree.h" | 3545 #include "c/cbc-tree.h" |
3673 #endif | 3546 #endif |
3674 | 3547 |
3675 /* Assign RTL expressions to the function's parameters. This may involve | 3548 /* Assign RTL expressions to the function's parameters. This may involve |
3679 assign_parms (tree fndecl) | 3552 assign_parms (tree fndecl) |
3680 { | 3553 { |
3681 struct assign_parm_data_all all; | 3554 struct assign_parm_data_all all; |
3682 tree parm; | 3555 tree parm; |
3683 vec<tree> fnargs; | 3556 vec<tree> fnargs; |
3684 unsigned i, bound_no = 0; | 3557 unsigned i; |
3685 tree last_arg = NULL; | |
3686 rtx last_arg_entry = NULL; | |
3687 vec<bounds_parm_data> bndargs = vNULL; | |
3688 bounds_parm_data bdata; | |
3689 | 3558 |
3690 crtl->args.internal_arg_pointer | 3559 crtl->args.internal_arg_pointer |
3691 = targetm.calls.internal_arg_pointer (); | 3560 = targetm.calls.internal_arg_pointer (); |
3692 | 3561 |
3693 assign_parms_initialize_all (&all); | 3562 assign_parms_initialize_all (&all); |
3734 if (assign_parm_is_stack_parm (&all, &data)) | 3603 if (assign_parm_is_stack_parm (&all, &data)) |
3735 { | 3604 { |
3736 assign_parm_find_stack_rtl (parm, &data); | 3605 assign_parm_find_stack_rtl (parm, &data); |
3737 assign_parm_adjust_entry_rtl (&data); | 3606 assign_parm_adjust_entry_rtl (&data); |
3738 } | 3607 } |
3739 if (!POINTER_BOUNDS_TYPE_P (data.passed_type)) | |
3740 { | |
3741 /* Remember where last non bounds arg was passed in case | |
3742 we have to load associated bounds for it from Bounds | |
3743 Table. */ | |
3744 last_arg = parm; | |
3745 last_arg_entry = data.entry_parm; | |
3746 bound_no = 0; | |
3747 } | |
3748 /* Record permanently how this parm was passed. */ | 3608 /* Record permanently how this parm was passed. */ |
3749 if (data.passed_pointer) | 3609 if (data.passed_pointer) |
3750 { | 3610 { |
3751 rtx incoming_rtl | 3611 rtx incoming_rtl |
3752 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)), | 3612 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)), |
3756 else | 3616 else |
3757 set_decl_incoming_rtl (parm, data.entry_parm, false); | 3617 set_decl_incoming_rtl (parm, data.entry_parm, false); |
3758 | 3618 |
3759 assign_parm_adjust_stack_rtl (&data); | 3619 assign_parm_adjust_stack_rtl (&data); |
3760 | 3620 |
3761 /* Bounds should be loaded in the particular order to | 3621 if (assign_parm_setup_block_p (&data)) |
3762 have registers allocated correctly. Collect info about | 3622 assign_parm_setup_block (&all, parm, &data); |
3763 input bounds and load them later. */ | 3623 else if (data.passed_pointer || use_register_for_decl (parm)) |
3764 if (POINTER_BOUNDS_TYPE_P (data.passed_type)) | 3624 assign_parm_setup_reg (&all, parm, &data); |
3765 { | |
3766 /* Expect bounds in instrumented functions only. */ | |
3767 gcc_assert (chkp_function_instrumented_p (fndecl)); | |
3768 | |
3769 bdata.parm_data = data; | |
3770 bdata.bounds_parm = parm; | |
3771 bdata.ptr_parm = last_arg; | |
3772 bdata.ptr_entry = last_arg_entry; | |
3773 bdata.bound_no = bound_no; | |
3774 bndargs.safe_push (bdata); | |
3775 } | |
3776 else | 3625 else |
3777 { | 3626 assign_parm_setup_stack (&all, parm, &data); |
3778 if (assign_parm_setup_block_p (&data)) | |
3779 assign_parm_setup_block (&all, parm, &data); | |
3780 else if (data.passed_pointer || use_register_for_decl (parm)) | |
3781 assign_parm_setup_reg (&all, parm, &data); | |
3782 else | |
3783 assign_parm_setup_stack (&all, parm, &data); | |
3784 } | |
3785 | 3627 |
3786 if (cfun->stdarg && !DECL_CHAIN (parm)) | 3628 if (cfun->stdarg && !DECL_CHAIN (parm)) |
3787 { | 3629 assign_parms_setup_varargs (&all, &data, false); |
3788 int pretend_bytes = 0; | |
3789 | |
3790 assign_parms_setup_varargs (&all, &data, false); | |
3791 | |
3792 if (chkp_function_instrumented_p (fndecl)) | |
3793 { | |
3794 /* We expect this is the last parm. Otherwise it is wrong | |
3795 to assign bounds right now. */ | |
3796 gcc_assert (i == (fnargs.length () - 1)); | |
3797 assign_bounds (bndargs, all, true, false, false); | |
3798 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far, | |
3799 data.promoted_mode, | |
3800 data.passed_type, | |
3801 &pretend_bytes, | |
3802 false); | |
3803 assign_bounds (bndargs, all, false, true, true); | |
3804 bndargs.release (); | |
3805 } | |
3806 } | |
3807 | 3630 |
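For the last named parameter of a stdarg function, the block just above calls assign_parms_setup_varargs so the target can save the remaining argument registers for later va_arg access. As a plain C reminder of the situation being handled (nothing GCC-internal, just standard varargs):

#include <stdarg.h>

/* N is the last named parameter; the trailing arguments are only
   reachable through va_arg, which is what the varargs setup above
   prepares the incoming registers and stack area for.  */
int
sum_ints (int n, ...)
{
  va_list ap;
  int total = 0;

  va_start (ap, n);
  for (int i = 0; i < n; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}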
3808 /* Update info on where next arg arrives in registers. */ | 3631 /* Update info on where next arg arrives in registers. */ |
3809 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, | 3632 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, |
3810 data.passed_type, data.named_arg); | 3633 data.passed_type, data.named_arg); |
3811 | 3634 } |
3812 if (POINTER_BOUNDS_TYPE_P (data.passed_type)) | |
3813 bound_no++; | |
3814 } | |
3815 | |
3816 assign_bounds (bndargs, all, true, true, true); | |
3817 bndargs.release (); | |
3818 | 3635 |
3819 if (targetm.calls.split_complex_arg) | 3636 if (targetm.calls.split_complex_arg) |
3820 assign_parms_unsplit_complex (&all, fnargs); | 3637 assign_parms_unsplit_complex (&all, fnargs); |
3821 | 3638 |
3822 fnargs.release (); | 3639 fnargs.release (); |
3888 crtl->args.size = all.stack_args_size.constant; | 3705 crtl->args.size = all.stack_args_size.constant; |
3889 | 3706 |
3890 /* Adjust function incoming argument size for alignment and | 3707 /* Adjust function incoming argument size for alignment and |
3891 minimum length. */ | 3708 minimum length. */ |
3892 | 3709 |
3893 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space); | 3710 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space); |
3894 crtl->args.size = CEIL_ROUND (crtl->args.size, | 3711 crtl->args.size = aligned_upper_bound (crtl->args.size, |
3895 PARM_BOUNDARY / BITS_PER_UNIT); | 3712 PARM_BOUNDARY / BITS_PER_UNIT); |
3896 | 3713 |
3897 if (ARGS_GROW_DOWNWARD) | 3714 if (ARGS_GROW_DOWNWARD) |
3898 { | 3715 { |
3899 crtl->args.arg_offset_rtx | 3716 crtl->args.arg_offset_rtx |
3900 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant) | 3717 = (all.stack_args_size.var == 0 |
3718 ? gen_int_mode (-all.stack_args_size.constant, Pmode) | |
3901 : expand_expr (size_diffop (all.stack_args_size.var, | 3719 : expand_expr (size_diffop (all.stack_args_size.var, |
3902 size_int (-all.stack_args_size.constant)), | 3720 size_int (-all.stack_args_size.constant)), |
3903 NULL_RTX, VOIDmode, EXPAND_NORMAL)); | 3721 NULL_RTX, VOIDmode, EXPAND_NORMAL)); |
3904 } | 3722 } |
3905 else | 3723 else |
3940 { | 3758 { |
3941 rtx real_decl_rtl; | 3759 rtx real_decl_rtl; |
3942 | 3760 |
3943 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), | 3761 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), |
3944 fndecl, true); | 3762 fndecl, true); |
3945 if (chkp_function_instrumented_p (fndecl)) | |
3946 crtl->return_bnd | |
3947 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result), | |
3948 fndecl, true); | |
3949 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; | 3763 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; |
3950 /* The delay slot scheduler assumes that crtl->return_rtx | 3764 /* The delay slot scheduler assumes that crtl->return_rtx |
3951 holds the hard register containing the return value, not a | 3765 holds the hard register containing the return value, not a |
3952 temporary pseudo. */ | 3766 temporary pseudo. */ |
3953 crtl->return_rtx = real_decl_rtl; | 3767 crtl->return_rtx = real_decl_rtl; |
3983 evaluating SAVE_EXPRs of variable sized parameters and generating code | 3797 evaluating SAVE_EXPRs of variable sized parameters and generating code |
3984 to implement callee-copies reference parameters. Returns a sequence of | 3798 to implement callee-copies reference parameters. Returns a sequence of |
3985 statements to add to the beginning of the function. */ | 3799 statements to add to the beginning of the function. */ |
3986 | 3800 |
3987 gimple_seq | 3801 gimple_seq |
3988 gimplify_parameters (void) | 3802 gimplify_parameters (gimple_seq *cleanup) |
3989 { | 3803 { |
3990 struct assign_parm_data_all all; | 3804 struct assign_parm_data_all all; |
3991 tree parm; | 3805 tree parm; |
3992 gimple_seq stmts = NULL; | 3806 gimple_seq stmts = NULL; |
3993 vec<tree> fnargs; | 3807 vec<tree> fnargs; |
4048 if (TREE_ADDRESSABLE (parm)) | 3862 if (TREE_ADDRESSABLE (parm)) |
4049 TREE_ADDRESSABLE (local) = 1; | 3863 TREE_ADDRESSABLE (local) = 1; |
4050 else if (TREE_CODE (type) == COMPLEX_TYPE | 3864 else if (TREE_CODE (type) == COMPLEX_TYPE |
4051 || TREE_CODE (type) == VECTOR_TYPE) | 3865 || TREE_CODE (type) == VECTOR_TYPE) |
4052 DECL_GIMPLE_REG_P (local) = 1; | 3866 DECL_GIMPLE_REG_P (local) = 1; |
3867 | |
3868 if (!is_gimple_reg (local) | |
3869 && flag_stack_reuse != SR_NONE) | |
3870 { | |
3871 tree clobber = build_constructor (type, NULL); | |
3872 gimple *clobber_stmt; | |
3873 TREE_THIS_VOLATILE (clobber) = 1; | |
3874 clobber_stmt = gimple_build_assign (local, clobber); | |
3875 gimple_seq_add_stmt (cleanup, clobber_stmt); | |
3876 } | |
4053 } | 3877 } |
4054 else | 3878 else |
4055 { | 3879 { |
4056 tree ptr_type, addr; | 3880 tree ptr_type, addr; |
4057 | 3881 |
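Earlier in this hunk, gimplify_parameters now takes a cleanup sequence and appends a clobber for each parameter copy that ends up in memory, so that with -fstack-reuse enabled (the default) its stack slot can be reused once the copy is dead. A user-level illustration of the effect such clobbers make possible (the function is made up for illustration; slot sharing is permitted, not guaranteed):

/* With the default -fstack-reuse=all, A and B have disjoint lifetimes,
   so the compiler may give both arrays the same stack slot.  */
int
use_buffers (void)
{
  int total = 0;
  {
    char a[4096] = { 1 };
    total += a[0];
  }
  {
    char b[4096] = { 2 };
    total += b[0];
  }
  return total;
}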
4135 area reserved for registers, skip that area. */ | 3959 area reserved for registers, skip that area. */ |
4136 if (! in_regs) | 3960 if (! in_regs) |
4137 { | 3961 { |
4138 if (reg_parm_stack_space > 0) | 3962 if (reg_parm_stack_space > 0) |
4139 { | 3963 { |
4140 if (initial_offset_ptr->var) | 3964 if (initial_offset_ptr->var |
3965 || !ordered_p (initial_offset_ptr->constant, | |
3966 reg_parm_stack_space)) | |
4141 { | 3967 { |
4142 initial_offset_ptr->var | 3968 initial_offset_ptr->var |
4143 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | 3969 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), |
4144 ssize_int (reg_parm_stack_space)); | 3970 ssize_int (reg_parm_stack_space)); |
4145 initial_offset_ptr->constant = 0; | 3971 initial_offset_ptr->constant = 0; |
4146 } | 3972 } |
4147 else if (initial_offset_ptr->constant < reg_parm_stack_space) | 3973 else |
4148 initial_offset_ptr->constant = reg_parm_stack_space; | 3974 initial_offset_ptr->constant |
3975 = ordered_max (initial_offset_ptr->constant, | |
3976 reg_parm_stack_space); | |
4149 } | 3977 } |
4150 } | 3978 } |
4151 | 3979 |
4152 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); | 3980 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); |
4153 | 3981 |
4154 sizetree | 3982 sizetree = (type |
4155 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); | 3983 ? arg_size_in_bytes (type) |
3984 : size_int (GET_MODE_SIZE (passed_mode))); | |
4156 where_pad = targetm.calls.function_arg_padding (passed_mode, type); | 3985 where_pad = targetm.calls.function_arg_padding (passed_mode, type); |
4157 boundary = targetm.calls.function_arg_boundary (passed_mode, type); | 3986 boundary = targetm.calls.function_arg_boundary (passed_mode, type); |
4158 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode, | 3987 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode, |
4159 type); | 3988 type); |
4160 locate->where_pad = where_pad; | 3989 locate->where_pad = where_pad; |
4269 static void | 4098 static void |
4270 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, | 4099 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, |
4271 struct args_size *alignment_pad) | 4100 struct args_size *alignment_pad) |
4272 { | 4101 { |
4273 tree save_var = NULL_TREE; | 4102 tree save_var = NULL_TREE; |
4274 HOST_WIDE_INT save_constant = 0; | 4103 poly_int64 save_constant = 0; |
4275 int boundary_in_bytes = boundary / BITS_PER_UNIT; | 4104 int boundary_in_bytes = boundary / BITS_PER_UNIT; |
4276 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; | 4105 poly_int64 sp_offset = STACK_POINTER_OFFSET; |
4277 | 4106 |
4278 #ifdef SPARC_STACK_BOUNDARY_HACK | 4107 #ifdef SPARC_STACK_BOUNDARY_HACK |
4279 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than | 4108 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than |
4280 the real alignment of %sp. However, when it does this, the | 4109 the real alignment of %sp. However, when it does this, the |
4281 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ | 4110 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ |
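Throughout these hunks the frame and argument offsets change from HOST_WIDE_INT to poly_int64, and alignment questions go through predicates such as known_misalignment, which may report that the answer is not a compile-time constant. A rough sketch of the idea behind such polynomial sizes, assuming a single runtime coefficient (the struct and helper below are invented for this sketch and are not GCC's poly-int.h API):

#include <stdbool.h>
#include <stdint.h>

/* A size of the form CONST + COEFF * N, where N is a runtime quantity
   such as a scalable vector length.  */
struct poly_size
{
  int64_t coeff_const;    /* compile-time part */
  int64_t coeff_runtime;  /* multiplied by the unknown N */
};

/* The misalignment of VALUE wrt a power-of-two ALIGN is a compile-time
   constant only if the runtime part is itself a multiple of ALIGN.  */
static bool
misalignment_known (struct poly_size value, int64_t align, int64_t *misalign)
{
  if (value.coeff_runtime % align != 0)
    return false;                      /* depends on N, so unknown */
  *misalign = value.coeff_const & (align - 1);
  return true;
}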
4292 alignment_pad->var = NULL_TREE; | 4121 alignment_pad->var = NULL_TREE; |
4293 alignment_pad->constant = 0; | 4122 alignment_pad->constant = 0; |
4294 | 4123 |
4295 if (boundary > BITS_PER_UNIT) | 4124 if (boundary > BITS_PER_UNIT) |
4296 { | 4125 { |
4297 if (offset_ptr->var) | 4126 int misalign; |
4127 if (offset_ptr->var | |
4128 || !known_misalignment (offset_ptr->constant + sp_offset, | |
4129 boundary_in_bytes, &misalign)) | |
4298 { | 4130 { |
4299 tree sp_offset_tree = ssize_int (sp_offset); | 4131 tree sp_offset_tree = ssize_int (sp_offset); |
4300 tree offset = size_binop (PLUS_EXPR, | 4132 tree offset = size_binop (PLUS_EXPR, |
4301 ARGS_SIZE_TREE (*offset_ptr), | 4133 ARGS_SIZE_TREE (*offset_ptr), |
4302 sp_offset_tree); | 4134 sp_offset_tree); |
4313 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, | 4145 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, |
4314 save_var); | 4146 save_var); |
4315 } | 4147 } |
4316 else | 4148 else |
4317 { | 4149 { |
4318 offset_ptr->constant = -sp_offset + | 4150 if (ARGS_GROW_DOWNWARD) |
4319 (ARGS_GROW_DOWNWARD | 4151 offset_ptr->constant -= misalign; |
4320 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes) | 4152 else |
4321 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)); | 4153 offset_ptr->constant += -misalign & (boundary_in_bytes - 1); |
4322 | 4154 |
4323 if (boundary > PARM_BOUNDARY) | 4155 if (boundary > PARM_BOUNDARY) |
4324 alignment_pad->constant = offset_ptr->constant - save_constant; | 4156 alignment_pad->constant = offset_ptr->constant - save_constant; |
4325 } | 4157 } |
4326 } | 4158 } |
4327 } | 4159 } |
4328 | 4160 |
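Both the rewritten pad_to_arg_alignment above and pad_below just after it now express rounding in terms of the misalignment (the offset's non-negative residue modulo the boundary) rather than the old FLOOR_ROUND/CEIL_ROUND macros; aligned_upper_bound earlier in assign_parms relies on the same identity. A minimal standalone check of that identity, assuming a power-of-two boundary (helper names are illustrative only):

#include <assert.h>
#include <stdint.h>

/* Round X down or up to a multiple of the power-of-two BOUNDARY using
   only the misalignment, mirroring the new pad_to_arg_alignment and
   pad_below arithmetic.  */
static int64_t
round_down (int64_t x, int64_t boundary)
{
  int64_t misalign = x & (boundary - 1);   /* non-negative residue */
  return x - misalign;                     /* what FLOOR_ROUND computed */
}

static int64_t
round_up (int64_t x, int64_t boundary)
{
  int64_t misalign = x & (boundary - 1);
  return x + (-misalign & (boundary - 1)); /* what CEIL_ROUND computed */
}

int
main (void)
{
  assert (round_down (37, 16) == 32);
  assert (round_up (37, 16) == 48);
  assert (round_up (48, 16) == 48);        /* already aligned: no padding */
  return 0;
}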
4329 static void | 4161 static void |
4330 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree) | 4162 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree) |
4331 { | 4163 { |
4332 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT; | 4164 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT; |
4333 if (passed_mode != BLKmode) | 4165 int misalign; |
4334 offset_ptr->constant += -GET_MODE_SIZE (passed_mode) & (align - 1); | 4166 if (passed_mode != BLKmode |
4167 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign)) | |
4168 offset_ptr->constant += -misalign & (align - 1); | |
4335 else | 4169 else |
4336 { | 4170 { |
4337 if (TREE_CODE (sizetree) != INTEGER_CST | 4171 if (TREE_CODE (sizetree) != INTEGER_CST |
4338 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0) | 4172 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0) |
4339 { | 4173 { |
4718 { | 4552 { |
4719 int i; | 4553 int i; |
4720 int n_blocks; | 4554 int n_blocks; |
4721 tree *block_vector; | 4555 tree *block_vector; |
4722 | 4556 |
4723 /* For SDB and XCOFF debugging output, we start numbering the blocks | 4557 /* For XCOFF debugging output, we start numbering the blocks |
4724 from 1 within each function, rather than keeping a running | 4558 from 1 within each function, rather than keeping a running |
4725 count. */ | 4559 count. */ |
4726 #if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO) | 4560 #if defined (XCOFF_DEBUGGING_INFO) |
4727 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG) | 4561 if (write_symbols == XCOFF_DEBUG) |
4728 next_block_index = 1; | 4562 next_block_index = 1; |
4729 #endif | 4563 #endif |
4730 | 4564 |
4731 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); | 4565 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); |
4732 | 4566 |
4790 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); | 4624 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); |
4791 } | 4625 } |
4792 | 4626 |
4793 targetm.set_current_function (fndecl); | 4627 targetm.set_current_function (fndecl); |
4794 this_fn_optabs = this_target_optabs; | 4628 this_fn_optabs = this_target_optabs; |
4629 | |
4630 /* Initialize global alignment variables after op. */ | |
4631 parse_alignment_opts (); | |
4795 | 4632 |
4796 if (opts != optimization_default_node) | 4633 if (opts != optimization_default_node) |
4797 { | 4634 { |
4798 init_tree_optimization_optabs (opts); | 4635 init_tree_optimization_optabs (opts); |
4799 if (TREE_OPTIMIZATION_OPTABS (opts)) | 4636 if (TREE_OPTIMIZATION_OPTABS (opts)) |
4940 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions; | 4777 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions; |
4941 | 4778 |
4942 if (!profile_flag && !flag_instrument_function_entry_exit) | 4779 if (!profile_flag && !flag_instrument_function_entry_exit) |
4943 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1; | 4780 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1; |
4944 } | 4781 } |
4782 | |
4783 /* Don't enable begin stmt markers if var-tracking at assignments is | |
4784 disabled. The markers make little sense without the variable | |
4785 binding annotations among them. */ | |
4786 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt | |
4787 && MAY_HAVE_DEBUG_MARKER_STMTS; | |
4945 } | 4788 } |
4946 | 4789 |
4947 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL | 4790 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL |
4948 instead of just setting it. */ | 4791 instead of just setting it. */ |
4949 | 4792 |
5199 } | 5042 } |
5200 | 5043 |
5201 /* Set DECL_REGISTER flag so that expand_function_end will copy the | 5044 /* Set DECL_REGISTER flag so that expand_function_end will copy the |
5202 result to the real return register(s). */ | 5045 result to the real return register(s). */ |
5203 DECL_REGISTER (res) = 1; | 5046 DECL_REGISTER (res) = 1; |
5204 | |
5205 if (chkp_function_instrumented_p (current_function_decl)) | |
5206 { | |
5207 tree return_type = TREE_TYPE (res); | |
5208 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type, | |
5209 subr, 1); | |
5210 SET_DECL_BOUNDS_RTL (res, bounds); | |
5211 } | |
5212 } | 5047 } |
5213 | 5048 |
5214 /* Initialize rtx for parameters and local variables. | 5049 /* Initialize rtx for parameters and local variables. |
5215 In some cases this requires emitting insns. */ | 5050 In some cases this requires emitting insns. */ |
5216 assign_parms (subr); | 5051 assign_parms (subr); |
5257 DECL_HAS_VALUE_EXPR_P (parm) = 1; | 5092 DECL_HAS_VALUE_EXPR_P (parm) = 1; |
5258 } | 5093 } |
5259 } | 5094 } |
5260 | 5095 |
5261 /* The following was moved from init_function_start. | 5096 /* The following was moved from init_function_start. |
5262 The move is supposed to make sdb output more accurate. */ | 5097 The move was supposed to make sdb output more accurate. */ |
5263 /* Indicate the beginning of the function body, | 5098 /* Indicate the beginning of the function body, |
5264 as opposed to parm setup. */ | 5099 as opposed to parm setup. */ |
5265 emit_note (NOTE_INSN_FUNCTION_BEG); | 5100 emit_note (NOTE_INSN_FUNCTION_BEG); |
5266 | 5101 |
5267 gcc_assert (NOTE_P (get_last_insn ())); | 5102 gcc_assert (NOTE_P (get_last_insn ())); |
5354 the current function. */ | 5189 the current function. */ |
5355 | 5190 |
5356 void | 5191 void |
5357 diddle_return_value (void (*doit) (rtx, void *), void *arg) | 5192 diddle_return_value (void (*doit) (rtx, void *), void *arg) |
5358 { | 5193 { |
5359 diddle_return_value_1 (doit, arg, crtl->return_bnd); | |
5360 diddle_return_value_1 (doit, arg, crtl->return_rtx); | 5194 diddle_return_value_1 (doit, arg, crtl->return_rtx); |
5361 } | 5195 } |
5362 | 5196 |
5363 static void | 5197 static void |
5364 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) | 5198 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) |
5448 | 5282 |
5449 clear_pending_stack_adjust (); | 5283 clear_pending_stack_adjust (); |
5450 do_pending_stack_adjust (); | 5284 do_pending_stack_adjust (); |
5451 | 5285 |
5452 /* Output a linenumber for the end of the function. | 5286 /* Output a linenumber for the end of the function. |
5453 SDB depends on this. */ | 5287 SDB depended on this. */ |
5454 set_curr_insn_location (input_location); | 5288 set_curr_insn_location (input_location); |
5455 | 5289 |
5456 /* Before the return label (if any), clobber the return | 5290 /* Before the return label (if any), clobber the return |
5457 registers so that they are not propagated live to the rest of | 5291 registers so that they are not propagated live to the rest of |
5458 the function. This can only happen with functions that drop | 5292 the function. This can only happen with functions that drop |
6623 output = SET_DEST (p_sets[match]); | 6457 output = SET_DEST (p_sets[match]); |
6624 input = RTVEC_ELT (inputs, i); | 6458 input = RTVEC_ELT (inputs, i); |
6625 /* Only do the transformation for pseudos. */ | 6459 /* Only do the transformation for pseudos. */ |
6626 if (! REG_P (output) | 6460 if (! REG_P (output) |
6627 || rtx_equal_p (output, input) | 6461 || rtx_equal_p (output, input) |
6628 || (GET_MODE (input) != VOIDmode | 6462 || !(REG_P (input) || SUBREG_P (input) |
6629 && GET_MODE (input) != GET_MODE (output))) | 6463 || MEM_P (input) || CONSTANT_P (input)) |
6464 || !general_operand (input, GET_MODE (output))) | |
6630 continue; | 6465 continue; |
6631 | 6466 |
6632 /* We can't do anything if the output is also used as input, | 6467 /* We can't do anything if the output is also used as input, |
6633 as we're going to overwrite it. */ | 6468 as we're going to overwrite it. */ |
6634 for (j = 0; j < ninputs; j++) | 6469 for (j = 0; j < ninputs; j++) |
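This last hunk is from the fix-up for matching constraints in inline asm (match_asm_constraints_1): when an input operand is tied to an output with a constraint such as "0", the pass copies the input into the output pseudo beforehand, and the revised condition only accepts inputs that are registers, subregs, memory, or constants valid as general operands. A small user-level example of the construct being handled (x86 AT&T syntax; purely illustrative):

/* The "0" constraint ties X to output operand 0, so the add happens
   in whatever register the compiler picks for RESULT.  */
static inline int
add_one (int x)
{
  int result;
  __asm__ ("addl $1, %0" : "=r" (result) : "0" (x));
  return result;
}

int
main (void)
{
  return add_one (41) == 42 ? 0 : 1;
}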