comparison gcc/function.c @ 145:1830386684a0

gcc-9.2.0
author anatofuz
date Thu, 13 Feb 2020 11:34:05 +0900
parents 84e7813d76e9
children 351920fa3827
comparing 131:84e7813d76e9 with 145:1830386684a0
1 /* Expands front end tree to back end RTL for GCC. 1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc. 2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
77 #include "tree-ssa.h" 77 #include "tree-ssa.h"
78 #include "stringpool.h" 78 #include "stringpool.h"
79 #include "attribs.h" 79 #include "attribs.h"
80 #include "gimple.h" 80 #include "gimple.h"
81 #include "options.h" 81 #include "options.h"
82 #include "function-abi.h"
82 83
83 /* So we can assign to cfun in this file. */ 84 /* So we can assign to cfun in this file. */
84 #undef cfun 85 #undef cfun
85 86
86 #ifndef STACK_ALIGNMENT_NEEDED 87 #ifndef STACK_ALIGNMENT_NEEDED
131 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL; 132 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
132 vec<tree, va_gc> *types_used_by_cur_var_decl; 133 vec<tree, va_gc> *types_used_by_cur_var_decl;
133 134
134 /* Forward declarations. */ 135 /* Forward declarations. */
135 136
136 static struct temp_slot *find_temp_slot_from_address (rtx); 137 static class temp_slot *find_temp_slot_from_address (rtx);
137 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *); 138 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
138 static void pad_below (struct args_size *, machine_mode, tree); 139 static void pad_below (struct args_size *, machine_mode, tree);
139 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *); 140 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
140 static int all_blocks (tree, tree *); 141 static int all_blocks (tree, tree *);
141 static tree *get_block_vector (tree, int *); 142 static tree *get_block_vector (tree, int *);
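The widespread struct-to-class changes in this revision (here in the forward declarations, and below for temp_slot and frame_space) track GCC 10's -Wmismatched-tags cleanup: every declaration now uses the same class-key as the definition. A minimal standalone illustration of the warning, with a hypothetical type name:

    // mismatched-tags.cc: compile with g++ -Wmismatched-tags -c (GCC 10+ or Clang)
    struct widget;                          // first declared with 'struct'
    class widget { };                       // defined with 'class': warned about
    widget *make () { return new widget; }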
343 function's frame_space_list. */ 344 function's frame_space_list. */
344 345
345 static void 346 static void
346 add_frame_space (poly_int64 start, poly_int64 end) 347 add_frame_space (poly_int64 start, poly_int64 end)
347 { 348 {
348 struct frame_space *space = ggc_alloc<frame_space> (); 349 class frame_space *space = ggc_alloc<frame_space> ();
349 space->next = crtl->frame_space_list; 350 space->next = crtl->frame_space_list;
350 crtl->frame_space_list = space; 351 crtl->frame_space_list = space;
351 space->start = start; 352 space->start = start;
352 space->length = end - start; 353 space->length = end - start;
353 } 354 }
397 398
398 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */ 399 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
399 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT) 400 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
400 { 401 {
401 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT; 402 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
402 alignment = alignment_in_bits / BITS_PER_UNIT; 403 alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
403 } 404 }
404 405
405 if (SUPPORTS_STACK_ALIGNMENT) 406 if (SUPPORTS_STACK_ALIGNMENT)
406 { 407 {
407 if (crtl->stack_alignment_estimated < alignment_in_bits) 408 if (crtl->stack_alignment_estimated < alignment_in_bits)
439 440
440 if (mode != BLKmode || maybe_ne (size, 0)) 441 if (mode != BLKmode || maybe_ne (size, 0))
441 { 442 {
442 if (kind & ASLK_RECORD_PAD) 443 if (kind & ASLK_RECORD_PAD)
443 { 444 {
444 struct frame_space **psp; 445 class frame_space **psp;
445 446
446 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next) 447 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
447 { 448 {
448 struct frame_space *space = *psp; 449 class frame_space *space = *psp;
449 if (!try_fit_stack_local (space->start, space->length, size, 450 if (!try_fit_stack_local (space->start, space->length, size,
450 alignment, &slot_offset)) 451 alignment, &slot_offset))
451 continue; 452 continue;
452 *psp = space->next; 453 *psp = space->next;
453 if (known_gt (slot_offset, space->start)) 454 if (known_gt (slot_offset, space->start))
554 result could be in a temporary, we preserve it if we can determine which 555 result could be in a temporary, we preserve it if we can determine which
555 one it is in. If we cannot determine which temporary may contain the 556 one it is in. If we cannot determine which temporary may contain the
556 result, all temporaries are preserved. A temporary is preserved by 557 result, all temporaries are preserved. A temporary is preserved by
557 pretending it was allocated at the previous nesting level. */ 558 pretending it was allocated at the previous nesting level. */
558 559
559 struct GTY(()) temp_slot { 560 class GTY(()) temp_slot {
561 public:
560 /* Points to next temporary slot. */ 562 /* Points to next temporary slot. */
561 struct temp_slot *next; 563 class temp_slot *next;
562 /* Points to previous temporary slot. */ 564 /* Points to previous temporary slot. */
563 struct temp_slot *prev; 565 class temp_slot *prev;
564 /* The rtx used to reference the slot. */ 566 /* The rtx used to reference the slot. */
565 rtx slot; 567 rtx slot;
566 /* The size, in units, of the slot. */ 568 /* The size, in units, of the slot. */
567 poly_int64 size; 569 poly_int64 size;
568 /* The type of the object in the slot, or zero if it doesn't correspond 570 /* The type of the object in the slot, or zero if it doesn't correspond
586 588
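Note the public: inserted right after the class-key switch on temp_slot: members of a class default to private access while members of a struct default to public, so the conversion needs the explicit access specifier to stay behavior-neutral. In miniature:

    struct s_slot { int level; };              // level is public by default
    class c_slot { public: int level; };       // equivalent after conversion
    int probe (c_slot &c) { return c.level; }  // still accessible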
587 /* Entry for the below hash table. */ 589 /* Entry for the below hash table. */
588 struct GTY((for_user)) temp_slot_address_entry { 590 struct GTY((for_user)) temp_slot_address_entry {
589 hashval_t hash; 591 hashval_t hash;
590 rtx address; 592 rtx address;
591 struct temp_slot *temp_slot; 593 class temp_slot *temp_slot;
592 }; 594 };
593 595
594 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry> 596 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
595 { 597 {
596 static hashval_t hash (temp_slot_address_entry *); 598 static hashval_t hash (temp_slot_address_entry *);
603 static size_t n_temp_slots_in_use; 605 static size_t n_temp_slots_in_use;
604 606
605 /* Removes temporary slot TEMP from LIST. */ 607 /* Removes temporary slot TEMP from LIST. */
606 608
607 static void 609 static void
608 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list) 610 cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
609 { 611 {
610 if (temp->next) 612 if (temp->next)
611 temp->next->prev = temp->prev; 613 temp->next->prev = temp->prev;
612 if (temp->prev) 614 if (temp->prev)
613 temp->prev->next = temp->next; 615 temp->prev->next = temp->next;
618 } 620 }
619 621
620 /* Inserts temporary slot TEMP to LIST. */ 622 /* Inserts temporary slot TEMP to LIST. */
621 623
622 static void 624 static void
623 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list) 625 insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
624 { 626 {
625 temp->next = *list; 627 temp->next = *list;
626 if (*list) 628 if (*list)
627 (*list)->prev = temp; 629 (*list)->prev = temp;
628 temp->prev = NULL; 630 temp->prev = NULL;
629 *list = temp; 631 *list = temp;
630 } 632 }
631 633
632 /* Returns the list of used temp slots at LEVEL. */ 634 /* Returns the list of used temp slots at LEVEL. */
633 635
634 static struct temp_slot ** 636 static class temp_slot **
635 temp_slots_at_level (int level) 637 temp_slots_at_level (int level)
636 { 638 {
637 if (level >= (int) vec_safe_length (used_temp_slots)) 639 if (level >= (int) vec_safe_length (used_temp_slots))
638 vec_safe_grow_cleared (used_temp_slots, level + 1); 640 vec_safe_grow_cleared (used_temp_slots, level + 1);
639 641
652 } 654 }
653 655
654 /* Moves temporary slot TEMP to LEVEL. */ 656 /* Moves temporary slot TEMP to LEVEL. */
655 657
656 static void 658 static void
657 move_slot_to_level (struct temp_slot *temp, int level) 659 move_slot_to_level (class temp_slot *temp, int level)
658 { 660 {
659 cut_slot_from_list (temp, temp_slots_at_level (temp->level)); 661 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
660 insert_slot_to_list (temp, temp_slots_at_level (level)); 662 insert_slot_to_list (temp, temp_slots_at_level (level));
661 temp->level = level; 663 temp->level = level;
662 } 664 }
663 665
664 /* Make temporary slot TEMP available. */ 666 /* Make temporary slot TEMP available. */
665 667
666 static void 668 static void
667 make_slot_available (struct temp_slot *temp) 669 make_slot_available (class temp_slot *temp)
668 { 670 {
669 cut_slot_from_list (temp, temp_slots_at_level (temp->level)); 671 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
670 insert_slot_to_list (temp, &avail_temp_slots); 672 insert_slot_to_list (temp, &avail_temp_slots);
671 temp->in_use = 0; 673 temp->in_use = 0;
672 temp->level = -1; 674 temp->level = -1;
698 return exp_equiv_p (t1->address, t2->address, 0, true); 700 return exp_equiv_p (t1->address, t2->address, 0, true);
699 } 701 }
700 702
701 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */ 703
702 static void 704 static void
703 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot) 705 insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
704 { 706 {
705 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> (); 707 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
706 t->address = address; 708 t->address = copy_rtx (address);
707 t->temp_slot = temp_slot; 709 t->temp_slot = temp_slot;
708 t->hash = temp_slot_address_compute_hash (t); 710 t->hash = temp_slot_address_compute_hash (t);
709 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t; 711 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
710 } 712 }
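The other change here, storing copy_rtx (address) instead of the caller's rtx, deserves a note: a plausible reading is that the address table hashes the rtx by contents (see temp_slot_address_compute_hash), so sharing structure with RTL that later passes rewrite in place would leave entries unfindable. The same pitfall in self-contained C++ (illustrative only, not GCC code):

    #include <string>
    #include <string_view>
    #include <unordered_map>

    // A value-hashed cache whose key aliases mutable storage: once the
    // storage changes, the entry matches neither its old nor new spelling.
    std::string buf = "(reg:SI 3)";
    std::unordered_map<std::string_view, int> cache{{std::string_view (buf), 7}};

    int stale_lookup ()
    {
      buf[1] = 'R';                       // key contents rewritten in place
      return cache.count ("(reg:SI 3)");  // 0: stored key no longer compares equal
    }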
711 713
732 temp_slot_address_table->empty (); 734 temp_slot_address_table->empty ();
733 } 735 }
734 736
735 /* Find the temp slot corresponding to the object at address X. */ 737 /* Find the temp slot corresponding to the object at address X. */
736 738
737 static struct temp_slot * 739 static class temp_slot *
738 find_temp_slot_from_address (rtx x) 740 find_temp_slot_from_address (rtx x)
739 { 741 {
740 struct temp_slot *p; 742 class temp_slot *p;
741 struct temp_slot_address_entry tmp, *t; 743 struct temp_slot_address_entry tmp, *t;
742 744
743 /* First try the easy way: 745 /* First try the easy way:
744 See if X exists in the address -> temp slot mapping. */ 746 See if X exists in the address -> temp slot mapping. */
745 tmp.address = x; 747 tmp.address = x;
784 786
785 rtx 787 rtx
786 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type) 788 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
787 { 789 {
788 unsigned int align; 790 unsigned int align;
789 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp; 791 class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
790 rtx slot; 792 rtx slot;
791 793
792 gcc_assert (known_size_p (size)); 794 gcc_assert (known_size_p (size));
793 795
794 align = get_stack_local_alignment (type, mode); 796 align = get_stack_local_alignment (type, mode);
1028 problems in this case. */ 1030 problems in this case. */
1029 1031
1030 static void 1032 static void
1031 combine_temp_slots (void) 1033 combine_temp_slots (void)
1032 { 1034 {
1033 struct temp_slot *p, *q, *next, *next_q; 1035 class temp_slot *p, *q, *next, *next_q;
1034 int num_slots; 1036 int num_slots;
1035 1037
1036 /* We can't combine slots, because the information about which slot 1038 /* We can't combine slots, because the information about which slot
1037 is in which alias set will be lost. */ 1039 is in which alias set will be lost. */
1038 if (flag_strict_aliasing) 1040 if (flag_strict_aliasing)
1092 slot that previously was known by OLD_RTX. */ 1094 slot that previously was known by OLD_RTX. */
1093 1095
1094 void 1096 void
1095 update_temp_slot_address (rtx old_rtx, rtx new_rtx) 1097 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1096 { 1098 {
1097 struct temp_slot *p; 1099 class temp_slot *p;
1098 1100
1099 if (rtx_equal_p (old_rtx, new_rtx)) 1101 if (rtx_equal_p (old_rtx, new_rtx))
1100 return; 1102 return;
1101 1103
1102 p = find_temp_slot_from_address (old_rtx); 1104 p = find_temp_slot_from_address (old_rtx);
1146 returns a value in memory. */ 1148 returns a value in memory. */
1147 1149
1148 void 1150 void
1149 preserve_temp_slots (rtx x) 1151 preserve_temp_slots (rtx x)
1150 { 1152 {
1151 struct temp_slot *p = 0, *next; 1153 class temp_slot *p = 0, *next;
1152 1154
1153 if (x == 0) 1155 if (x == 0)
1154 return; 1156 return;
1155 1157
1156 /* If X is a register that is being used as a pointer, see if we have 1158 /* If X is a register that is being used as a pointer, see if we have
1186 end of generating code for a statement. */ 1188 end of generating code for a statement. */
1187 1189
1188 void 1190 void
1189 free_temp_slots (void) 1191 free_temp_slots (void)
1190 { 1192 {
1191 struct temp_slot *p, *next; 1193 class temp_slot *p, *next;
1192 bool some_available = false; 1194 bool some_available = false;
1193 1195
1194 for (p = *temp_slots_at_level (temp_slot_level); p; p = next) 1196 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1195 { 1197 {
1196 next = p->next; 1198 next = p->next;
2116 /* If we have something other than a REG (e.g. a PARALLEL), then assume 2118 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2117 it is OK. */ 2119 it is OK. */
2118 if (!REG_P (reg)) 2120 if (!REG_P (reg))
2119 return 0; 2121 return 0;
2120 2122
2123 /* Use the default ABI if the type of the function isn't known.
2124 The scheme for handling interoperability between different ABIs
2125 requires us to be able to tell when we're calling a function with
2126 a nondefault ABI. */
2127 const predefined_function_abi &abi = (fntype
2128 ? fntype_abi (fntype)
2129 : default_function_abi);
2121 regno = REGNO (reg); 2130 regno = REGNO (reg);
2122 nregs = hard_regno_nregs (regno, TYPE_MODE (type)); 2131 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2123 for (i = 0; i < nregs; i++) 2132 for (i = 0; i < nregs; i++)
2124 if (! call_used_regs[regno + i]) 2133 if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2125 return 1; 2134 return 1;
2126 2135
2127 return 0; 2136 return 0;
2128 } 2137 }
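This hunk replaces the single global call_used_regs view with a per-callee query: fntype_abi and clobbers_full_reg_p come from the function-abi.h header added near the top of the file, so callees with a nondefault ABI are handled correctly. The rule itself, modeled in self-contained form with hypothetical types:

    #include <bitset>

    // A value cannot be returned in a register the callee's ABI treats as
    // call-saved, because the caller assumes such registers survive the call.
    struct abi_model { std::bitset<128> clobbered; };

    bool must_return_in_memory (const abi_model &abi,
                                const std::bitset<128> &fixed,
                                unsigned regno, unsigned nregs)
    {
      for (unsigned i = 0; i < nregs; ++i)
        if (!fixed[regno + i] && !abi.clobbered[regno + i])
          return true;        // part of the value sits in a preserved register
      return false;
    }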
2129 2138
2261 }; 2270 };
2262 2271
2263 struct assign_parm_data_one 2272 struct assign_parm_data_one
2264 { 2273 {
2265 tree nominal_type; 2274 tree nominal_type;
2266 tree passed_type; 2275 function_arg_info arg;
2267 rtx entry_parm; 2276 rtx entry_parm;
2268 rtx stack_parm; 2277 rtx stack_parm;
2269 machine_mode nominal_mode; 2278 machine_mode nominal_mode;
2270 machine_mode passed_mode; 2279 machine_mode passed_mode;
2271 machine_mode promoted_mode;
2272 struct locate_and_pad_arg_data locate; 2280 struct locate_and_pad_arg_data locate;
2273 int partial; 2281 int partial;
2274 BOOL_BITFIELD named_arg : 1;
2275 BOOL_BITFIELD passed_pointer : 1;
2276 BOOL_BITFIELD on_stack : 1;
2277 BOOL_BITFIELD loaded_in_reg : 1;
2278 }; 2282 };
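Most of the deleted fields migrate into the new function_arg_info member arg: the passed type, the promoted mode, and the named and passed-pointer bits now travel through the target hooks as one object. A simplified, self-contained model of its shape (the real class lives in gcc/calls.h; the placeholder typedefs are not GCC's):

    typedef void *tree_t;                         // stand-in for GCC's tree
    enum machine_mode_t { VOIDmode_t, BLKmode_t };

    struct function_arg_info_model {
      tree_t type = nullptr;             // type as passed
      machine_mode_t mode = VOIDmode_t;  // possibly promoted mode
      bool named = false;                // non-variadic argument?
      bool pass_by_reference = false;    // lowered to an invisible reference?
    };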
2279 2283
2280 /* A subroutine of assign_parms. Initialize ALL. */ 2284 /* A subroutine of assign_parms. Initialize ALL. */
2281 2285
2282 static void 2286 static void
2406 2410
2407 static void 2411 static void
2408 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, 2412 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2409 struct assign_parm_data_one *data) 2413 struct assign_parm_data_one *data)
2410 { 2414 {
2411 tree nominal_type, passed_type;
2412 machine_mode nominal_mode, passed_mode, promoted_mode;
2413 int unsignedp; 2415 int unsignedp;
2414 2416
2415 memset (data, 0, sizeof (*data)); 2417 *data = assign_parm_data_one ();
2416 2418
2417 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */ 2419 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2418 if (!cfun->stdarg) 2420 if (!cfun->stdarg)
2419 data->named_arg = 1; /* No variadic parms. */ 2421 data->arg.named = 1; /* No variadic parms. */
2420 else if (DECL_CHAIN (parm)) 2422 else if (DECL_CHAIN (parm))
2421 data->named_arg = 1; /* Not the last non-variadic parm. */ 2423 data->arg.named = 1; /* Not the last non-variadic parm. */
2422 else if (targetm.calls.strict_argument_naming (all->args_so_far)) 2424 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2423 data->named_arg = 1; /* Only variadic ones are unnamed. */ 2425 data->arg.named = 1; /* Only variadic ones are unnamed. */
2424 else 2426 else
2425 data->named_arg = 0; /* Treat as variadic. */ 2427 data->arg.named = 0; /* Treat as variadic. */
2426 2428
2427 nominal_type = TREE_TYPE (parm); 2429 data->nominal_type = TREE_TYPE (parm);
2428 passed_type = DECL_ARG_TYPE (parm); 2430 data->arg.type = DECL_ARG_TYPE (parm);
2429 2431
2430 /* Look out for errors propagating this far. Also, if the parameter's 2432 /* Look out for errors propagating this far. Also, if the parameter's
2431 type is void then its value doesn't matter. */ 2433 type is void then its value doesn't matter. */
2432 if (TREE_TYPE (parm) == error_mark_node 2434 if (TREE_TYPE (parm) == error_mark_node
2433 /* This can happen after weird syntax errors 2435 /* This can happen after weird syntax errors
2434 or if an enum type is defined among the parms. */ 2436 or if an enum type is defined among the parms. */
2435 || TREE_CODE (parm) != PARM_DECL 2437 || TREE_CODE (parm) != PARM_DECL
2436 || passed_type == NULL 2438 || data->arg.type == NULL
2437 || VOID_TYPE_P (nominal_type)) 2439 || VOID_TYPE_P (data->nominal_type))
2438 { 2440 {
2439 nominal_type = passed_type = void_type_node; 2441 data->nominal_type = data->arg.type = void_type_node;
2440 nominal_mode = passed_mode = promoted_mode = VOIDmode; 2442 data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2441 goto egress; 2443 return;
2442 } 2444 }
2443 2445
2444 /* Find mode of arg as it is passed, and mode of arg as it should be 2446 /* Find mode of arg as it is passed, and mode of arg as it should be
2445 during execution of this function. */ 2447 during execution of this function. */
2446 passed_mode = TYPE_MODE (passed_type); 2448 data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2447 nominal_mode = TYPE_MODE (nominal_type); 2449 data->nominal_mode = TYPE_MODE (data->nominal_type);
2448 2450
2449 /* If the parm is to be passed as a transparent union or record, use the 2451 /* If the parm is to be passed as a transparent union or record, use the
2450 type of the first field for the tests below. We have already verified 2452 type of the first field for the tests below. We have already verified
2451 that the modes are the same. */ 2453 that the modes are the same. */
2452 if ((TREE_CODE (passed_type) == UNION_TYPE 2454 if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2453 || TREE_CODE (passed_type) == RECORD_TYPE) 2455 && TYPE_TRANSPARENT_AGGR (data->arg.type))
2454 && TYPE_TRANSPARENT_AGGR (passed_type)) 2456 data->arg.type = TREE_TYPE (first_field (data->arg.type));
2455 passed_type = TREE_TYPE (first_field (passed_type));
2456 2457
2457 /* See if this arg was passed by invisible reference. */ 2458 /* See if this arg was passed by invisible reference. */
2458 if (pass_by_reference (&all->args_so_far_v, passed_mode, 2459 if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2459 passed_type, data->named_arg)) 2460 {
2460 { 2461 data->nominal_type = data->arg.type;
2461 passed_type = nominal_type = build_pointer_type (passed_type); 2462 data->passed_mode = data->nominal_mode = data->arg.mode;
2462 data->passed_pointer = true;
2463 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2464 } 2463 }
2465 2464
2466 /* Find mode as it is passed by the ABI. */ 2465 /* Find mode as it is passed by the ABI. */
2467 unsignedp = TYPE_UNSIGNED (passed_type); 2466 unsignedp = TYPE_UNSIGNED (data->arg.type);
2468 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp, 2467 data->arg.mode
2469 TREE_TYPE (current_function_decl), 0); 2468 = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2470 2469 TREE_TYPE (current_function_decl), 0);
2471 egress:
2472 data->nominal_type = nominal_type;
2473 data->passed_type = passed_type;
2474 data->nominal_mode = nominal_mode;
2475 data->passed_mode = passed_mode;
2476 data->promoted_mode = promoted_mode;
2477 } 2470 }
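Judging from how the rewritten function uses it, apply_pass_by_reference_rules both answers the question and rewrites data->arg in place when the answer is yes, which is why the caller simply copies arg.type and arg.mode back into the nominal fields. A hedged sketch of the effect (the real definition is in gcc/calls.c):

    // Inferred behavior, expressed as pseudocode:
    //   if (target passes arg.type by invisible reference)
    //     {
    //       arg.type = build_pointer_type (arg.type);  // now a pointer type
    //       arg.mode = TYPE_MODE (arg.type);           // the pointer's mode
    //       arg.pass_by_reference = true;
    //       return true;
    //     }
    //   return false;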
2478 2471
2479 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */ 2472 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2480 2473
2481 static void 2474 static void
2482 assign_parms_setup_varargs (struct assign_parm_data_all *all, 2475 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2483 struct assign_parm_data_one *data, bool no_rtl) 2476 struct assign_parm_data_one *data, bool no_rtl)
2484 { 2477 {
2485 int varargs_pretend_bytes = 0; 2478 int varargs_pretend_bytes = 0;
2486 2479
2487 targetm.calls.setup_incoming_varargs (all->args_so_far, 2480 function_arg_info last_named_arg = data->arg;
2488 data->promoted_mode, 2481 last_named_arg.named = true;
2489 data->passed_type, 2482 targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2490 &varargs_pretend_bytes, no_rtl); 2483 &varargs_pretend_bytes, no_rtl);
2491 2484
2492 /* If the back-end has requested extra stack space, record how much is 2485 /* If the back-end has requested extra stack space, record how much is
2493 needed. Do not change pretend_args_size otherwise since it may be 2486 needed. Do not change pretend_args_size otherwise since it may be
2494 nonzero from an earlier partial argument. */ 2487 nonzero from an earlier partial argument. */
2505 { 2498 {
2506 HOST_WIDE_INT pretend_bytes = 0; 2499 HOST_WIDE_INT pretend_bytes = 0;
2507 rtx entry_parm; 2500 rtx entry_parm;
2508 bool in_regs; 2501 bool in_regs;
2509 2502
2510 if (data->promoted_mode == VOIDmode) 2503 if (data->arg.mode == VOIDmode)
2511 { 2504 {
2512 data->entry_parm = data->stack_parm = const0_rtx; 2505 data->entry_parm = data->stack_parm = const0_rtx;
2513 return; 2506 return;
2514 } 2507 }
2515 2508
2516 targetm.calls.warn_parameter_passing_abi (all->args_so_far, 2509 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2517 data->passed_type); 2510 data->arg.type);
2518 2511
2519 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far, 2512 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2520 data->promoted_mode, 2513 data->arg);
2521 data->passed_type,
2522 data->named_arg);
2523
2524 if (entry_parm == 0) 2514 if (entry_parm == 0)
2525 data->promoted_mode = data->passed_mode; 2515 data->arg.mode = data->passed_mode;
2526 2516
2527 /* Determine parm's home in the stack, in case it arrives in the stack 2517 /* Determine parm's home in the stack, in case it arrives in the stack
2528 or we should pretend it did. Compute the stack position and rtx where 2518 or we should pretend it did. Compute the stack position and rtx where
2529 the argument arrives and its size. 2519 the argument arrives and its size.
2530 2520
2536 as it was the previous time. */ 2526 as it was the previous time. */
2537 in_regs = (entry_parm != 0); 2527 in_regs = (entry_parm != 0);
2538 #ifdef STACK_PARMS_IN_REG_PARM_AREA 2528 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2539 in_regs = true; 2529 in_regs = true;
2540 #endif 2530 #endif
2541 if (!in_regs && !data->named_arg) 2531 if (!in_regs && !data->arg.named)
2542 { 2532 {
2543 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far)) 2533 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2544 { 2534 {
2545 rtx tem; 2535 rtx tem;
2536 function_arg_info named_arg = data->arg;
2537 named_arg.named = true;
2546 tem = targetm.calls.function_incoming_arg (all->args_so_far, 2538 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2547 data->promoted_mode, 2539 named_arg);
2548 data->passed_type, true);
2549 in_regs = tem != NULL; 2540 in_regs = tem != NULL;
2550 } 2541 }
2551 } 2542 }
2552 2543
2553 /* If this parameter was passed both in registers and in the stack, use 2544 /* If this parameter was passed both in registers and in the stack, use
2554 the copy on the stack. */ 2545 the copy on the stack. */
2555 if (targetm.calls.must_pass_in_stack (data->promoted_mode, 2546 if (targetm.calls.must_pass_in_stack (data->arg))
2556 data->passed_type))
2557 entry_parm = 0; 2547 entry_parm = 0;
2558 2548
2559 if (entry_parm) 2549 if (entry_parm)
2560 { 2550 {
2561 int partial; 2551 int partial;
2562 2552
2563 partial = targetm.calls.arg_partial_bytes (all->args_so_far, 2553 partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2564 data->promoted_mode,
2565 data->passed_type,
2566 data->named_arg);
2567 data->partial = partial; 2554 data->partial = partial;
2568 2555
2569 /* The caller might already have allocated stack space for the 2556 /* The caller might already have allocated stack space for the
2570 register parameters. */ 2557 register parameters. */
2571 if (partial != 0 && all->reg_parm_stack_space == 0) 2558 if (partial != 0 && all->reg_parm_stack_space == 0)
2596 don't include this in the stack size until later. */ 2583 don't include this in the stack size until later. */
2597 all->extra_pretend_bytes = all->pretend_args_size; 2584 all->extra_pretend_bytes = all->pretend_args_size;
2598 } 2585 }
2599 } 2586 }
2600 2587
2601 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs, 2588 locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2602 all->reg_parm_stack_space, 2589 all->reg_parm_stack_space,
2603 entry_parm ? data->partial : 0, current_function_decl, 2590 entry_parm ? data->partial : 0, current_function_decl,
2604 &all->stack_args_size, &data->locate); 2591 &all->stack_args_size, &data->locate);
2605 2592
2606 /* Update parm_stack_boundary if this parameter is passed in the 2593 /* Update parm_stack_boundary if this parameter is passed in the
2667 offset_rtx = ARGS_SIZE_RTX (data->locate.offset); 2654 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2668 2655
2669 stack_parm = crtl->args.internal_arg_pointer; 2656 stack_parm = crtl->args.internal_arg_pointer;
2670 if (offset_rtx != const0_rtx) 2657 if (offset_rtx != const0_rtx)
2671 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx); 2658 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2672 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm); 2659 stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2673 2660
2674 if (!data->passed_pointer) 2661 if (!data->arg.pass_by_reference)
2675 { 2662 {
2676 set_mem_attributes (stack_parm, parm, 1); 2663 set_mem_attributes (stack_parm, parm, 1);
2677 /* set_mem_attributes could set MEM_SIZE to the passed mode's size, 2664 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2678 while promoted mode's size is needed. */ 2665 while promoted mode's size is needed. */
2679 if (data->promoted_mode != BLKmode 2666 if (data->arg.mode != BLKmode
2680 && data->promoted_mode != DECL_MODE (parm)) 2667 && data->arg.mode != DECL_MODE (parm))
2681 { 2668 {
2682 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode)); 2669 set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2683 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm)) 2670 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2684 { 2671 {
2685 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm), 2672 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2686 data->promoted_mode); 2673 data->arg.mode);
2687 if (maybe_ne (offset, 0)) 2674 if (maybe_ne (offset, 0))
2688 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset); 2675 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2689 } 2676 }
2690 } 2677 }
2691 } 2678 }
2696 /* If we're padding upward, we know that the alignment of the slot 2683 /* If we're padding upward, we know that the alignment of the slot
2697 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're 2684 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2698 intentionally forcing upward padding. Otherwise we have to come 2685 intentionally forcing upward padding. Otherwise we have to come
2699 up with a guess at the alignment based on OFFSET_RTX. */ 2686 up with a guess at the alignment based on OFFSET_RTX. */
2700 poly_int64 offset; 2687 poly_int64 offset;
2701 if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm) 2688 if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2702 align = boundary; 2689 align = boundary;
2690 else if (data->locate.where_pad == PAD_UPWARD)
2691 {
2692 align = boundary;
2693 /* If the argument offset is actually more aligned than the nominal
2694 stack slot boundary, take advantage of that excess alignment.
2695 Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
2696 if (poly_int_rtx_p (offset_rtx, &offset)
2697 && known_eq (STACK_POINTER_OFFSET, 0))
2698 {
2699 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2700 if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2701 offset_align = STACK_BOUNDARY;
2702 align = MAX (align, offset_align);
2703 }
2704 }
2703 else if (poly_int_rtx_p (offset_rtx, &offset)) 2705 else if (poly_int_rtx_p (offset_rtx, &offset))
2704 { 2706 {
2705 align = least_bit_hwi (boundary); 2707 align = least_bit_hwi (boundary);
2706 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT; 2708 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2707 if (offset_align != 0) 2709 if (offset_align != 0)
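The inserted PAD_UPWARD branch derives extra alignment from a compile-time-constant argument offset: the largest power of two dividing the offset is alignment the slot provably has, clamped to STACK_BOUNDARY and only trusted when STACK_POINTER_OFFSET is zero. The arithmetic in self-contained form:

    #include <cstdint>

    // Alignment in bits guaranteed by a known byte offset: the least set
    // bit of the offset.  Zero gives no finite bound and is clamped by
    // the caller, mirroring the offset_align == 0 test above.
    unsigned align_bits_from_offset (uint64_t offset_bytes)
    {
      if (offset_bytes == 0)
        return 0;
      return (unsigned) ((offset_bytes & -offset_bytes) * 8);
    }
    // e.g. an offset of 24 is an 8-byte multiple: a 64-bit guarantee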
2733 { 2735 {
2734 /* Handle calls that pass values in multiple non-contiguous 2736 /* Handle calls that pass values in multiple non-contiguous
2735 locations. The Irix 6 ABI has examples of this. */ 2737 locations. The Irix 6 ABI has examples of this. */
2736 if (GET_CODE (entry_parm) == PARALLEL) 2738 if (GET_CODE (entry_parm) == PARALLEL)
2737 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm, 2739 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2738 data->passed_type, 2740 data->arg.type, int_size_in_bytes (data->arg.type));
2739 int_size_in_bytes (data->passed_type));
2740 else 2741 else
2741 { 2742 {
2742 gcc_assert (data->partial % UNITS_PER_WORD == 0); 2743 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2743 move_block_from_reg (REGNO (entry_parm), 2744 move_block_from_reg (REGNO (entry_parm),
2744 validize_mem (copy_rtx (stack_parm)), 2745 validize_mem (copy_rtx (stack_parm)),
2790 stack, even if we will store the reconstituted parameter on the 2791 stack, even if we will store the reconstituted parameter on the
2791 stack later. */ 2792 stack later. */
2792 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode) 2793 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2793 { 2794 {
2794 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm)); 2795 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2795 emit_group_store (parmreg, entry_parm, data->passed_type, 2796 emit_group_store (parmreg, entry_parm, data->arg.type,
2796 GET_MODE_SIZE (GET_MODE (entry_parm))); 2797 GET_MODE_SIZE (GET_MODE (entry_parm)));
2797 entry_parm = parmreg; 2798 entry_parm = parmreg;
2798 } 2799 }
2799 2800
2800 data->entry_parm = entry_parm; 2801 data->entry_parm = entry_parm;
2810 2811
2811 /* If we can't trust the parm stack slot to be aligned enough for its 2812 /* If we can't trust the parm stack slot to be aligned enough for its
2812 ultimate type, don't use that slot after entry. We'll make another 2813 ultimate type, don't use that slot after entry. We'll make another
2813 stack slot, if we need one. */ 2814 stack slot, if we need one. */
2814 if (stack_parm 2815 if (stack_parm
2815 && ((STRICT_ALIGNMENT 2816 && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2816 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)) 2817 && ((optab_handler (movmisalign_optab, data->nominal_mode)
2818 != CODE_FOR_nothing)
2819 || targetm.slow_unaligned_access (data->nominal_mode,
2820 MEM_ALIGN (stack_parm))))
2817 || (data->nominal_type 2821 || (data->nominal_type
2818 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm) 2822 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2819 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY))) 2823 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2820 stack_parm = NULL; 2824 stack_parm = NULL;
2821 2825
2828 2832
2829 /* If stack protection is in effect for this function, don't leave any 2833 /* If stack protection is in effect for this function, don't leave any
2830 pointers in their passed stack slots. */ 2834 pointers in their passed stack slots. */
2831 else if (crtl->stack_protect_guard 2835 else if (crtl->stack_protect_guard
2832 && (flag_stack_protect == 2 2836 && (flag_stack_protect == 2
2833 || data->passed_pointer 2837 || data->arg.pass_by_reference
2834 || POINTER_TYPE_P (data->nominal_type))) 2838 || POINTER_TYPE_P (data->nominal_type)))
2835 stack_parm = NULL; 2839 stack_parm = NULL;
2836 2840
2837 data->stack_parm = stack_parm; 2841 data->stack_parm = stack_parm;
2838 } 2842 }
2850 2854
2851 #ifdef BLOCK_REG_PADDING 2855 #ifdef BLOCK_REG_PADDING
2852 /* Only assign_parm_setup_block knows how to deal with register arguments 2856 /* Only assign_parm_setup_block knows how to deal with register arguments
2853 that are padded at the least significant end. */ 2857 that are padded at the least significant end. */
2854 if (REG_P (data->entry_parm) 2858 if (REG_P (data->entry_parm)
2855 && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD) 2859 && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2856 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1) 2860 && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2857 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) 2861 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2858 return true; 2862 return true;
2859 #endif 2863 #endif
2860 2864
2861 return false; 2865 return false;
2906 } 2910 }
2907 } 2911 }
2908 data->stack_parm = NULL; 2912 data->stack_parm = NULL;
2909 } 2913 }
2910 2914
2911 size = int_size_in_bytes (data->passed_type); 2915 size = int_size_in_bytes (data->arg.type);
2912 size_stored = CEIL_ROUND (size, UNITS_PER_WORD); 2916 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2913 if (stack_parm == 0) 2917 if (stack_parm == 0)
2914 { 2918 {
2915 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD)); 2919 HOST_WIDE_INT parm_align
2916 stack_parm = assign_stack_local (BLKmode, size_stored, 2920 = (STRICT_ALIGNMENT
2917 DECL_ALIGN (parm)); 2921 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2922
2923 SET_DECL_ALIGN (parm, parm_align);
2924 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2925 {
2926 rtx allocsize = gen_int_mode (size_stored, Pmode);
2927 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2928 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2929 MAX_SUPPORTED_STACK_ALIGNMENT);
2930 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2931 DECL_ALIGN (parm));
2932 mark_reg_pointer (addr, DECL_ALIGN (parm));
2933 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2934 MEM_NOTRAP_P (stack_parm) = 1;
2935 }
2936 else
2937 stack_parm = assign_stack_local (BLKmode, size_stored,
2938 DECL_ALIGN (parm));
2918 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size)) 2939 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2919 PUT_MODE (stack_parm, GET_MODE (entry_parm)); 2940 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2920 set_mem_attributes (stack_parm, parm, 1); 2941 set_mem_attributes (stack_parm, parm, 1);
2921 } 2942 }
2922 2943
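The new branch above handles parameters whose DECL_ALIGN exceeds MAX_SUPPORTED_STACK_ALIGNMENT by over-allocating the slot (get_dynamic_stack_size pads the request) and then rounding the address up at run time (align_dynamic_address). The pointer-rounding trick this builds on, in plain C++ with a hypothetical helper name:

    #include <cstdint>

    // Round BASE up to a power-of-two ALIGN: over-allocate by ALIGN - 1
    // bytes, then mask the low bits off the address.
    void *align_up (void *base, uintptr_t align)
    {
      uintptr_t p = (uintptr_t) base;
      return (void *) ((p + align - 1) & ~(align - 1));
    }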
2944 2965
2945 mem = validize_mem (copy_rtx (stack_parm)); 2966 mem = validize_mem (copy_rtx (stack_parm));
2946 2967
2947 /* Handle values in multiple non-contiguous locations. */ 2968 /* Handle values in multiple non-contiguous locations. */
2948 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem)) 2969 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2949 emit_group_store (mem, entry_parm, data->passed_type, size); 2970 emit_group_store (mem, entry_parm, data->arg.type, size);
2950 else if (GET_CODE (entry_parm) == PARALLEL) 2971 else if (GET_CODE (entry_parm) == PARALLEL)
2951 { 2972 {
2952 push_to_sequence2 (all->first_conversion_insn, 2973 push_to_sequence2 (all->first_conversion_insn,
2953 all->last_conversion_insn); 2974 all->last_conversion_insn);
2954 emit_group_store (mem, entry_parm, data->passed_type, size); 2975 emit_group_store (mem, entry_parm, data->arg.type, size);
2955 all->first_conversion_insn = get_insns (); 2976 all->first_conversion_insn = get_insns ();
2956 all->last_conversion_insn = get_last_insn (); 2977 all->last_conversion_insn = get_last_insn ();
2957 end_sequence (); 2978 end_sequence ();
2958 in_conversion_seq = true; 2979 in_conversion_seq = true;
2959 } 2980 }
2969 machine_mode mode = int_mode_for_size (bits, 0).else_blk (); 2990 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
2970 2991
2971 if (mode != BLKmode 2992 if (mode != BLKmode
2972 #ifdef BLOCK_REG_PADDING 2993 #ifdef BLOCK_REG_PADDING
2973 && (size == UNITS_PER_WORD 2994 && (size == UNITS_PER_WORD
2974 || (BLOCK_REG_PADDING (mode, data->passed_type, 1) 2995 || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
2975 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) 2996 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2976 #endif 2997 #endif
2977 ) 2998 )
2978 { 2999 {
2979 rtx reg; 3000 rtx reg;
3010 padding of smaller-than-word arguments on a machine 3031 padding of smaller-than-word arguments on a machine
3011 with little-endian bytes, which would likely require 3032 with little-endian bytes, which would likely require
3012 additional changes to work correctly. */ 3033 additional changes to work correctly. */
3013 gcc_checking_assert (BYTES_BIG_ENDIAN 3034 gcc_checking_assert (BYTES_BIG_ENDIAN
3014 && (BLOCK_REG_PADDING (mode, 3035 && (BLOCK_REG_PADDING (mode,
3015 data->passed_type, 1) 3036 data->arg.type, 1)
3016 == PAD_UPWARD)); 3037 == PAD_UPWARD));
3017 3038
3018 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 3039 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3019 3040
3020 x = gen_rtx_REG (word_mode, REGNO (entry_parm)); 3041 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3031 machine must be aligned to the left before storing 3052 machine must be aligned to the left before storing
3032 to memory. Note that the previous test doesn't 3053 to memory. Note that the previous test doesn't
3033 handle all cases (e.g. SIZE == 3). */ 3054 handle all cases (e.g. SIZE == 3). */
3034 else if (size != UNITS_PER_WORD 3055 else if (size != UNITS_PER_WORD
3035 #ifdef BLOCK_REG_PADDING 3056 #ifdef BLOCK_REG_PADDING
3036 && (BLOCK_REG_PADDING (mode, data->passed_type, 1) 3057 && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3037 == PAD_DOWNWARD) 3058 == PAD_DOWNWARD)
3038 #else 3059 #else
3039 && BYTES_BIG_ENDIAN 3060 && BYTES_BIG_ENDIAN
3040 #endif 3061 #endif
3041 ) 3062 )
3055 else if (!MEM_P (mem)) 3076 else if (!MEM_P (mem))
3056 { 3077 {
3057 gcc_checking_assert (size > UNITS_PER_WORD); 3078 gcc_checking_assert (size > UNITS_PER_WORD);
3058 #ifdef BLOCK_REG_PADDING 3079 #ifdef BLOCK_REG_PADDING
3059 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem), 3080 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3060 data->passed_type, 0) 3081 data->arg.type, 0)
3061 == PAD_UPWARD); 3082 == PAD_UPWARD);
3062 #endif 3083 #endif
3063 emit_move_insn (mem, entry_parm); 3084 emit_move_insn (mem, entry_parm);
3064 } 3085 }
3065 else 3086 else
3066 move_block_from_reg (REGNO (entry_parm), mem, 3087 move_block_from_reg (REGNO (entry_parm), mem,
3067 size_stored / UNITS_PER_WORD); 3088 size_stored / UNITS_PER_WORD);
3068 } 3089 }
3069 else if (data->stack_parm == 0) 3090 else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3070 { 3091 {
3071 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); 3092 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3072 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size), 3093 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3073 BLOCK_OP_NORMAL); 3094 BLOCK_OP_NORMAL);
3074 all->first_conversion_insn = get_insns (); 3095 all->first_conversion_insn = get_insns ();
3108 rtx equiv_stack_parm; 3129 rtx equiv_stack_parm;
3109 machine_mode promoted_nominal_mode; 3130 machine_mode promoted_nominal_mode;
3110 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm)); 3131 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3111 bool did_conversion = false; 3132 bool did_conversion = false;
3112 bool need_conversion, moved; 3133 bool need_conversion, moved;
3134 enum insn_code icode;
3113 rtx rtl; 3135 rtx rtl;
3114 3136
3115 /* Store the parm in a pseudoregister during the function, but we may 3137 /* Store the parm in a pseudoregister during the function, but we may
3116 need to do it in a wider mode. Using 2 here makes the result 3138 need to do it in a wider mode. Using 2 here makes the result
3117 consistent with promote_decl_mode and thus expand_expr_real_1. */ 3139 consistent with promote_decl_mode and thus expand_expr_real_1. */
3123 if (!DECL_ARTIFICIAL (parm)) 3145 if (!DECL_ARTIFICIAL (parm))
3124 mark_user_reg (parmreg); 3146 mark_user_reg (parmreg);
3125 3147
3126 /* If this was an item that we received a pointer to, 3148 /* If this was an item that we received a pointer to,
3127 set rtl appropriately. */ 3149 set rtl appropriately. */
3128 if (data->passed_pointer) 3150 if (data->arg.pass_by_reference)
3129 { 3151 {
3130 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg); 3152 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3131 set_mem_attributes (rtl, parm, 1); 3153 set_mem_attributes (rtl, parm, 1);
3132 } 3154 }
3133 else 3155 else
3134 rtl = parmreg; 3156 rtl = parmreg;
3135 3157
3140 3162
3141 equiv_stack_parm = data->stack_parm; 3163 equiv_stack_parm = data->stack_parm;
3142 validated_mem = validize_mem (copy_rtx (data->entry_parm)); 3164 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3143 3165
3144 need_conversion = (data->nominal_mode != data->passed_mode 3166 need_conversion = (data->nominal_mode != data->passed_mode
3145 || promoted_nominal_mode != data->promoted_mode); 3167 || promoted_nominal_mode != data->arg.mode);
3146 moved = false; 3168 moved = false;
3147 3169
3148 if (need_conversion 3170 if (need_conversion
3149 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT 3171 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3150 && data->nominal_mode == data->passed_mode 3172 && data->nominal_mode == data->passed_mode
3169 3191
3170 First, we try to emit an insn which performs the necessary 3192 First, we try to emit an insn which performs the necessary
3171 conversion. We verify that this insn does not clobber any 3193 conversion. We verify that this insn does not clobber any
3172 hard registers. */ 3194 hard registers. */
3173 3195
3174 enum insn_code icode;
3175 rtx op0, op1; 3196 rtx op0, op1;
3176 3197
3177 icode = can_extend_p (promoted_nominal_mode, data->passed_mode, 3198 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3178 unsignedp); 3199 unsignedp);
3179 3200
3212 moved = true; 3233 moved = true;
3213 CLEAR_HARD_REG_SET (hardregs); 3234 CLEAR_HARD_REG_SET (hardregs);
3214 for (insn = insns; insn && moved; insn = NEXT_INSN (insn)) 3235 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3215 { 3236 {
3216 if (INSN_P (insn)) 3237 if (INSN_P (insn))
3217 note_stores (PATTERN (insn), record_hard_reg_sets, 3238 note_stores (insn, record_hard_reg_sets, &hardregs);
3218 &hardregs);
3219 if (!hard_reg_set_empty_p (hardregs)) 3239 if (!hard_reg_set_empty_p (hardregs))
3220 moved = false; 3240 moved = false;
3221 } 3241 }
3222 3242
3223 end_sequence (); 3243 end_sequence ();
3272 all->last_conversion_insn = get_last_insn (); 3292 all->last_conversion_insn = get_last_insn ();
3273 end_sequence (); 3293 end_sequence ();
3274 3294
3275 did_conversion = true; 3295 did_conversion = true;
3276 } 3296 }
3297 else if (MEM_P (data->entry_parm)
3298 && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3299 > MEM_ALIGN (data->entry_parm)
3300 && (((icode = optab_handler (movmisalign_optab,
3301 promoted_nominal_mode))
3302 != CODE_FOR_nothing)
3303 || targetm.slow_unaligned_access (promoted_nominal_mode,
3304 MEM_ALIGN (data->entry_parm))))
3305 {
3306 if (icode != CODE_FOR_nothing)
3307 emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3308 else
3309 rtl = parmreg = extract_bit_field (validated_mem,
3310 GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3311 unsignedp, parmreg,
3312 promoted_nominal_mode, VOIDmode, false, NULL);
3313 }
3277 else 3314 else
3278 emit_move_insn (parmreg, validated_mem); 3315 emit_move_insn (parmreg, validated_mem);
3279 3316
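The new else-if above loads a promoted parameter from an under-aligned entry slot either through the target's movmisalign pattern or, failing that, via extract_bit_field, instead of emitting a plain move that could fault or be slow. In portable terms the goal is an access that never dereferences at the wrong alignment:

    #include <cstdint>
    #include <cstring>

    // Safe load from possibly under-aligned memory: memcpy lowers to a
    // misalignment-tolerant load where the target has one, and to byte
    // accesses where it does not.
    uint64_t load_unaligned_u64 (const void *p)
    {
      uint64_t v;
      std::memcpy (&v, p, sizeof v);
      return v;
    }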
3280 /* If we were passed a pointer but the actual value can safely live 3317 /* If we were passed a pointer but the actual value can safely live
3281 in a register, retrieve it and use it directly. */ 3318 in a register, retrieve it and use it directly. */
3282 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode) 3319 if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3283 { 3320 {
3284 /* We can't use nominal_mode, because it will have been set to 3321 /* We can't use nominal_mode, because it will have been set to
3285 Pmode above. We must use the actual mode of the parm. */ 3322 Pmode above. We must use the actual mode of the parm. */
3286 if (use_register_for_decl (parm)) 3323 if (use_register_for_decl (parm))
3287 { 3324 {
3394 execution. */ 3431 execution. */
3395 bool to_conversion = false; 3432 bool to_conversion = false;
3396 3433
3397 assign_parm_remove_parallels (data); 3434 assign_parm_remove_parallels (data);
3398 3435
3399 if (data->promoted_mode != data->nominal_mode) 3436 if (data->arg.mode != data->nominal_mode)
3400 { 3437 {
3401 /* Conversion is required. */ 3438 /* Conversion is required. */
3402 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); 3439 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3403 3440
3404 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm))); 3441 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3427 { 3464 {
3428 rtx src, dest; 3465 rtx src, dest;
3429 3466
3430 if (data->stack_parm == 0) 3467 if (data->stack_parm == 0)
3431 { 3468 {
3432 int align = STACK_SLOT_ALIGNMENT (data->passed_type, 3469 int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3433 GET_MODE (data->entry_parm), 3470 GET_MODE (data->entry_parm),
3434 TYPE_ALIGN (data->passed_type)); 3471 TYPE_ALIGN (data->arg.type));
3472 if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3473 && ((optab_handler (movmisalign_optab,
3474 GET_MODE (data->entry_parm))
3475 != CODE_FOR_nothing)
3476 || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3477 align)))
3478 align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3435 data->stack_parm 3479 data->stack_parm
3436 = assign_stack_local (GET_MODE (data->entry_parm), 3480 = assign_stack_local (GET_MODE (data->entry_parm),
3437 GET_MODE_SIZE (GET_MODE (data->entry_parm)), 3481 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3438 align); 3482 align);
3483 align = MEM_ALIGN (data->stack_parm);
3439 set_mem_attributes (data->stack_parm, parm, 1); 3484 set_mem_attributes (data->stack_parm, parm, 1);
3485 set_mem_align (data->stack_parm, align);
3440 } 3486 }
3441 3487
3442 dest = validize_mem (copy_rtx (data->stack_parm)); 3488 dest = validize_mem (copy_rtx (data->stack_parm));
3443 src = validize_mem (copy_rtx (data->entry_parm)); 3489 src = validize_mem (copy_rtx (data->entry_parm));
3444 3490
3445 if (MEM_P (src)) 3491 if (TYPE_EMPTY_P (data->arg.type))
3492 /* Empty types don't really need to be copied. */;
3493 else if (MEM_P (src))
3446 { 3494 {
3447 /* Use a block move to handle potentially misaligned entry_parm. */ 3495 /* Use a block move to handle potentially misaligned entry_parm. */
3448 if (!to_conversion) 3496 if (!to_conversion)
3449 push_to_sequence2 (all->first_conversion_insn, 3497 push_to_sequence2 (all->first_conversion_insn,
3450 all->last_conversion_insn); 3498 all->last_conversion_insn);
3451 to_conversion = true; 3499 to_conversion = true;
3452 3500
3453 emit_block_move (dest, src, 3501 emit_block_move (dest, src,
3454 GEN_INT (int_size_in_bytes (data->passed_type)), 3502 GEN_INT (int_size_in_bytes (data->arg.type)),
3455 BLOCK_OP_NORMAL); 3503 BLOCK_OP_NORMAL);
3456 } 3504 }
3457 else 3505 else
3458 { 3506 {
3459 if (!REG_P (src)) 3507 if (!REG_P (src))
3573 3621
3574 /* Estimate stack alignment from parameter alignment. */ 3622 /* Estimate stack alignment from parameter alignment. */
3575 if (SUPPORTS_STACK_ALIGNMENT) 3623 if (SUPPORTS_STACK_ALIGNMENT)
3576 { 3624 {
3577 unsigned int align 3625 unsigned int align
3578 = targetm.calls.function_arg_boundary (data.promoted_mode, 3626 = targetm.calls.function_arg_boundary (data.arg.mode,
3579 data.passed_type); 3627 data.arg.type);
3580 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode, 3628 align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3581 align);
3582 if (TYPE_ALIGN (data.nominal_type) > align) 3629 if (TYPE_ALIGN (data.nominal_type) > align)
3583 align = MINIMUM_ALIGNMENT (data.nominal_type, 3630 align = MINIMUM_ALIGNMENT (data.nominal_type,
3584 TYPE_MODE (data.nominal_type), 3631 TYPE_MODE (data.nominal_type),
3585 TYPE_ALIGN (data.nominal_type)); 3632 TYPE_ALIGN (data.nominal_type));
3586 if (crtl->stack_alignment_estimated < align) 3633 if (crtl->stack_alignment_estimated < align)
3596 /* Find out where stack space for this parameter might be. */ 3643 /* Find out where stack space for this parameter might be. */
3597 if (assign_parm_is_stack_parm (&all, &data)) 3644 if (assign_parm_is_stack_parm (&all, &data))
3598 { 3645 {
3599 assign_parm_find_stack_rtl (parm, &data); 3646 assign_parm_find_stack_rtl (parm, &data);
3600 assign_parm_adjust_entry_rtl (&data); 3647 assign_parm_adjust_entry_rtl (&data);
3648 /* For arguments that occupy no space in the parameter
3649 passing area, have non-zero size and have address taken,
3650 force creation of a stack slot so that they have distinct
3651 address from other parameters. */
3652 if (TYPE_EMPTY_P (data.arg.type)
3653 && TREE_ADDRESSABLE (parm)
3654 && data.entry_parm == data.stack_parm
3655 && MEM_P (data.entry_parm)
3656 && int_size_in_bytes (data.arg.type))
3657 data.stack_parm = NULL_RTX;
3601 } 3658 }
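TYPE_EMPTY_P arguments occupy no registers and no bytes of the argument area (empty C++ classes under the x86-64 psABI, for example), so without the forced slot two such addressable parameters could land at the same address. C++ itself forbids that, as ordinary user code shows:

    // Distinct parameter objects must compare unequal by address, even
    // when their type is empty and passed in no storage at all.
    struct empty {};
    bool distinct (empty a, empty b) { return &a != &b; }  // must be true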
3602 /* Record permanently how this parm was passed. */ 3659 /* Record permanently how this parm was passed. */
3603 if (data.passed_pointer) 3660 if (data.arg.pass_by_reference)
3604 { 3661 {
3605 rtx incoming_rtl 3662 rtx incoming_rtl
3606 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)), 3663 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3607 data.entry_parm); 3664 data.entry_parm);
3608 set_decl_incoming_rtl (parm, incoming_rtl, true); 3665 set_decl_incoming_rtl (parm, incoming_rtl, true);
3609 } 3666 }
3610 else 3667 else
3611 set_decl_incoming_rtl (parm, data.entry_parm, false); 3668 set_decl_incoming_rtl (parm, data.entry_parm, false);
3612 3669
3613 assign_parm_adjust_stack_rtl (&data); 3670 assign_parm_adjust_stack_rtl (&data);
3614 3671
3615 if (assign_parm_setup_block_p (&data)) 3672 if (assign_parm_setup_block_p (&data))
3616 assign_parm_setup_block (&all, parm, &data); 3673 assign_parm_setup_block (&all, parm, &data);
3617 else if (data.passed_pointer || use_register_for_decl (parm)) 3674 else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3618 assign_parm_setup_reg (&all, parm, &data); 3675 assign_parm_setup_reg (&all, parm, &data);
3619 else 3676 else
3620 assign_parm_setup_stack (&all, parm, &data); 3677 assign_parm_setup_stack (&all, parm, &data);
3621 3678
3622 if (cfun->stdarg && !DECL_CHAIN (parm)) 3679 if (cfun->stdarg && !DECL_CHAIN (parm))
3623 assign_parms_setup_varargs (&all, &data, false); 3680 assign_parms_setup_varargs (&all, &data, false);
3624 3681
3625 /* Update info on where next arg arrives in registers. */ 3682 /* Update info on where next arg arrives in registers. */
3626 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, 3683 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3627 data.passed_type, data.named_arg);
3628 } 3684 }
3629 3685
3630 if (targetm.calls.split_complex_arg) 3686 if (targetm.calls.split_complex_arg)
3631 assign_parms_unsplit_complex (&all, fnargs); 3687 assign_parms_unsplit_complex (&all, fnargs);
3632 3688
3809 /* Early out for errors and void parameters. */ 3865 /* Early out for errors and void parameters. */
3810 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL) 3866 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3811 continue; 3867 continue;
3812 3868
3813 /* Update info on where next arg arrives in registers. */ 3869 /* Update info on where next arg arrives in registers. */
3814 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, 3870 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3815 data.passed_type, data.named_arg);
3816 3871
3817 /* ??? Once upon a time variable_size stuffed parameter list 3872 /* ??? Once upon a time variable_size stuffed parameter list
3818 SAVE_EXPRs (amongst others) onto a pending sizes list. This 3873 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3819 turned out to be less than manageable in the gimple world. 3874 turned out to be less than manageable in the gimple world.
3820 Now we have to hunt them down ourselves. */ 3875 Now we have to hunt them down ourselves. */
3821 walk_tree_without_duplicates (&data.passed_type, 3876 walk_tree_without_duplicates (&data.arg.type,
3822 gimplify_parm_type, &stmts); 3877 gimplify_parm_type, &stmts);
3823 3878
3824 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST) 3879 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3825 { 3880 {
3826 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts); 3881 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3827 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts); 3882 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3828 } 3883 }
3829 3884
3830 if (data.passed_pointer) 3885 if (data.arg.pass_by_reference)
3831 { 3886 {
3832 tree type = TREE_TYPE (data.passed_type); 3887 tree type = TREE_TYPE (data.arg.type);
3833 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type), 3888 function_arg_info orig_arg (type, data.arg.named);
3834 type, data.named_arg)) 3889 if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3835 { 3890 {
3836 tree local, t; 3891 tree local, t;
3837 3892
3838 /* For constant-sized objects, this is trivial; for 3893 /* For constant-sized objects, this is trivial; for
3839 variable-sized objects, we have to play games. */ 3894 variable-sized objects, we have to play games. */
3855 DECL_GIMPLE_REG_P (local) = 1; 3910 DECL_GIMPLE_REG_P (local) = 1;
3856 3911
3857 if (!is_gimple_reg (local) 3912 if (!is_gimple_reg (local)
3858 && flag_stack_reuse != SR_NONE) 3913 && flag_stack_reuse != SR_NONE)
3859 { 3914 {
3860 tree clobber = build_constructor (type, NULL); 3915 tree clobber = build_clobber (type);
3861 gimple *clobber_stmt; 3916 gimple *clobber_stmt;
3862 TREE_THIS_VOLATILE (clobber) = 1;
3863 clobber_stmt = gimple_build_assign (local, clobber); 3917 clobber_stmt = gimple_build_assign (local, clobber);
3864 gimple_seq_add_stmt (cleanup, clobber_stmt); 3918 gimple_seq_add_stmt (cleanup, clobber_stmt);
3865 } 3919 }
3866 } 3920 }
3867 else 3921 else
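The clobber construction shrinks because the newer build_clobber helper packages the old two-step idiom; based on the lines replaced above, the equivalence is:

    // Old idiom, as removed:
    //   tree clobber = build_constructor (type, NULL);
    //   TREE_THIS_VOLATILE (clobber) = 1;
    // New helper with the same effect:
    //   tree clobber = build_clobber (type);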
4001 gcc_assert (!crtl->stack_realign_finalized 4055 gcc_assert (!crtl->stack_realign_finalized
4002 && crtl->stack_realign_needed); 4056 && crtl->stack_realign_needed);
4003 } 4057 }
4004 } 4058 }
4005 } 4059 }
4006
4007 /* Remember if the outgoing parameter requires extra alignment on the
4008 calling function side. */
4009 if (crtl->stack_alignment_needed < boundary)
4010 crtl->stack_alignment_needed = boundary;
4011 if (crtl->preferred_stack_boundary < boundary)
4012 crtl->preferred_stack_boundary = boundary;
4013 4060
4014 if (ARGS_GROW_DOWNWARD) 4061 if (ARGS_GROW_DOWNWARD)
4015 { 4062 {
4016 locate->slot_offset.constant = -initial_offset_ptr->constant; 4063 locate->slot_offset.constant = -initial_offset_ptr->constant;
4017 if (initial_offset_ptr->var) 4064 if (initial_offset_ptr->var)
4688 get_last_funcdef_no (void) 4735 get_last_funcdef_no (void)
4689 { 4736 {
4690 return funcdef_no; 4737 return funcdef_no;
4691 } 4738 }
4692 4739
4740 /* Allocate and initialize the stack usage info data structure for the
4741 current function. */
4742 static void
4743 allocate_stack_usage_info (void)
4744 {
4745 gcc_assert (!cfun->su);
4746 cfun->su = ggc_cleared_alloc<stack_usage> ();
4747 cfun->su->static_stack_size = -1;
4748 }
4749
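allocate_stack_usage_info is split out here because the new -fcallgraph-info support (see record_final_call and record_dynamic_alloc further down) needs the stack_usage structure even when -fstack-usage is off; static_stack_size starts at -1 to mean "not yet computed". For illustration, a function whose frame the -fstack-usage report would list once the prologue pass finalizes the size (the exact byte count in the generated .su file is target-dependent):

    void
    big (void)
    {
      char buf[4096];                          /* dominates the frame size */
      __builtin_memset (buf, 0, sizeof buf);   /* keeps buf from being elided */
    }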
4693 /* Allocate a function structure for FNDECL and set its contents 4750 /* Allocate a function structure for FNDECL and set its contents
4694 to the defaults. Set cfun to the newly-allocated object. 4751 to the defaults. Set cfun to the newly-allocated object.
4695 Some of the helper functions invoked during initialization assume 4752 Some of the helper functions invoked during initialization assume
4696 that cfun has already been set. Therefore, assign the new object 4753 that cfun has already been set. Therefore, assign the new object
4697 directly into cfun and invoke the back end hook explicitly at the 4754 directly into cfun and invoke the back end hook explicitly at the
4765 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions; 4822 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4766 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions; 4823 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4767 4824
4768 if (!profile_flag && !flag_instrument_function_entry_exit) 4825 if (!profile_flag && !flag_instrument_function_entry_exit)
4769 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1; 4826 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4827
4828 if (flag_callgraph_info)
4829 allocate_stack_usage_info ();
4770 } 4830 }
4771 4831
4772 /* Don't enable begin stmt markers if var-tracking at assignments is 4832 /* Don't enable begin stmt markers if var-tracking at assignments is
4773 disabled. The markers make little sense without the variable 4833 disabled. The markers make little sense without the variable
4774 binding annotations among them. */ 4834 binding annotations among them. */
4797 4857
4798 static void 4858 static void
4799 prepare_function_start (void) 4859 prepare_function_start (void)
4800 { 4860 {
4801 gcc_assert (!get_last_insn ()); 4861 gcc_assert (!get_last_insn ());
4862
4863 if (in_dummy_function)
4864 crtl->abi = &default_function_abi;
4865 else
4866 crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4867
4802 init_temp_slots (); 4868 init_temp_slots ();
4803 init_emit (); 4869 init_emit ();
4804 init_varasm_status (); 4870 init_varasm_status ();
4805 init_expr (); 4871 init_expr ();
4806 default_rtl_profile (); 4872 default_rtl_profile ();
4807 4873
4808 if (flag_stack_usage_info) 4874 if (flag_stack_usage_info && !flag_callgraph_info)
4809 { 4875 allocate_stack_usage_info ();
4810 cfun->su = ggc_cleared_alloc<stack_usage> ();
4811 cfun->su->static_stack_size = -1;
4812 }
4813 4876
4814 cse_not_expected = ! optimize; 4877 cse_not_expected = ! optimize;
4815 4878
4816 /* Caller save not needed yet. */ 4879 /* Caller save not needed yet. */
4817 caller_save_needed = 0; 4880 caller_save_needed = 0;
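The new crtl->abi setup comes from GCC 10's function-abi.h work: which registers a call clobbers is no longer a single global property but can vary per declaration, so prepare_function_start pins down the ABI of the function about to be expanded (dummy functions fall back to default_function_abi). A user-level case where a decl's ABI differs from the base ABI, using an x86-specific attribute purely as an example:

    /* x86: this function preserves all registers, so its callers may keep
       values live in normally call-clobbered registers across the call;
       such per-decl differences are why the ABI is now looked up via
       fndecl_abi.  */
    void __attribute__ ((no_caller_saved_registers)) helper (void);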
4887 the end of a function to be protected. */ 4950 the end of a function to be protected. */
4888 4951
4889 void 4952 void
4890 stack_protect_epilogue (void) 4953 stack_protect_epilogue (void)
4891 { 4954 {
4892 tree guard_decl = targetm.stack_protect_guard (); 4955 tree guard_decl = crtl->stack_protect_guard_decl;
4893 rtx_code_label *label = gen_label_rtx (); 4956 rtx_code_label *label = gen_label_rtx ();
4894 rtx x, y; 4957 rtx x, y;
4895 rtx_insn *seq; 4958 rtx_insn *seq = NULL;
4896 4959
4897 x = expand_normal (crtl->stack_protect_guard); 4960 x = expand_normal (crtl->stack_protect_guard);
4898 if (guard_decl) 4961
4899 y = expand_normal (guard_decl); 4962 if (targetm.have_stack_protect_combined_test () && guard_decl)
4963 {
4964 gcc_assert (DECL_P (guard_decl));
4965 y = DECL_RTL (guard_decl);
4966 /* Allow the target to compute the address of Y and compare it with X
4967 without leaking Y into a register. This combined address + compare
4968 pattern allows the target to prevent spilling of any intermediate
4969 results by splitting it after register allocation. */
4970 seq = targetm.gen_stack_protect_combined_test (x, y, label);
4971 }
4900 else 4972 else
4901 y = const0_rtx; 4973 {
4902 4974 if (guard_decl)
4903 /* Allow the target to compare Y with X without leaking either into 4975 y = expand_normal (guard_decl);
4904 a register. */ 4976 else
4905 if (targetm.have_stack_protect_test () 4977 y = const0_rtx;
4906 && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX)) 4978
4979 /* Allow the target to compare Y with X without leaking either into
4980 a register. */
4981 if (targetm.have_stack_protect_test ())
4982 seq = targetm.gen_stack_protect_test (x, y, label);
4983 }
4984
4985 if (seq)
4907 emit_insn (seq); 4986 emit_insn (seq);
4908 else 4987 else
4909 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label); 4988 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4910 4989
4911 /* The noreturn predictor has been moved to the tree level. The rtl-level 4990 /* The noreturn predictor has been moved to the tree level. The rtl-level
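The restructuring above lets targets that provide a combined address-plus-compare pattern (have_stack_protect_combined_test) check the guard without first materializing its address in a register where it could be spilled; other targets keep the two-step expansion, with the plain compare-and-jump as the fallback. At the source level, the check emitted here fires for code like the following when built with -fstack-protector-strong (a sketch of the classic overflow the canary catches):

    #include <string.h>

    void
    copy (const char *src)
    {
      char buf[64];        /* local array => this frame gets a canary */
      strcpy (buf, src);   /* an overflow clobbers the canary, and the
                              epilogue check expanded above then calls
                              __stack_chk_fail */
    }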
5107 cfun->nonlocal_goto_save_area, 5186 cfun->nonlocal_goto_save_area,
5108 integer_zero_node, NULL_TREE, NULL_TREE); 5187 integer_zero_node, NULL_TREE, NULL_TREE);
5109 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); 5188 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5110 gcc_assert (GET_MODE (r_save) == Pmode); 5189 gcc_assert (GET_MODE (r_save) == Pmode);
5111 5190
5112 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ()); 5191 emit_move_insn (r_save, hard_frame_pointer_rtx);
5113 update_nonlocal_goto_save_area (); 5192 update_nonlocal_goto_save_area ();
5114 } 5193 }
5115 5194
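The save-area setup now stores hard_frame_pointer_rtx directly instead of going through targetm.builtin_setjmp_frame_value. The path is exercised by GNU C nested functions that jump back into their containing frame, as in this sketch:

    int
    outer (int x)
    {
      __label__ out;                           /* required for a nonlocal goto */
      void inner (void) { if (x) goto out; }   /* GNU C nested function */
      inner ();
      return 0;
     out:
      return 1;                                /* reached via the nonlocal goto */
    }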
5116 if (crtl->profile) 5195 if (crtl->profile)
5117 { 5196 {
5214 5293
5215 static void 5294 static void
5216 use_return_register (void) 5295 use_return_register (void)
5217 { 5296 {
5218 diddle_return_value (do_use_return_reg, NULL); 5297 diddle_return_value (do_use_return_reg, NULL);
5219 }
5220
5221 /* Set the location of the insn chain starting at INSN to LOC. */
5222
5223 static void
5224 set_insn_locations (rtx_insn *insn, int loc)
5225 {
5226 while (insn != NULL)
5227 {
5228 if (INSN_P (insn))
5229 INSN_LOCATION (insn) = loc;
5230 insn = NEXT_INSN (insn);
5231 }
5232 } 5298 }
5233 5299
5234 /* Generate RTL for the end of the current function. */ 5300 /* Generate RTL for the end of the current function. */
5235 5301
5236 void 5302 void
5294 /* Let except.c know where it should emit the call to unregister 5360 /* Let except.c know where it should emit the call to unregister
5295 the function context for sjlj exceptions. */ 5361 the function context for sjlj exceptions. */
5296 if (flag_exceptions) 5362 if (flag_exceptions)
5297 sjlj_emit_function_exit_after (get_last_insn ()); 5363 sjlj_emit_function_exit_after (get_last_insn ());
5298 } 5364 }
5299 else
5300 {
5301 /* We want to ensure that instructions that may trap are not
5302 moved into the epilogue by scheduling, because we don't
5303 always emit unwind information for the epilogue. */
5304 if (cfun->can_throw_non_call_exceptions)
5305 emit_insn (gen_blockage ());
5306 }
5307 5365
5308 /* If this is an implementation of throw, do what's necessary to 5366 /* If this is an implementation of throw, do what's necessary to
5309 communicate between __builtin_eh_return and the epilogue. */ 5367 communicate between __builtin_eh_return and the epilogue. */
5310 expand_eh_return (); 5368 expand_eh_return ();
5369
5370 /* If stack protection is enabled for this function, check the guard. */
5371 if (crtl->stack_protect_guard
5372 && targetm.stack_protect_runtime_enabled_p ()
5373 && naked_return_label == NULL_RTX)
5374 stack_protect_epilogue ();
5311 5375
5312 /* If scalar return value was computed in a pseudo-reg, or was a named 5376 /* If scalar return value was computed in a pseudo-reg, or was a named
5313 return value that got dumped to the stack, copy that to the hard 5377 return value that got dumped to the stack, copy that to the hard
5314 return register. */ 5378 return register. */
5315 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) 5379 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5453 if (cfun->can_throw_non_call_exceptions 5517 if (cfun->can_throw_non_call_exceptions
5454 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ) 5518 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5455 emit_insn (gen_blockage ()); 5519 emit_insn (gen_blockage ());
5456 5520
5457 /* If stack protection is enabled for this function, check the guard. */ 5521 /* If stack protection is enabled for this function, check the guard. */
5458 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ()) 5522 if (crtl->stack_protect_guard
5523 && targetm.stack_protect_runtime_enabled_p ()
5524 && naked_return_label)
5459 stack_protect_epilogue (); 5525 stack_protect_epilogue ();
5460 5526
5461 /* If we had calls to alloca, and this machine needs 5527 /* If we had calls to alloca, and this machine needs
5462 an accurate stack pointer to exit the function, 5528 an accurate stack pointer to exit the function,
5463 insert some code to save and restore the stack pointer. */ 5529 insert some code to save and restore the stack pointer. */
6327 /* Shrink-wrapping can result in unreachable edges in the epilogue, 6393 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6328 see PR57320. */ 6394 see PR57320. */
6329 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0); 6395 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6330 6396
6331 /* The stack usage info is finalized during prologue expansion. */ 6397 /* The stack usage info is finalized during prologue expansion. */
6332 if (flag_stack_usage_info) 6398 if (flag_stack_usage_info || flag_callgraph_info)
6333 output_stack_usage (); 6399 output_stack_usage ();
6334 6400
6335 return 0; 6401 return 0;
6402 }
6403
6404 /* Record a final call to CALLEE at LOCATION. */
6405
6406 void
6407 record_final_call (tree callee, location_t location)
6408 {
6409 struct callinfo_callee datum = { location, callee };
6410 vec_safe_push (cfun->su->callees, datum);
6411 }
6412
6413 /* Record a dynamic allocation made for DECL_OR_EXP. */
6414
6415 void
6416 record_dynamic_alloc (tree decl_or_exp)
6417 {
6418 struct callinfo_dalloc datum;
6419
6420 if (DECL_P (decl_or_exp))
6421 {
6422 datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6423 const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6424 const char *dot = strrchr (name, '.');
6425 if (dot)
6426 name = dot + 1;
6427 datum.name = ggc_strdup (name);
6428 }
6429 else
6430 {
6431 datum.location = EXPR_LOCATION (decl_or_exp);
6432 datum.name = NULL;
6433 }
6434
6435 vec_safe_push (cfun->su->dallocs, datum);
6336 } 6436 }
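These two recorders feed the new -fcallgraph-info output: calls that survive to final assembly and dynamic stack or heap allocations are appended to vectors in cfun->su for the emitted callgraph file. A sketch of source constructs that would be recorded when compiling with -fcallgraph-info=su,da (option spelling as of GCC 10; the report format itself is not shown):

    #include <stdlib.h>

    extern void sink (void *);

    void
    demo (int n)
    {
      int vla[n];             /* dynamic stack allocation: record_dynamic_alloc
                                 with the VLA's decl */
      void *p = malloc (n);   /* a call reaching final: record_final_call */
      sink (vla);
      sink (p);
    }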
6337 6437
6338 namespace { 6438 namespace {
6339 6439
6340 const pass_data pass_data_thread_prologue_and_epilogue = 6440 const pass_data pass_data_thread_prologue_and_epilogue =
6372 { 6472 {
6373 return new pass_thread_prologue_and_epilogue (ctxt); 6473 return new pass_thread_prologue_and_epilogue (ctxt);
6374 } 6474 }
6375 6475
6376 6476
6477 /* If CONSTRAINT is a matching constraint, then return its number.
6478 Otherwise, return -1. */
6479
6480 static int
6481 matching_constraint_num (const char *constraint)
6482 {
6483 if (*constraint == '%')
6484 constraint++;
6485
6486 if (IN_RANGE (*constraint, '0', '9'))
6487 return strtoul (constraint, NULL, 10);
6488
6489 return -1;
6490 }
6491
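The new helper factors out the constraint parsing that the loop below previously open-coded with strtoul. A standalone restatement with its expected results, runnable outside GCC (IN_RANGE expanded by hand; behavior matches the code above):

    #include <assert.h>
    #include <stdlib.h>

    static int
    matching_constraint_num (const char *constraint)
    {
      if (*constraint == '%')
        constraint++;
      if (*constraint >= '0' && *constraint <= '9')
        return strtoul (constraint, NULL, 10);
      return -1;
    }

    int
    main (void)
    {
      assert (matching_constraint_num ("0") == 0);    /* matches output 0 */
      assert (matching_constraint_num ("%1") == 1);   /* '%' marker skipped */
      assert (matching_constraint_num ("10") == 10);  /* multi-digit match */
      assert (matching_constraint_num ("mr") == -1);  /* not a match */
      return 0;
    }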
6377 /* This mini-pass fixes fall-out from SSA in asm statements that have 6492 /* This mini-pass fixes fall-out from SSA in asm statements that have
6378 in-out constraints. Say you start with 6493 in-out constraints. Say you start with
6379 6494
6380 orig = inout; 6495 orig = inout;
6381 asm ("": "+mr" (inout)); 6496 asm ("": "+mr" (inout));
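A compilable instance of the situation described: a '+' (in-out) operand is a single operand in the source but becomes a matched output/input pair after SSA, which this mini-pass re-links by emitting output = input moves. The asm body is x86 AT&T syntax, used here only for illustration:

    int
    bump (int inout)
    {
      asm ("incl %0" : "+mr" (inout));   /* "+mr": one operand serving as
                                            both input and output */
      return inout;
    }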
6430 for (i = 0; i < ninputs; i++) 6545 for (i = 0; i < ninputs; i++)
6431 { 6546 {
6432 rtx input, output; 6547 rtx input, output;
6433 rtx_insn *insns; 6548 rtx_insn *insns;
6434 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i); 6549 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6435 char *end;
6436 int match, j; 6550 int match, j;
6437 6551
6438 if (*constraint == '%') 6552 match = matching_constraint_num (constraint);
6439 constraint++; 6553 if (match < 0)
6440
6441 match = strtoul (constraint, &end, 10);
6442 if (end == constraint)
6443 continue; 6554 continue;
6444 6555
6445 gcc_assert (match < noutputs); 6556 gcc_assert (match < noutputs);
6446 output = SET_DEST (p_sets[match]); 6557 output = SET_DEST (p_sets[match]);
6447 input = RTVEC_ELT (inputs, i); 6558 input = RTVEC_ELT (inputs, i);
6454 continue; 6565 continue;
6455 6566
6456 /* We can't do anything if the output is also used as input, 6567 /* We can't do anything if the output is also used as input,
6457 as we're going to overwrite it. */ 6568 as we're going to overwrite it. */
6458 for (j = 0; j < ninputs; j++) 6569 for (j = 0; j < ninputs; j++)
6459 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j))) 6570 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6460 break; 6571 break;
6461 if (j != ninputs) 6572 if (j != ninputs)
6462 continue; 6573 continue;
6463 6574
6464 /* Avoid changing the same input several times. For 6575 /* Avoid changing the same input several times. For
6465 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in)); 6576 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6466 only change in once (to out1), rather than changing it 6577 only change it once (to out1), rather than changing it
6467 first to out1 and afterwards to out2. */ 6578 first to out1 and afterwards to out2. */
6468 if (i > 0) 6579 if (i > 0)
6469 { 6580 {
6470 for (j = 0; j < noutputs; j++) 6581 for (j = 0; j < noutputs; j++)
6471 if (output_matched[j] && input == SET_DEST (p_sets[j])) 6582 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6474 continue; 6585 continue;
6475 } 6586 }
6476 output_matched[match] = true; 6587 output_matched[match] = true;
6477 6588
6478 start_sequence (); 6589 start_sequence ();
6479 emit_move_insn (output, input); 6590 emit_move_insn (output, copy_rtx (input));
6480 insns = get_insns (); 6591 insns = get_insns ();
6481 end_sequence (); 6592 end_sequence ();
6482 emit_insn_before (insns, insn); 6593 emit_insn_before (insns, insn);
6594
6595 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6596 bool early_clobber_p = strchr (constraint, '&') != NULL;
6483 6597
6484 /* Now replace all mentions of the input with output. We can't 6598 /* Now replace all mentions of the input with output. We can't
6485 just replace the occurrence in inputs[i], as the register might 6599 just replace the occurrence in inputs[i], as the register might
6486 also be used in some other input (or even in an address of an 6600 also be used in some other input (or even in an address of an
6487 output), which would mean possibly increasing the number of 6601 output), which would mean possibly increasing the number of
6500 6614
6501 Now we suddenly have two different input values (containing the same 6615 Now we suddenly have two different input values (containing the same
6502 value, but different pseudos) where we formerly had only one. 6616 value, but different pseudos) where we formerly had only one.
6503 With more complicated asms this might lead to reload failures 6617 With more complicated asms this might lead to reload failures
6504 which wouldn't have happened without this pass. So, iterate over 6618
6505 all operands and replace all occurrences of the register used. */ 6619 all operands and replace all occurrences of the register used.
6620
6621 However, if one or more of the 'input' uses have a non-matching
6622 constraint and the matched output operand is an early clobber
6623 operand, then do not replace the input operand, since by definition
6624 it conflicts with the output operand and cannot share the same
6625 register. See PR89313 for details. */
6626
6506 for (j = 0; j < noutputs; j++) 6627 for (j = 0; j < noutputs; j++)
6507 if (!rtx_equal_p (SET_DEST (p_sets[j]), input) 6628 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6508 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j]))) 6629 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6509 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]), 6630 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6510 input, output); 6631 input, output);
6511 for (j = 0; j < ninputs; j++) 6632 for (j = 0; j < ninputs; j++)
6512 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j))) 6633 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6513 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j), 6634 {
6514 input, output); 6635 if (!early_clobber_p
6636 || match == matching_constraint_num
6637 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6638 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6639 input, output);
6640 }
6515 6641
6516 changed = true; 6642 changed = true;
6517 } 6643 }
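A hedged sketch of the PR89313 hazard that the early_clobber_p test above guards against: output 0 is early-clobbered ('&'), input 0 is tied to it ("0"), and a second, non-matching input names the same variable. Rewriting that second input to the output register would force it into exactly the register the early clobber forbids it to share (the asm template is a placeholder, not a real instruction):

    long
    f (long y)
    {
      long z;
      /* "=&r": written before all inputs are consumed; "0": tied input;
         the plain "r" input must live in a different register.  */
      asm ("op %0, %1, %2" : "=&r" (z) : "0" (y), "r" (y));
      return z;
    }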
6518 6644
6519 if (changed) 6645 if (changed)