comparison gcc/calls.c @ 132:d34655255c78

update gcc-8.2
author mir3636
date Thu, 25 Oct 2018 10:21:07 +0900
parents fe568345ddd5 84e7813d76e9
children 420680fc7707
130:e108057fa461 132:d34655255c78
1 /* Convert function calls to rtl insns, for GNU C compiler. 1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2017 Free Software Foundation, Inc. 2 Copyright (C) 1989-2018 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
45 #include "output.h" 45 #include "output.h"
46 #include "langhooks.h" 46 #include "langhooks.h"
47 #include "except.h" 47 #include "except.h"
48 #include "dbgcnt.h" 48 #include "dbgcnt.h"
49 #include "rtl-iter.h" 49 #include "rtl-iter.h"
50 #include "tree-chkp.h"
51 #include "tree-vrp.h" 50 #include "tree-vrp.h"
52 #include "tree-ssanames.h" 51 #include "tree-ssanames.h"
53 #include "rtl-chkp.h" 52 #include "tree-ssa-strlen.h"
54 #include "intl.h" 53 #include "intl.h"
55 #include "stringpool.h" 54 #include "stringpool.h"
56 #include "attribs.h" 55 #include "attribs.h"
56 #include "builtins.h"
57 #include "gimple-fold.h"
57 58
58 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */ 59 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) 60 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
60 61
61 /* Data structure and subroutines used within expand_call. */ 62 /* Data structure and subroutines used within expand_call. */
128 This vector is used to prevent a function call within an argument from 129 This vector is used to prevent a function call within an argument from
129 clobbering any stack already set up. */ 130 clobbering any stack already set up. */
130 static char *stack_usage_map; 131 static char *stack_usage_map;
131 132
132 /* Size of STACK_USAGE_MAP. */ 133 /* Size of STACK_USAGE_MAP. */
133 static int highest_outgoing_arg_in_use; 134 static unsigned int highest_outgoing_arg_in_use;
135
136 /* Assume that any stack location at this byte index is used,
137 without checking the contents of stack_usage_map. */
138 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
134 139
135 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding 140 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
136 stack location's tail call argument has been already stored into the stack. 141 stack location's tail call argument has been already stored into the stack.
137 This bitmap is used to prevent sibling call optimization if function tries 142 This bitmap is used to prevent sibling call optimization if function tries
138 to use parent's incoming argument slots when they have been already 143 to use parent's incoming argument slots when they have been already
139 overwritten with tail call arguments. */ 144 overwritten with tail call arguments. */
140 static sbitmap stored_args_map; 145 static sbitmap stored_args_map;
141 146
147 /* Assume that any virtual-incoming location at this byte index has been
148 stored, without checking the contents of stored_args_map. */
149 static unsigned HOST_WIDE_INT stored_args_watermark;
150
142 /* stack_arg_under_construction is nonzero when an argument may be 151 /* stack_arg_under_construction is nonzero when an argument may be
143 initialized with a constructor call (including a C function that 152 initialized with a constructor call (including a C function that
144 returns a BLKmode struct) and expand_call must take special action 153 returns a BLKmode struct) and expand_call must take special action
145 to make sure the object being constructed does not overlap the 154 to make sure the object being constructed does not overlap the
146 argument list for the constructor call. */ 155 argument list for the constructor call. */
147 static int stack_arg_under_construction; 156 static int stack_arg_under_construction;
148 157
149 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
150 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
151 cumulative_args_t);
152 static void precompute_register_parameters (int, struct arg_data *, int *); 158 static void precompute_register_parameters (int, struct arg_data *, int *);
153 static void store_bounds (struct arg_data *, struct arg_data *);
154 static int store_one_arg (struct arg_data *, rtx, int, int, int); 159 static int store_one_arg (struct arg_data *, rtx, int, int, int);
155 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int); 160 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
156 static int finalize_must_preallocate (int, int, struct arg_data *, 161 static int finalize_must_preallocate (int, int, struct arg_data *,
157 struct args_size *); 162 struct args_size *);
158 static void precompute_arguments (int, struct arg_data *); 163 static void precompute_arguments (int, struct arg_data *);
159 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
160 static void initialize_argument_information (int, struct arg_data *,
161 struct args_size *, int,
162 tree, tree,
163 tree, tree, cumulative_args_t, int,
164 rtx *, int *, int *, int *,
165 bool *, bool);
166 static void compute_argument_addresses (struct arg_data *, rtx, int); 164 static void compute_argument_addresses (struct arg_data *, rtx, int);
167 static rtx rtx_for_function_call (tree, tree); 165 static rtx rtx_for_function_call (tree, tree);
168 static void load_register_parameters (struct arg_data *, int, rtx *, int, 166 static void load_register_parameters (struct arg_data *, int, rtx *, int,
169 int, int *); 167 int, int *);
170 static int special_function_p (const_tree, int); 168 static int special_function_p (const_tree, int);
171 static int check_sibcall_argument_overlap_1 (rtx); 169 static int check_sibcall_argument_overlap_1 (rtx);
172 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int); 170 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
173 171
174 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
175 unsigned int);
176 static tree split_complex_types (tree); 172 static tree split_complex_types (tree);
177 173
178 #ifdef REG_PARM_STACK_SPACE 174 #ifdef REG_PARM_STACK_SPACE
179 static rtx save_fixed_argument_area (int, rtx, int *, int *); 175 static rtx save_fixed_argument_area (int, rtx, int *, int *);
180 static void restore_fixed_argument_area (rtx, rtx, int, int); 176 static void restore_fixed_argument_area (rtx, rtx, int, int);
181 #endif 177 #endif
182 178
179 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
180 stack region might already be in use. */
181
182 static bool
183 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
184 unsigned int reg_parm_stack_space)
185 {
186 unsigned HOST_WIDE_INT const_lower, const_upper;
187 const_lower = constant_lower_bound (lower_bound);
188 if (!upper_bound.is_constant (&const_upper))
189 const_upper = HOST_WIDE_INT_M1U;
190
191 if (const_upper > stack_usage_watermark)
192 return true;
193
194 /* Don't worry about things in the fixed argument area;
195 it has already been saved. */
196 const_lower = MAX (const_lower, reg_parm_stack_space);
197 const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
198 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
199 if (stack_usage_map[i])
200 return true;
201 return false;
202 }
203
204 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
205 stack region are now in use. */
206
207 static void
208 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
209 {
210 unsigned HOST_WIDE_INT const_lower, const_upper;
211 const_lower = constant_lower_bound (lower_bound);
212 if (upper_bound.is_constant (&const_upper))
213 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
214 stack_usage_map[i] = 1;
215 else
216 stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
217 }
218
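The two watermark variables above pair with their maps: bits in the map answer precise queries below the watermark, while any byte index at or above it is conservatively treated as used (or stored). A simplified, self-contained sketch of that scheme follows; it is not GCC code, and the fixed map size and names are illustrative (the real routines operate on poly_uint64 bounds):

#include <stdbool.h>

#define MAP_SIZE 64
static char map[MAP_SIZE];                      /* stack_usage_map analogue */
static unsigned long watermark = (unsigned long) -1;

/* Might any byte in [lo, hi) be in use?  */
static bool
region_maybe_used_p (unsigned long lo, unsigned long hi)
{
  if (hi > watermark)
    return true;                     /* at/above the watermark: assume used */
  for (unsigned long i = lo; i < hi && i < MAP_SIZE; i++)
    if (map[i])
      return true;
  return false;
}

/* Record [lo, hi) as used; if HI is not known at compile time,
   lower the watermark instead of setting individual bytes.  */
static void
mark_region_used (unsigned long lo, unsigned long hi, bool hi_known)
{
  if (hi_known)
    for (unsigned long i = lo; i < hi && i < MAP_SIZE; i++)
      map[i] = 1;
  else if (lo < watermark)
    watermark = lo;
}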
183 /* Force FUNEXP into a form suitable for the address of a CALL, 219 /* Force FUNEXP into a form suitable for the address of a CALL,
184 and return that as an rtx. Also load the static chain register 220 and return that as an rtx. Also load the static chain register
185 if FNDECL is a nested function. 221 if FNDECL is a nested function.
186 222
187 CALL_FUSAGE points to a variable holding the prospective 223 CALL_FUSAGE points to a variable holding the prospective
340 denote registers used by the called function. */ 376 denote registers used by the called function. */
341 377
342 static void 378 static void
343 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED, 379 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
344 tree funtype ATTRIBUTE_UNUSED, 380 tree funtype ATTRIBUTE_UNUSED,
345 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED, 381 poly_int64 stack_size ATTRIBUTE_UNUSED,
346 HOST_WIDE_INT rounded_stack_size, 382 poly_int64 rounded_stack_size,
347 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED, 383 poly_int64 struct_value_size ATTRIBUTE_UNUSED,
348 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, 384 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
349 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags, 385 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
350 cumulative_args_t args_so_far ATTRIBUTE_UNUSED) 386 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
351 { 387 {
352 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size); 388 rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
353 rtx call, funmem, pat; 389 rtx call, funmem, pat;
354 int already_popped = 0; 390 int already_popped = 0;
355 HOST_WIDE_INT n_popped = 0; 391 poly_int64 n_popped = 0;
356 392
357 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop 393 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
358 patterns exist). Any popping that the callee does on return will 394 patterns exist). Any popping that the callee does on return will
359 be from our caller's frame rather than ours. */ 395 be from our caller's frame rather than ours. */
360 if (!(ecf_flags & ECF_SIBCALL)) 396 if (!(ecf_flags & ECF_SIBCALL))
402 pat = targetm.gen_sibcall_value (valreg, funmem, 438 pat = targetm.gen_sibcall_value (valreg, funmem,
403 rounded_stack_size_rtx, 439 rounded_stack_size_rtx,
404 next_arg_reg, NULL_RTX); 440 next_arg_reg, NULL_RTX);
405 else 441 else
406 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx, 442 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
407 next_arg_reg, GEN_INT (struct_value_size)); 443 next_arg_reg,
444 gen_int_mode (struct_value_size, Pmode));
408 } 445 }
409 /* If the target has "call" or "call_value" insns, then prefer them 446 /* If the target has "call" or "call_value" insns, then prefer them
410 if no arguments are actually popped. If the target does not have 447 if no arguments are actually popped. If the target does not have
411 "call" or "call_value" insns, then we must use the popping versions 448 "call" or "call_value" insns, then we must use the popping versions
412 even if the call has no arguments to pop. */ 449 even if the call has no arguments to pop. */
413 else if (n_popped > 0 450 else if (maybe_ne (n_popped, 0)
414 || !(valreg 451 || !(valreg
415 ? targetm.have_call_value () 452 ? targetm.have_call_value ()
416 : targetm.have_call ())) 453 : targetm.have_call ()))
417 { 454 {
418 rtx n_pop = GEN_INT (n_popped); 455 rtx n_pop = gen_int_mode (n_popped, Pmode);
419 456
420 /* If this subroutine pops its own args, record that in the call insn 457 /* If this subroutine pops its own args, record that in the call insn
421 if possible, for the sake of frame pointer elimination. */ 458 if possible, for the sake of frame pointer elimination. */
422 459
423 if (valreg) 460 if (valreg)
435 if (valreg) 472 if (valreg)
436 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx, 473 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
437 next_arg_reg, NULL_RTX); 474 next_arg_reg, NULL_RTX);
438 else 475 else
439 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg, 476 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
440 GEN_INT (struct_value_size)); 477 gen_int_mode (struct_value_size, Pmode));
441 } 478 }
442 emit_insn (pat); 479 emit_insn (pat);
443 480
444 /* Find the call we just emitted. */ 481 /* Find the call we just emitted. */
445 rtx_call_insn *call_insn = last_call_insn (); 482 rtx_call_insn *call_insn = last_call_insn ();
450 if (call 487 if (call
451 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE 488 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
452 && MEM_EXPR (funmem) != NULL_TREE) 489 && MEM_EXPR (funmem) != NULL_TREE)
453 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem)); 490 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
454 491
455 /* Mark instrumented calls. */
456 if (call && fntree)
457 CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
458
459 /* Put the register usage information there. */ 492 /* Put the register usage information there. */
460 add_function_usage_to (call_insn, call_fusage); 493 add_function_usage_to (call_insn, call_fusage);
461 494
462 /* If this is a const call, then set the insn's unchanging bit. */ 495 /* If this is a const call, then set the insn's unchanging bit. */
463 if (ecf_flags & ECF_CONST) 496 if (ecf_flags & ECF_CONST)
487 520
488 /* Restore this now, so that we do defer pops for this call's args 521 /* Restore this now, so that we do defer pops for this call's args
489 if the context of the call as a whole permits. */ 522 if the context of the call as a whole permits. */
490 inhibit_defer_pop = old_inhibit_defer_pop; 523 inhibit_defer_pop = old_inhibit_defer_pop;
491 524
492 if (n_popped > 0) 525 if (maybe_ne (n_popped, 0))
493 { 526 {
494 if (!already_popped) 527 if (!already_popped)
495 CALL_INSN_FUNCTION_USAGE (call_insn) 528 CALL_INSN_FUNCTION_USAGE (call_insn)
496 = gen_rtx_EXPR_LIST (VOIDmode, 529 = gen_rtx_EXPR_LIST (VOIDmode,
497 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx), 530 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
498 CALL_INSN_FUNCTION_USAGE (call_insn)); 531 CALL_INSN_FUNCTION_USAGE (call_insn));
499 rounded_stack_size -= n_popped; 532 rounded_stack_size -= n_popped;
500 rounded_stack_size_rtx = GEN_INT (rounded_stack_size); 533 rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
501 stack_pointer_delta -= n_popped; 534 stack_pointer_delta -= n_popped;
502 535
503 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta)); 536 add_args_size_note (call_insn, stack_pointer_delta);
504 537
505 /* If popping is needed, stack realign must use DRAP */ 538 /* If popping is needed, stack realign must use DRAP */
506 if (SUPPORTS_STACK_ALIGNMENT) 539 if (SUPPORTS_STACK_ALIGNMENT)
507 crtl->need_drap = true; 540 crtl->need_drap = true;
508 } 541 }
509 /* For noreturn calls when not accumulating outgoing args force 542 /* For noreturn calls when not accumulating outgoing args force
510 REG_ARGS_SIZE note to prevent crossjumping of calls with different 543 REG_ARGS_SIZE note to prevent crossjumping of calls with different
511 args sizes. */ 544 args sizes. */
512 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0) 545 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
513 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta)); 546 add_args_size_note (call_insn, stack_pointer_delta);
514 547
515 if (!ACCUMULATE_OUTGOING_ARGS) 548 if (!ACCUMULATE_OUTGOING_ARGS)
516 { 549 {
517 /* If returning from the subroutine does not automatically pop the args, 550 /* If returning from the subroutine does not automatically pop the args,
518 we need an instruction to pop them sooner or later. 551 we need an instruction to pop them sooner or later.
519 Perhaps do it now; perhaps just record how much space to pop later. 552 Perhaps do it now; perhaps just record how much space to pop later.
520 553
521 If returning from the subroutine does pop the args, indicate that the 554 If returning from the subroutine does pop the args, indicate that the
522 stack pointer will be changed. */ 555 stack pointer will be changed. */
523 556
524 if (rounded_stack_size != 0) 557 if (maybe_ne (rounded_stack_size, 0))
525 { 558 {
526 if (ecf_flags & ECF_NORETURN) 559 if (ecf_flags & ECF_NORETURN)
527 /* Just pretend we did the pop. */ 560 /* Just pretend we did the pop. */
528 stack_pointer_delta -= rounded_stack_size; 561 stack_pointer_delta -= rounded_stack_size;
529 else if (flag_defer_pop && inhibit_defer_pop == 0 562 else if (flag_defer_pop && inhibit_defer_pop == 0
542 ??? We may optimize similar to defer_pop above, but it is 575 ??? We may optimize similar to defer_pop above, but it is
543 probably not worthwhile. 576 probably not worthwhile.
544 577
545 ??? It will be worthwhile to enable combine_stack_adjustments even for 578 ??? It will be worthwhile to enable combine_stack_adjustments even for
546 such machines. */ 579 such machines. */
547 else if (n_popped) 580 else if (maybe_ne (n_popped, 0))
548 anti_adjust_stack (GEN_INT (n_popped)); 581 anti_adjust_stack (gen_int_mode (n_popped, Pmode));
549 } 582 }
550 583
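Throughout the updated function, scalar tests such as n_popped > 0 become maybe_ne (n_popped, 0) because a poly_int64 can carry a runtime term. A hedged model of those predicates for a single indeterminate N (GCC's poly_int is really a C++ template with a target-specific number of coefficients):

#include <stdbool.h>

struct poly { long c0, c1; };       /* models c0 + c1 * N, N >= 0 unknown */

/* Equal to X for every possible N.  */
static bool
known_eq_scalar (struct poly p, long x)
{
  return p.c1 == 0 && p.c0 == x;
}

/* Differs from X for at least one possible N; the negation of known_eq.  */
static bool
maybe_ne_scalar (struct poly p, long x)
{
  return !known_eq_scalar (p, x);
}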
551 /* Determine if the function identified by FNDECL is one with 584 /* Determine if the function identified by FNDECL is one with
552 special properties we wish to know about. Modify FLAGS accordingly. 585 special properties we wish to know about. Modify FLAGS accordingly.
553 586
559 592
560 static int 593 static int
561 special_function_p (const_tree fndecl, int flags) 594 special_function_p (const_tree fndecl, int flags)
562 { 595 {
563 tree name_decl = DECL_NAME (fndecl); 596 tree name_decl = DECL_NAME (fndecl);
564
565 /* For instrumentation clones we want to derive flags
566 from the original name. */
567 if (cgraph_node::get (fndecl)
568 && cgraph_node::get (fndecl)->instrumentation_clone)
569 name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
570 597
571 if (fndecl && name_decl 598 if (fndecl && name_decl
572 && IDENTIFIER_LENGTH (name_decl) <= 11 599 && IDENTIFIER_LENGTH (name_decl) <= 11
573 /* Exclude functions not at the file scope, or not `extern', 600 /* Exclude functions not at the file scope, or not `extern',
574 since they are not the magic functions we would otherwise 601 since they are not the magic functions we would otherwise
689 716
690 if (!is_gimple_call (stmt)) 717 if (!is_gimple_call (stmt))
691 return false; 718 return false;
692 719
693 fndecl = gimple_call_fndecl (stmt); 720 fndecl = gimple_call_fndecl (stmt);
694 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 721 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
695 switch (DECL_FUNCTION_CODE (fndecl)) 722 switch (DECL_FUNCTION_CODE (fndecl))
696 { 723 {
697 CASE_BUILT_IN_ALLOCA: 724 CASE_BUILT_IN_ALLOCA:
698 return true; 725 return gimple_call_num_args (stmt) > 0;
699 default: 726 default:
700 break; 727 break;
701 } 728 }
702 729
703 return false; 730 return false;
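The added gimple_call_num_args check plausibly guards against malformed, prototype-less calls to alloca, which would otherwise be classified as real allocations even though they carry no size argument for callers of this predicate to inspect. A hypothetical reproducer, assuming such a declaration reaches the middle end unchanged:

/* No prototype, so the zero-argument call is accepted by the front end;
   the predicate above now answers "not an alloca call" instead of
   letting callers read a nonexistent first argument.  */
void *alloca ();

void *
f (void)
{
  return alloca ();
}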
1009 parameters, we must save and restore it. */ 1036 parameters, we must save and restore it. */
1010 1037
1011 static rtx 1038 static rtx
1012 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save) 1039 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1013 { 1040 {
1014 int low; 1041 unsigned int low;
1015 int high; 1042 unsigned int high;
1016 1043
1017 /* Compute the boundary of the area that needs to be saved, if any. */ 1044 /* Compute the boundary of the area that needs to be saved, if any. */
1018 high = reg_parm_stack_space; 1045 high = reg_parm_stack_space;
1019 if (ARGS_GROW_DOWNWARD) 1046 if (ARGS_GROW_DOWNWARD)
1020 high += 1; 1047 high += 1;
1021 1048
1022 if (high > highest_outgoing_arg_in_use) 1049 if (high > highest_outgoing_arg_in_use)
1023 high = highest_outgoing_arg_in_use; 1050 high = highest_outgoing_arg_in_use;
1024 1051
1025 for (low = 0; low < high; low++) 1052 for (low = 0; low < high; low++)
1026 if (stack_usage_map[low] != 0) 1053 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1027 { 1054 {
1028 int num_to_save; 1055 int num_to_save;
1029 machine_mode save_mode; 1056 machine_mode save_mode;
1030 int delta; 1057 int delta;
1031 rtx addr; 1058 rtx addr;
1193 is not. Return the initialized value. */ 1220 is not. Return the initialized value. */
1194 1221
1195 static tree 1222 static tree
1196 alloc_max_size (void) 1223 alloc_max_size (void)
1197 { 1224 {
1198 if (!alloc_object_size_limit) 1225 if (alloc_object_size_limit)
1199 { 1226 return alloc_object_size_limit;
1200 alloc_object_size_limit = TYPE_MAX_VALUE (ssizetype); 1227
1201 1228 HOST_WIDE_INT limit = warn_alloc_size_limit;
1202 if (warn_alloc_size_limit) 1229 if (limit == HOST_WIDE_INT_MAX)
1203 { 1230 limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1204 char *end = NULL; 1231
1205 errno = 0; 1232 alloc_object_size_limit = build_int_cst (size_type_node, limit);
1206 unsigned HOST_WIDE_INT unit = 1; 1233
1207 unsigned HOST_WIDE_INT limit
1208 = strtoull (warn_alloc_size_limit, &end, 10);
1209
1210 if (!errno)
1211 {
1212 if (end && *end)
1213 {
1214 /* Numeric option arguments are at most INT_MAX. Make it
1215 possible to specify a larger value by accepting common
1216 suffixes. */
1217 if (!strcmp (end, "kB"))
1218 unit = 1000;
1219 else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
1220 unit = 1024;
1221 else if (!strcmp (end, "MB"))
1222 unit = HOST_WIDE_INT_UC (1000) * 1000;
1223 else if (!strcasecmp (end, "MiB"))
1224 unit = HOST_WIDE_INT_UC (1024) * 1024;
1225 else if (!strcasecmp (end, "GB"))
1226 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
1227 else if (!strcasecmp (end, "GiB"))
1228 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
1229 else if (!strcasecmp (end, "TB"))
1230 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
1231 else if (!strcasecmp (end, "TiB"))
1232 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
1233 else if (!strcasecmp (end, "PB"))
1234 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
1235 else if (!strcasecmp (end, "PiB"))
1236 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
1237 else if (!strcasecmp (end, "EB"))
1238 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
1239 * 1000;
1240 else if (!strcasecmp (end, "EiB"))
1241 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
1242 * 1024;
1243 else
1244 unit = 0;
1245 }
1246
1247 if (unit)
1248 {
1249 widest_int w = wi::mul (limit, unit);
1250 if (w < wi::to_widest (alloc_object_size_limit))
1251 alloc_object_size_limit = wide_int_to_tree (ssizetype, w);
1252 }
1253 }
1254 }
1255 }
1256 return alloc_object_size_limit; 1234 return alloc_object_size_limit;
1257 } 1235 }
1258 1236
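In the rewritten alloc_max_size, warn_alloc_size_limit arrives as a HOST_WIDE_INT presumably already parsed (suffixes and all) by the option machinery, which is why the hand-rolled strtoull/suffix code on the left could go. A small hedged example of the user-visible behavior the limit implements (exact diagnostic wording varies by version):

/* Compile with: gcc -O2 -Walloc-size-larger-than=65536 -c alloc.c
   The constant request exceeds the 64 KiB limit, so GCC warns that the
   argument exceeds the maximum object size.  */
#include <stdlib.h>

void *
f (void)
{
  return malloc (1 << 20);   /* 1 MiB request against a 64 KiB limit */
}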
1259 /* Return true when EXP's range can be determined and set RANGE[] to it 1237 /* Return true when EXP's range can be determined and set RANGE[] to it
1260 after adjusting it if necessary to make EXP a valid size argument to 1238 after adjusting it if necessary to make EXP represent a valid size
1261 an allocation function declared with attribute alloc_size (whose 1239 of an object, or a valid size argument to an allocation function declared
1262 argument may be signed), or to a string manipulation function like 1240 with attribute alloc_size (whose argument may be signed), or to a string
1263 memset. */ 1241 manipulation function like memset. When ALLOW_ZERO is true, allow
1242 returning a range of [0, 0] for a size in an anti-range [1, N] where
1243 N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to
1244 allocation functions like malloc but it is a valid argument to
1245 functions like memset. */
1264 1246
1265 bool 1247 bool
1266 get_size_range (tree exp, tree range[2]) 1248 get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
1267 { 1249 {
1268 if (tree_fits_uhwi_p (exp)) 1250 if (tree_fits_uhwi_p (exp))
1269 { 1251 {
1270 /* EXP is a constant. */ 1252 /* EXP is a constant. */
1271 range[0] = range[1] = exp; 1253 range[0] = range[1] = exp;
1272 return true; 1254 return true;
1273 } 1255 }
1274 1256
1257 tree exptype = TREE_TYPE (exp);
1258 bool integral = INTEGRAL_TYPE_P (exptype);
1259
1275 wide_int min, max; 1260 wide_int min, max;
1276 enum value_range_type range_type 1261 enum value_range_kind range_type;
1277 = ((TREE_CODE (exp) == SSA_NAME && INTEGRAL_TYPE_P (TREE_TYPE (exp))) 1262
1278 ? get_range_info (exp, &min, &max) : VR_VARYING); 1263 if (integral)
1264 range_type = determine_value_range (exp, &min, &max);
1265 else
1266 range_type = VR_VARYING;
1279 1267
1280 if (range_type == VR_VARYING) 1268 if (range_type == VR_VARYING)
1281 { 1269 {
1282 /* No range information available. */ 1270 if (integral)
1271 {
1272 /* Use the full range of the type of the expression when
1273 no value range information is available. */
1274 range[0] = TYPE_MIN_VALUE (exptype);
1275 range[1] = TYPE_MAX_VALUE (exptype);
1276 return true;
1277 }
1278
1283 range[0] = NULL_TREE; 1279 range[0] = NULL_TREE;
1284 range[1] = NULL_TREE; 1280 range[1] = NULL_TREE;
1285 return false; 1281 return false;
1286 } 1282 }
1287 1283
1288 tree exptype = TREE_TYPE (exp);
1289 unsigned expprec = TYPE_PRECISION (exptype); 1284 unsigned expprec = TYPE_PRECISION (exptype);
1290 1285
1291 bool signed_p = !TYPE_UNSIGNED (exptype); 1286 bool signed_p = !TYPE_UNSIGNED (exptype);
1292 1287
1293 if (range_type == VR_ANTI_RANGE) 1288 if (range_type == VR_ANTI_RANGE)
1321 } 1316 }
1322 else if (wi::eq_p (0, min - 1)) 1317 else if (wi::eq_p (0, min - 1))
1323 { 1318 {
1324 /* EXP is unsigned and not in the range [1, MAX]. That means 1319 /* EXP is unsigned and not in the range [1, MAX]. That means
1325 it's either zero or greater than MAX. Even though 0 would 1320 it's either zero or greater than MAX. Even though 0 would
1326 normally be detected by -Walloc-zero set the range to 1321 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1327 [MAX, TYPE_MAX] so that when MAX is greater than the limit 1322 is true, set the range to [MAX, TYPE_MAX] so that when MAX
1328 the whole range is diagnosed. */ 1323 is greater than the limit the whole range is diagnosed. */
1329 min = max + 1; 1324 if (allow_zero)
1330 max = wi::to_wide (TYPE_MAX_VALUE (exptype)); 1325 min = max = wi::zero (expprec);
1326 else
1327 {
1328 min = max + 1;
1329 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1330 }
1331 } 1331 }
1332 else 1332 else
1333 { 1333 {
1334 max = min - 1; 1334 max = min - 1;
1335 min = wi::zero (expprec); 1335 min = wi::zero (expprec);
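A hedged sketch of how a caller typically consumes get_size_range; size_arg is an arbitrary tree expression and the surrounding code is illustrative GCC-internal usage, not a quotation:

tree range[2];
if (get_size_range (size_arg, range, /* allow_zero = */ true))
  {
    /* range[0] and range[1] are INTEGER_CSTs bounding the size as
       [min, max]; compare them against an object size or limit with
       tree_int_cst_lt and friends.  */
  }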
1452 attribute alloc_size (X, Y). */ 1452 attribute alloc_size (X, Y). */
1453 unsigned szprec = TYPE_PRECISION (size_type_node); 1453 unsigned szprec = TYPE_PRECISION (size_type_node);
1454 wide_int x = wi::to_wide (argrange[0][0], szprec); 1454 wide_int x = wi::to_wide (argrange[0][0], szprec);
1455 wide_int y = wi::to_wide (argrange[1][0], szprec); 1455 wide_int y = wi::to_wide (argrange[1][0], szprec);
1456 1456
1457 bool vflow; 1457 wi::overflow_type vflow;
1458 wide_int prod = wi::umul (x, y, &vflow); 1458 wide_int prod = wi::umul (x, y, &vflow);
1459 1459
1460 if (vflow) 1460 if (vflow)
1461 warned = warning_at (loc, OPT_Walloc_size_larger_than_, 1461 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1462 "%Kproduct %<%E * %E%> of arguments %i and %i " 1462 "%Kproduct %<%E * %E%> of arguments %i and %i "
1495 inform (fnloc, 1495 inform (fnloc,
1496 "in a call to allocation function %qD declared here", fn); 1496 "in a call to allocation function %qD declared here", fn);
1497 } 1497 }
1498 } 1498 }
1499 1499
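The wi::umul overflow check above is what fires for two-argument alloc_size functions whose size product cannot be represented. A hedged example on an LP64 target, relying on calloc being declared with attribute alloc_size (1, 2) as glibc does:

#include <stdlib.h>

void *
g (void)
{
  /* The product 2**32 * 2**32 overflows size_t, triggering the
     "product ... of arguments 1 and 2" warning emitted above.  */
  return calloc ((size_t) 1 << 32, (size_t) 1 << 32);
}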
1500 /* If EXPR refers to a character array or pointer declared attribute
1501 nonstring return a decl for that array or pointer and set *REF to
1502 the referenced enclosing object or pointer. Otherwise returns
1503 null. */
1504
1505 tree
1506 get_attr_nonstring_decl (tree expr, tree *ref)
1507 {
1508 tree decl = expr;
1509 if (TREE_CODE (decl) == SSA_NAME)
1510 {
1511 gimple *def = SSA_NAME_DEF_STMT (decl);
1512
1513 if (is_gimple_assign (def))
1514 {
1515 tree_code code = gimple_assign_rhs_code (def);
1516 if (code == ADDR_EXPR
1517 || code == COMPONENT_REF
1518 || code == VAR_DECL)
1519 decl = gimple_assign_rhs1 (def);
1520 }
1521 else if (tree var = SSA_NAME_VAR (decl))
1522 decl = var;
1523 }
1524
1525 if (TREE_CODE (decl) == ADDR_EXPR)
1526 decl = TREE_OPERAND (decl, 0);
1527
1528 if (ref)
1529 *ref = decl;
1530
1531 if (TREE_CODE (decl) == ARRAY_REF)
1532 decl = TREE_OPERAND (decl, 0);
1533 else if (TREE_CODE (decl) == COMPONENT_REF)
1534 decl = TREE_OPERAND (decl, 1);
1535 else if (TREE_CODE (decl) == MEM_REF)
1536 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
1537
1538 if (DECL_P (decl)
1539 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1540 return decl;
1541
1542 return NULL_TREE;
1543 }
1544
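For context, the attribute this helper looks up marks character arrays and pointers that need not be nul-terminated; a minimal declaration it would resolve references back to:

/* get_attr_nonstring_decl maps a use such as `buf` or `&buf[0]`
   back to this declaration.  */
__attribute__ ((nonstring)) char buf[8];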
1545 /* Warn about passing a non-string array/pointer to a function that
1546 expects a nul-terminated string argument. */
1547
1548 void
1549 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1550 {
1551 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1552 return;
1553
1554 if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
1555 return;
1556
1557 unsigned nargs = call_expr_nargs (exp);
1558
1559 /* The bound argument to a bounded string function like strncpy. */
1560 tree bound = NULL_TREE;
1561
1562 /* The range of lengths of a string argument to one of the comparison
1563 functions. If the length is less than the bound it is used instead. */
1564 tree lenrng[2] = { NULL_TREE, NULL_TREE };
1565
1566 /* It's safe to call "bounded" string functions with a non-string
1567 argument since the functions provide an explicit bound for this
1568 purpose. The exception is strncat where the bound may refer to
1569 either the destination or the source. */
1570 int fncode = DECL_FUNCTION_CODE (fndecl);
1571 switch (fncode)
1572 {
1573 case BUILT_IN_STRCMP:
1574 case BUILT_IN_STRNCMP:
1575 case BUILT_IN_STRNCASECMP:
1576 {
1577 /* For these, if one argument refers to one or more of a set
1578 of string constants or arrays of known size, determine
1579 the range of their known or possible lengths and use it
1580 conservatively as the bound for the unbounded function,
1581 and to adjust the range of the bound of the bounded ones. */
1582 for (unsigned argno = 0;
1583 argno < MIN (nargs, 2)
1584 && !(lenrng[1] && TREE_CODE (lenrng[1]) == INTEGER_CST); argno++)
1585 {
1586 tree arg = CALL_EXPR_ARG (exp, argno);
1587 if (!get_attr_nonstring_decl (arg))
1588 get_range_strlen (arg, lenrng);
1589 }
1590 }
1591 /* Fall through. */
1592
1593 case BUILT_IN_STRNCAT:
1594 case BUILT_IN_STPNCPY:
1595 case BUILT_IN_STRNCPY:
1596 if (nargs > 2)
1597 bound = CALL_EXPR_ARG (exp, 2);
1598 break;
1599
1600 case BUILT_IN_STRNDUP:
1601 if (nargs > 1)
1602 bound = CALL_EXPR_ARG (exp, 1);
1603 break;
1604
1605 case BUILT_IN_STRNLEN:
1606 {
1607 tree arg = CALL_EXPR_ARG (exp, 0);
1608 if (!get_attr_nonstring_decl (arg))
1609 get_range_strlen (arg, lenrng);
1610
1611 if (nargs > 1)
1612 bound = CALL_EXPR_ARG (exp, 1);
1613 break;
1614 }
1615
1616 default:
1617 break;
1618 }
1619
1620 /* Determine the range of the bound argument (if specified). */
1621 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1622 if (bound)
1623 {
1624 STRIP_NOPS (bound);
1625 get_size_range (bound, bndrng);
1626 }
1627
1628 location_t loc = EXPR_LOCATION (exp);
1629
1630 if (bndrng[0])
1631 {
1632 /* Diagnose excessive bound prior the adjustment below and
1633 regardless of attribute nonstring. */
1634 tree maxobjsize = max_object_size ();
1635 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1636 {
1637 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1638 warning_at (loc, OPT_Wstringop_overflow_,
1639 "%K%qD specified bound %E "
1640 "exceeds maximum object size %E",
1641 exp, fndecl, bndrng[0], maxobjsize);
1642 else
1643 warning_at (loc, OPT_Wstringop_overflow_,
1644 "%K%qD specified bound [%E, %E] "
1645 "exceeds maximum object size %E",
1646 exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
1647 return;
1648 }
1649 }
1650
1651 if (lenrng[1] && TREE_CODE (lenrng[1]) == INTEGER_CST)
1652 {
1653 /* Add one for the nul. */
1654 lenrng[1] = const_binop (PLUS_EXPR, TREE_TYPE (lenrng[1]),
1655 lenrng[1], size_one_node);
1656
1657 if (!bndrng[0])
1658 {
1659 /* Conservatively use the upper bound of the lengths for
1660 both the lower and the upper bound of the operation. */
1661 bndrng[0] = lenrng[1];
1662 bndrng[1] = lenrng[1];
1663 bound = void_type_node;
1664 }
1665 else
1666 {
1667 /* Replace the bound on the operation with the upper bound
1668 of the length of the string if the latter is smaller. */
1669 if (tree_int_cst_lt (lenrng[1], bndrng[0]))
1670 bndrng[0] = lenrng[1];
1671 else if (tree_int_cst_lt (lenrng[1], bndrng[1]))
1672 bndrng[1] = lenrng[1];
1673 }
1674 }
1675
1676 /* Iterate over the built-in function's formal arguments and check
1677 each const char* against the actual argument. If the actual
1678 argument is declared attribute non-string issue a warning unless
1679 the argument's maximum length is bounded. */
1680 function_args_iterator it;
1681 function_args_iter_init (&it, TREE_TYPE (fndecl));
1682
1683 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1684 {
1685 /* Avoid iterating past the declared argument in a call
1686 to function declared without a prototype. */
1687 if (argno >= nargs)
1688 break;
1689
1690 tree argtype = function_args_iter_cond (&it);
1691 if (!argtype)
1692 break;
1693
1694 if (TREE_CODE (argtype) != POINTER_TYPE)
1695 continue;
1696
1697 argtype = TREE_TYPE (argtype);
1698
1699 if (TREE_CODE (argtype) != INTEGER_TYPE
1700 || !TYPE_READONLY (argtype))
1701 continue;
1702
1703 argtype = TYPE_MAIN_VARIANT (argtype);
1704 if (argtype != char_type_node)
1705 continue;
1706
1707 tree callarg = CALL_EXPR_ARG (exp, argno);
1708 if (TREE_CODE (callarg) == ADDR_EXPR)
1709 callarg = TREE_OPERAND (callarg, 0);
1710
1711 /* See if the destination is declared with attribute "nonstring". */
1712 tree decl = get_attr_nonstring_decl (callarg);
1713 if (!decl)
1714 continue;
1715
1716 /* The maximum number of array elements accessed. */
1717 offset_int wibnd = 0;
1718
1719 if (argno && fncode == BUILT_IN_STRNCAT)
1720 {
1721 /* See if the bound in strncat is derived from the strlen
1722 of the destination (as it's expected to be).
1723 If so, reset BOUND and FNCODE to trigger a warning. */
1724 tree dstarg = CALL_EXPR_ARG (exp, 0);
1725 if (is_strlen_related_p (dstarg, bound))
1726 {
1727 /* The bound applies to the destination, not to the source,
1728 so reset these to trigger a warning without mentioning
1729 the bound. */
1730 bound = NULL;
1731 fncode = 0;
1732 }
1733 else if (bndrng[1])
1734 /* Use the upper bound of the range for strncat. */
1735 wibnd = wi::to_offset (bndrng[1]);
1736 }
1737 else if (bndrng[0])
1738 /* Use the lower bound of the range for functions other than
1739 strncat. */
1740 wibnd = wi::to_offset (bndrng[0]);
1741
1742 /* Determine the size of the argument array if it is one. */
1743 offset_int asize = wibnd;
1744 bool known_size = false;
1745 tree type = TREE_TYPE (decl);
1746
1747 /* Determine the array size. For arrays of unknown bound and
1748 pointers reset BOUND to trigger the appropriate warning. */
1749 if (TREE_CODE (type) == ARRAY_TYPE)
1750 {
1751 if (tree arrbnd = TYPE_DOMAIN (type))
1752 {
1753 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1754 {
1755 asize = wi::to_offset (arrbnd) + 1;
1756 known_size = true;
1757 }
1758 }
1759 else if (bound == void_type_node)
1760 bound = NULL_TREE;
1761 }
1762 else if (bound == void_type_node)
1763 bound = NULL_TREE;
1764
1765 /* In a call to strncat with a bound in a range whose lower but
1766 not upper bound is less than the array size, reset ASIZE to
1767 be the same as the bound and the other variable to trigger
1768 the appropriate warning below. */
1769 if (fncode == BUILT_IN_STRNCAT
1770 && bndrng[0] != bndrng[1]
1771 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1772 && (!known_size
1773 || wi::ltu_p (asize, wibnd)))
1774 {
1775 asize = wibnd;
1776 bound = NULL_TREE;
1777 fncode = 0;
1778 }
1779
1780 bool warned = false;
1781
1782 auto_diagnostic_group d;
1783 if (wi::ltu_p (asize, wibnd))
1784 {
1785 if (bndrng[0] == bndrng[1])
1786 warned = warning_at (loc, OPT_Wstringop_overflow_,
1787 "%qD argument %i declared attribute "
1788 "%<nonstring%> is smaller than the specified "
1789 "bound %wu",
1790 fndecl, argno + 1, wibnd.to_uhwi ());
1791 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1792 warned = warning_at (loc, OPT_Wstringop_overflow_,
1793 "%qD argument %i declared attribute "
1794 "%<nonstring%> is smaller than "
1795 "the specified bound [%E, %E]",
1796 fndecl, argno + 1, bndrng[0], bndrng[1]);
1797 else
1798 warned = warning_at (loc, OPT_Wstringop_overflow_,
1799 "%qD argument %i declared attribute "
1800 "%<nonstring%> may be smaller than "
1801 "the specified bound [%E, %E]",
1802 fndecl, argno + 1, bndrng[0], bndrng[1]);
1803 }
1804 else if (fncode == BUILT_IN_STRNCAT)
1805 ; /* Avoid warning for calls to strncat() when the bound
1806 is equal to the size of the non-string argument. */
1807 else if (!bound)
1808 warned = warning_at (loc, OPT_Wstringop_overflow_,
1809 "%qD argument %i declared attribute %<nonstring%>",
1810 fndecl, argno + 1);
1811
1812 if (warned)
1813 inform (DECL_SOURCE_LOCATION (decl),
1814 "argument %qD declared here", decl);
1815 }
1816 }
1817
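A short hedged example of a call the routine above diagnoses: passing a nonstring array to a string function with no usable bound draws -Wstringop-overflow (wording per the warning_at calls above):

#include <string.h>

__attribute__ ((nonstring)) char id[8];

int
cmp (const char *s)
{
  /* warning: 'strcmp' argument 1 declared attribute 'nonstring' */
  return strcmp (id, s);
}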
1500 /* Issue an error if CALL_EXPR was flagged as requiring 1818 /* Issue an error if CALL_EXPR was flagged as requiring
1501 tail-call optimization. */ 1819 tail-call optimization. */
1502 1820
1503 static void 1821 static void
1504 maybe_complain_about_tail_call (tree call_expr, const char *reason) 1822 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1547 int n_named_args ATTRIBUTE_UNUSED, 1865 int n_named_args ATTRIBUTE_UNUSED,
1548 tree exp, tree struct_value_addr_value, 1866 tree exp, tree struct_value_addr_value,
1549 tree fndecl, tree fntype, 1867 tree fndecl, tree fntype,
1550 cumulative_args_t args_so_far, 1868 cumulative_args_t args_so_far,
1551 int reg_parm_stack_space, 1869 int reg_parm_stack_space,
1552 rtx *old_stack_level, int *old_pending_adj, 1870 rtx *old_stack_level,
1871 poly_int64_pod *old_pending_adj,
1553 int *must_preallocate, int *ecf_flags, 1872 int *must_preallocate, int *ecf_flags,
1554 bool *may_tailcall, bool call_from_thunk_p) 1873 bool *may_tailcall, bool call_from_thunk_p)
1555 { 1874 {
1556 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far); 1875 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1557 location_t loc = EXPR_LOCATION (exp); 1876 location_t loc = EXPR_LOCATION (exp);
1569 /* In this loop, we consider args in the order they are written. 1888 /* In this loop, we consider args in the order they are written.
1570 We fill up ARGS from the back. */ 1889 We fill up ARGS from the back. */
1571 1890
1572 i = num_actuals - 1; 1891 i = num_actuals - 1;
1573 { 1892 {
1574 int j = i, ptr_arg = -1; 1893 int j = i;
1575 call_expr_arg_iterator iter; 1894 call_expr_arg_iterator iter;
1576 tree arg; 1895 tree arg;
1577 bitmap slots = NULL; 1896 bitmap slots = NULL;
1578 1897
1579 if (struct_value_addr_value) 1898 if (struct_value_addr_value)
1580 { 1899 {
1581 args[j].tree_value = struct_value_addr_value; 1900 args[j].tree_value = struct_value_addr_value;
1582 j--; 1901 j--;
1583
1584 /* If we pass structure address then we need to
1585 create bounds for it. Since created bounds is
1586 a call statement, we expand it right here to avoid
1587 fixing all other places where it may be expanded. */
1588 if (CALL_WITH_BOUNDS_P (exp))
1589 {
1590 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1591 args[j].tree_value
1592 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1593 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1594 EXPAND_NORMAL, 0, false);
1595 args[j].pointer_arg = j + 1;
1596 j--;
1597 }
1598 } 1902 }
1599 argpos = 0; 1903 argpos = 0;
1600 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) 1904 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1601 { 1905 {
1602 tree argtype = TREE_TYPE (arg); 1906 tree argtype = TREE_TYPE (arg);
1603
1604 /* Remember last param with pointer and associate it
1605 with following pointer bounds. */
1606 if (CALL_WITH_BOUNDS_P (exp)
1607 && chkp_type_has_pointer (argtype))
1608 {
1609 if (slots)
1610 BITMAP_FREE (slots);
1611 ptr_arg = j;
1612 if (!BOUNDED_TYPE_P (argtype))
1613 {
1614 slots = BITMAP_ALLOC (NULL);
1615 chkp_find_bound_slots (argtype, slots);
1616 }
1617 }
1618 else if (CALL_WITH_BOUNDS_P (exp)
1619 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1620 argpos < n_named_args))
1621 {
1622 if (slots)
1623 BITMAP_FREE (slots);
1624 ptr_arg = j;
1625 }
1626 else if (POINTER_BOUNDS_TYPE_P (argtype))
1627 {
1628 /* We expect bounds in instrumented calls only.
1629 Otherwise it is a sign we lost flag due to some optimization
1630 and may emit call args incorrectly. */
1631 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1632
1633 /* For structures look for the next available pointer. */
1634 if (ptr_arg != -1 && slots)
1635 {
1636 unsigned bnd_no = bitmap_first_set_bit (slots);
1637 args[j].pointer_offset =
1638 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1639
1640 bitmap_clear_bit (slots, bnd_no);
1641
1642 /* Check we have no more pointers in the structure. */
1643 if (bitmap_empty_p (slots))
1644 BITMAP_FREE (slots);
1645 }
1646 args[j].pointer_arg = ptr_arg;
1647
1648 /* Check we covered all pointers in the previous
1649 non bounds arg. */
1650 if (!slots)
1651 ptr_arg = -1;
1652 }
1653 else
1654 ptr_arg = -1;
1655 1907
1656 if (targetm.calls.split_complex_arg 1908 if (targetm.calls.split_complex_arg
1657 && argtype 1909 && argtype
1658 && TREE_CODE (argtype) == COMPLEX_TYPE 1910 && TREE_CODE (argtype) == COMPLEX_TYPE
1659 && targetm.calls.split_complex_arg (argtype)) 1911 && targetm.calls.split_complex_arg (argtype))
1771 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) 2023 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1772 { 2024 {
1773 *may_tailcall = false; 2025 *may_tailcall = false;
1774 maybe_complain_about_tail_call (exp, 2026 maybe_complain_about_tail_call (exp,
1775 "a callee-copied argument is" 2027 "a callee-copied argument is"
1776 " stored in the current " 2028 " stored in the current"
1777 " function's frame"); 2029 " function's frame");
1778 } 2030 }
1779 2031
1780 args[i].tree_value = build_fold_addr_expr_loc (loc, 2032 args[i].tree_value = build_fold_addr_expr_loc (loc,
1781 args[i].tree_value); 2033 args[i].tree_value);
1851 fndecl ? TREE_TYPE (fndecl) : fntype, 0); 2103 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1852 2104
1853 args[i].unsignedp = unsignedp; 2105 args[i].unsignedp = unsignedp;
1854 args[i].mode = mode; 2106 args[i].mode = mode;
1855 2107
2108 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2109
1856 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type, 2110 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1857 argpos < n_named_args); 2111 argpos < n_named_args);
1858 2112
1859 if (args[i].reg && CONST_INT_P (args[i].reg)) 2113 if (args[i].reg && CONST_INT_P (args[i].reg))
1860 { 2114 {
1893 to preallocate. */ 2147 to preallocate. */
1894 if (TREE_ADDRESSABLE (type) 2148 if (TREE_ADDRESSABLE (type)
1895 || (args[i].pass_on_stack && args[i].reg != 0)) 2149 || (args[i].pass_on_stack && args[i].reg != 0))
1896 *must_preallocate = 1; 2150 *must_preallocate = 1;
1897 2151
1898 /* No stack allocation and padding for bounds. */
1899 if (POINTER_BOUNDS_P (args[i].tree_value))
1900 ;
1901 /* Compute the stack-size of this argument. */ 2152 /* Compute the stack-size of this argument. */
1902 else if (args[i].reg == 0 || args[i].partial != 0 2153 if (args[i].reg == 0 || args[i].partial != 0
1903 || reg_parm_stack_space > 0 2154 || reg_parm_stack_space > 0
1904 || args[i].pass_on_stack) 2155 || args[i].pass_on_stack)
1905 locate_and_pad_parm (mode, type, 2156 locate_and_pad_parm (mode, type,
1906 #ifdef STACK_PARMS_IN_REG_PARM_AREA 2157 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1907 1, 2158 1,
1944 { 2195 {
1945 /* Check the arguments of functions decorated with attribute 2196 /* Check the arguments of functions decorated with attribute
1946 alloc_size. */ 2197 alloc_size. */
1947 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx); 2198 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
1948 } 2199 }
2200
2201 /* Detect passing non-string arguments to functions expecting
2202 nul-terminated strings. */
2203 maybe_warn_nonstring_arg (fndecl, exp);
1949 } 2204 }
1950 2205
1951 /* Update ARGS_SIZE to contain the total size for the argument block. 2206 /* Update ARGS_SIZE to contain the total size for the argument block.
1952 Return the original constant component of the argument block's size. 2207 Return the original constant component of the argument block's size.
1953 2208
1954 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved 2209 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1955 for arguments passed in registers. */ 2210 for arguments passed in registers. */
1956 2211
1957 static int 2212 static poly_int64
1958 compute_argument_block_size (int reg_parm_stack_space, 2213 compute_argument_block_size (int reg_parm_stack_space,
1959 struct args_size *args_size, 2214 struct args_size *args_size,
1960 tree fndecl ATTRIBUTE_UNUSED, 2215 tree fndecl ATTRIBUTE_UNUSED,
1961 tree fntype ATTRIBUTE_UNUSED, 2216 tree fntype ATTRIBUTE_UNUSED,
1962 int preferred_stack_boundary ATTRIBUTE_UNUSED) 2217 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1963 { 2218 {
1964 int unadjusted_args_size = args_size->constant; 2219 poly_int64 unadjusted_args_size = args_size->constant;
1965 2220
1966 /* For accumulate outgoing args mode we don't need to align, since the frame 2221 /* For accumulate outgoing args mode we don't need to align, since the frame
1967 will be already aligned. Align to STACK_BOUNDARY in order to prevent 2222 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1968 backends from generating misaligned frame sizes. */ 2223 backends from generating misaligned frame sizes. */
1969 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY) 2224 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1982 if (preferred_stack_boundary > 1) 2237 if (preferred_stack_boundary > 1)
1983 { 2238 {
1984 /* We don't handle this case yet. To handle it correctly we have 2239 /* We don't handle this case yet. To handle it correctly we have
1985 to add the delta, round and subtract the delta. 2240 to add the delta, round and subtract the delta.
1986 Currently no machine description requires this support. */ 2241 Currently no machine description requires this support. */
1987 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1))); 2242 gcc_assert (multiple_p (stack_pointer_delta,
2243 preferred_stack_boundary));
1988 args_size->var = round_up (args_size->var, preferred_stack_boundary); 2244 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1989 } 2245 }
1990 2246
1991 if (reg_parm_stack_space > 0) 2247 if (reg_parm_stack_space > 0)
1992 { 2248 {
2005 else 2261 else
2006 { 2262 {
2007 preferred_stack_boundary /= BITS_PER_UNIT; 2263 preferred_stack_boundary /= BITS_PER_UNIT;
2008 if (preferred_stack_boundary < 1) 2264 if (preferred_stack_boundary < 1)
2009 preferred_stack_boundary = 1; 2265 preferred_stack_boundary = 1;
2010 args_size->constant = (((args_size->constant 2266 args_size->constant = (aligned_upper_bound (args_size->constant
2011 + stack_pointer_delta 2267 + stack_pointer_delta,
2012 + preferred_stack_boundary - 1) 2268 preferred_stack_boundary)
2013 / preferred_stack_boundary
2014 * preferred_stack_boundary)
2015 - stack_pointer_delta); 2269 - stack_pointer_delta);
2016 2270
2017 args_size->constant = MAX (args_size->constant, 2271 args_size->constant = upper_bound (args_size->constant,
2018 reg_parm_stack_space); 2272 reg_parm_stack_space);
2019 2273
2020 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 2274 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2021 args_size->constant -= reg_parm_stack_space; 2275 args_size->constant -= reg_parm_stack_space;
2022 } 2276 }
2023 return unadjusted_args_size; 2277 return unadjusted_args_size;
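The manual rounding that aligned_upper_bound replaces can be sanity-checked with small numbers; a self-contained sketch of the identity, with illustrative values:

#include <assert.h>

/* Old scalar form: round C up so that C + DELTA is a multiple of B.
   aligned_upper_bound (c + delta, b) - delta computes the same value
   for poly_int operands.  */
static long
round_args (long c, long delta, long b)
{
  return (c + delta + b - 1) / b * b - delta;
}

int
main (void)
{
  /* 9 bytes of arguments with the stack pointer already 4 bytes past
     an 8-byte boundary: pad to 12 so that 12 + 4 = 16 stays aligned.  */
  assert (round_args (9, 4, 8) == 12);
  return 0;
}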
2118 PUSH_ROUNDING. */ 2372 PUSH_ROUNDING. */
2119 2373
2120 if (! must_preallocate) 2374 if (! must_preallocate)
2121 { 2375 {
2122 int partial_seen = 0; 2376 int partial_seen = 0;
2123 int copy_to_evaluate_size = 0; 2377 poly_int64 copy_to_evaluate_size = 0;
2124 int i; 2378 int i;
2125 2379
2126 for (i = 0; i < num_actuals && ! must_preallocate; i++) 2380 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2127 { 2381 {
2128 if (args[i].partial > 0 && ! args[i].pass_on_stack) 2382 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2129 partial_seen = 1; 2383 partial_seen = 1;
2130 else if (partial_seen && args[i].reg == 0) 2384 else if (partial_seen && args[i].reg == 0)
2131 must_preallocate = 1;
2132 /* We preallocate in case there are bounds passed
2133 in the bounds table to have precomputed address
2134 for bounds association. */
2135 else if (POINTER_BOUNDS_P (args[i].tree_value)
2136 && !args[i].reg)
2137 must_preallocate = 1; 2385 must_preallocate = 1;
2138 2386
2139 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode 2387 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2140 && (TREE_CODE (args[i].tree_value) == CALL_EXPR 2388 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2141 || TREE_CODE (args[i].tree_value) == TARGET_EXPR 2389 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2143 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) 2391 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2144 copy_to_evaluate_size 2392 copy_to_evaluate_size
2145 += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); 2393 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2146 } 2394 }
2147 2395
2148 if (copy_to_evaluate_size * 2 >= args_size->constant 2396 if (maybe_ne (args_size->constant, 0)
2149 && args_size->constant > 0) 2397 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2150 must_preallocate = 1; 2398 must_preallocate = 1;
2151 } 2399 }
2152 return must_preallocate; 2400 return must_preallocate;
2153 } 2401 }
2154 2402
2164 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals) 2412 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2165 { 2413 {
2166 if (argblock) 2414 if (argblock)
2167 { 2415 {
2168 rtx arg_reg = argblock; 2416 rtx arg_reg = argblock;
2169 int i, arg_offset = 0; 2417 int i;
2418 poly_int64 arg_offset = 0;
2170 2419
2171 if (GET_CODE (argblock) == PLUS) 2420 if (GET_CODE (argblock) == PLUS)
2172 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1)); 2421 {
2422 arg_reg = XEXP (argblock, 0);
2423 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2424 }
2173 2425
2174 for (i = 0; i < num_actuals; i++) 2426 for (i = 0; i < num_actuals; i++)
2175 { 2427 {
2176 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset); 2428 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2177 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset); 2429 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2178 rtx addr; 2430 rtx addr;
2179 unsigned int align, boundary; 2431 unsigned int align, boundary;
2180 unsigned int units_on_stack = 0; 2432 poly_uint64 units_on_stack = 0;
2181 machine_mode partial_mode = VOIDmode; 2433 machine_mode partial_mode = VOIDmode;
2182 2434
2183 /* Skip this parm if it will not be passed on the stack. */ 2435 /* Skip this parm if it will not be passed on the stack. */
2184 if (! args[i].pass_on_stack 2436 if (! args[i].pass_on_stack
2185 && args[i].reg != 0 2437 && args[i].reg != 0
2186 && args[i].partial == 0) 2438 && args[i].partial == 0)
2187 continue; 2439 continue;
2188 2440
2189 /* Pointer Bounds are never passed on the stack. */ 2441 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2190 if (POINTER_BOUNDS_P (args[i].tree_value))
2191 continue; 2442 continue;
2192 2443
2193 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset); 2444 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2194 addr = plus_constant (Pmode, addr, arg_offset); 2445 addr = plus_constant (Pmode, addr, arg_offset);
2195 2446
2196 if (args[i].partial != 0) 2447 if (args[i].partial != 0)
2197 { 2448 {
2198 /* Only part of the parameter is being passed on the stack. 2449 /* Only part of the parameter is being passed on the stack.
2199 Generate a simple memory reference of the correct size. */ 2450 Generate a simple memory reference of the correct size. */
2200 units_on_stack = args[i].locate.size.constant; 2451 units_on_stack = args[i].locate.size.constant;
2201 unsigned int bits_on_stack = units_on_stack * BITS_PER_UNIT; 2452 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2202 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk (); 2453 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2203 args[i].stack = gen_rtx_MEM (partial_mode, addr); 2454 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2204 set_mem_size (args[i].stack, units_on_stack); 2455 set_mem_size (args[i].stack, units_on_stack);
2205 } 2456 }
2206 else 2457 else
2209 set_mem_attributes (args[i].stack, 2460 set_mem_attributes (args[i].stack,
2210 TREE_TYPE (args[i].tree_value), 1); 2461 TREE_TYPE (args[i].tree_value), 1);
2211 } 2462 }
2212 align = BITS_PER_UNIT; 2463 align = BITS_PER_UNIT;
2213 boundary = args[i].locate.boundary; 2464 boundary = args[i].locate.boundary;
2465 poly_int64 offset_val;
2214 if (args[i].locate.where_pad != PAD_DOWNWARD) 2466 if (args[i].locate.where_pad != PAD_DOWNWARD)
2215 align = boundary; 2467 align = boundary;
2216 else if (CONST_INT_P (offset)) 2468 else if (poly_int_rtx_p (offset, &offset_val))
2217 { 2469 {
2218 align = INTVAL (offset) * BITS_PER_UNIT | boundary; 2470 align = least_bit_hwi (boundary);
2219 align = least_bit_hwi (align); 2471 unsigned int offset_align
2472 = known_alignment (offset_val) * BITS_PER_UNIT;
2473 if (offset_align != 0)
2474 align = MIN (align, offset_align);
2220 } 2475 }
2221 set_mem_align (args[i].stack, align); 2476 set_mem_align (args[i].stack, align);
2222 2477
2223 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset); 2478 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2224 addr = plus_constant (Pmode, addr, arg_offset); 2479 addr = plus_constant (Pmode, addr, arg_offset);
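The deleted scalar alignment trick is worth spelling out: when the base is aligned to `boundary` bits, the guaranteed alignment of base + offset is the least set bit of (offset_in_bits | boundary); the poly_int replacement reaches the same answer through known_alignment. A small sketch with illustrative values:

#include <assert.h>

static unsigned
least_bit (unsigned x)
{
  return x & -x;   /* isolate the lowest set bit */
}

int
main (void)
{
  /* A 4-byte (32-bit) offset under a 64-bit boundary: 32 | 64 = 96,
     least set bit 32, so the slot is known 32-bit aligned.  */
  assert (least_bit ((4 * 8) | 64) == 32);
  return 0;
}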
2277 push_temp_slots (); 2532 push_temp_slots ();
2278 funexp = expand_normal (addr); 2533 funexp = expand_normal (addr);
2279 pop_temp_slots (); /* FUNEXP can't be BLKmode. */ 2534 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2280 } 2535 }
2281 return funexp; 2536 return funexp;
2537 }
2538
2539 /* Return the static chain for this function, if any. */
2540
2541 rtx
2542 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2543 {
2544 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2545 return NULL;
2546
2547 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2282 } 2548 }
2283 2549
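The new rtx_for_static_chain helper answers where the static chain lives, when one is needed at all. A standard GNU C case that requires one (illustrative source, not part of the diff):

int
outer (int x)
{
  /* The nested function captures `x`, so calls to it must pass the
     enclosing frame via the static chain; rtx_for_static_chain returns
     the target's register or slot for it, or NULL when the function
     needs no chain.  */
  int inner (int y) { return x + y; }
  return inner (1);
}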
2284 /* Internal state for internal_arg_pointer_based_exp and its helpers. */ 2550 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2285 static struct 2551 static struct
2286 { 2552 {
2357 return const0_rtx; 2623 return const0_rtx;
2358 2624
2359 if (REG_P (rtl) && HARD_REGISTER_P (rtl)) 2625 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2360 return NULL_RTX; 2626 return NULL_RTX;
2361 2627
2362 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1))) 2628 poly_int64 offset;
2629 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2363 { 2630 {
2364 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel); 2631 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2365 if (val == NULL_RTX || val == pc_rtx) 2632 if (val == NULL_RTX || val == pc_rtx)
2366 return val; 2633 return val;
2367 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1))); 2634 return plus_constant (Pmode, val, offset);
2368 } 2635 }
2369 2636
2370 /* When called at the topmost level, scan pseudo assignments in between the 2637 /* When called at the topmost level, scan pseudo assignments in between the
2371 last scanned instruction in the tail call sequence and the latest insn 2638 last scanned instruction in the tail call sequence and the latest insn
2372 in that sequence. */ 2639 in that sequence. */
2393 } 2660 }
2394 2661
2395 return NULL_RTX; 2662 return NULL_RTX;
2396 } 2663 }
2397 2664
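The three-way contract of internal_arg_pointer_based_exp is easiest to read off its callers below: NULL_RTX means the address is known not to be based on the internal argument pointer, pc_rtx means it is (or may be) based on it at an unknown offset, and a constant means it is exactly the argument pointer plus that offset. A minimal sketch of that contract, with illustrative names rather than GCC's rtx values:

/* Sketch only: the real function returns NULL_RTX, pc_rtx, or a
   constant rtx; this enum just names the three cases.  */
enum class arg_ptr_class
{
  unrelated,       /* NULL_RTX: not based on the arg pointer    */
  unknown_offset,  /* pc_rtx: based on it, but offset unknown   */
  known_offset     /* constant: arg pointer plus a known offset */
};

Callers such as mem_might_overlap_already_clobbered_arg_p below must treat the unknown_offset case conservatively, which is why a non-constant result makes that function report a possible overlap.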
2398 /* Return true if and only if SIZE storage units (usually bytes) 2665 /* Return true if SIZE bytes starting from address ADDR might overlap an
2399 starting from address ADDR overlap with already clobbered argument 2666 already-clobbered argument area. This function is used to determine
2400 area. This function is used to determine if we should give up a 2667 if we should give up a sibcall. */
2401 sibcall. */
2402 2668
2403 static bool 2669 static bool
2404 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size) 2670 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2405 { 2671 {
2406 HOST_WIDE_INT i; 2672 poly_int64 i;
2673 unsigned HOST_WIDE_INT start, end;
2407 rtx val; 2674 rtx val;
2408 2675
2409 if (bitmap_empty_p (stored_args_map)) 2676 if (bitmap_empty_p (stored_args_map)
2677 && stored_args_watermark == HOST_WIDE_INT_M1U)
2410 return false; 2678 return false;
2411 val = internal_arg_pointer_based_exp (addr, true); 2679 val = internal_arg_pointer_based_exp (addr, true);
2412 if (val == NULL_RTX) 2680 if (val == NULL_RTX)
2413 return false; 2681 return false;
2414 else if (val == pc_rtx) 2682 else if (!poly_int_rtx_p (val, &i))
2415 return true; 2683 return true;
2416 else 2684
2417 i = INTVAL (val); 2685 if (known_eq (size, 0U))
2686 return false;
2418 2687
2419 if (STACK_GROWS_DOWNWARD) 2688 if (STACK_GROWS_DOWNWARD)
2420 i -= crtl->args.pretend_args_size; 2689 i -= crtl->args.pretend_args_size;
2421 else 2690 else
2422 i += crtl->args.pretend_args_size; 2691 i += crtl->args.pretend_args_size;
2423 2692
2424
2425 if (ARGS_GROW_DOWNWARD) 2693 if (ARGS_GROW_DOWNWARD)
2426 i = -i - size; 2694 i = -i - size;
2427 2695
2428 if (size > 0) 2696 /* We can ignore any references to the function's pretend args,
2429 { 2697 which at this point would manifest as negative values of I. */
2430 unsigned HOST_WIDE_INT k; 2698 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2431 2699 return false;
2432 for (k = 0; k < size; k++) 2700
2433 if (i + k < SBITMAP_SIZE (stored_args_map) 2701 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2434 && bitmap_bit_p (stored_args_map, i + k)) 2702 if (!(i + size).is_constant (&end))
2435 return true; 2703 end = HOST_WIDE_INT_M1U;
2436 } 2704
2705 if (end > stored_args_watermark)
2706 return true;
2707
2708 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2709 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2710 if (bitmap_bit_p (stored_args_map, k))
2711 return true;
2437 2712
2438 return false; 2713 return false;
2439 } 2714 }
2440 2715
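Once the address has been classified to a known offset I, the overlap test above degrades gracefully for poly_int sizes: constant byte indices are checked against the stored_args_map bitmap, and anything at or beyond stored_args_watermark is assumed stored. A self-contained sketch of the same query with plain integers and std::vector<bool> standing in for sbitmap (illustrative names and types, not GCC's):

#include <algorithm>
#include <cstdint>
#include <vector>

/* Sketch: would SIZE bytes at classified offset I overlap bytes
   already stored for tail-call arguments?  WATERMARK plays the role
   of stored_args_watermark: indices at or above it are treated as
   stored without consulting the bitmap.  */
static bool
might_overlap (int64_t i, uint64_t size,
               const std::vector<bool> &stored_args, uint64_t watermark)
{
  if (size == 0)
    return false;
  /* References wholly below offset 0 cover only the pretend args
     and are safe (the known_le tests above).  */
  if (i <= 0 && size <= (uint64_t) -i)
    return false;
  uint64_t start = i < 0 ? 0 : (uint64_t) i;
  uint64_t end = (uint64_t) (i + (int64_t) size); /* > 0 after the check */
  if (end > watermark)
    return true;
  end = std::min<uint64_t> (end, stored_args.size ());
  for (uint64_t k = start; k < end; ++k)
    if (stored_args[k])
      return true;
  return false;
}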
2441 /* Do the register loads required for any wholly-register parms or any 2716 /* Do the register loads required for any wholly-register parms or any
2461 ? args[i].tail_call_reg : args[i].reg); 2736 ? args[i].tail_call_reg : args[i].reg);
2462 if (reg) 2737 if (reg)
2463 { 2738 {
2464 int partial = args[i].partial; 2739 int partial = args[i].partial;
2465 int nregs; 2740 int nregs;
2466 int size = 0; 2741 poly_int64 size = 0;
2742 HOST_WIDE_INT const_size = 0;
2467 rtx_insn *before_arg = get_last_insn (); 2743 rtx_insn *before_arg = get_last_insn ();
2468 /* Set non-negative if we must move a word at a time, even if 2744 /* Set non-negative if we must move a word at a time, even if
2469 just one word (e.g. partial == 4 && mode == DFmode). Set 2745 just one word (e.g. partial == 4 && mode == DFmode). Set
2470 to -1 if we just use a normal move insn. This value can be 2746 to -1 if we just use a normal move insn. This value can be
2471 zero if the argument is a zero size structure. */ 2747 zero if the argument is a zero size structure. */
2477 gcc_assert (partial % UNITS_PER_WORD == 0); 2753 gcc_assert (partial % UNITS_PER_WORD == 0);
2478 nregs = partial / UNITS_PER_WORD; 2754 nregs = partial / UNITS_PER_WORD;
2479 } 2755 }
2480 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) 2756 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2481 { 2757 {
2482 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); 2758 /* Variable-sized parameters should be described by a
2483 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; 2759 PARALLEL instead. */
2760 const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2761 gcc_assert (const_size >= 0);
2762 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2763 size = const_size;
2484 } 2764 }
2485 else 2765 else
2486 size = GET_MODE_SIZE (args[i].mode); 2766 size = GET_MODE_SIZE (args[i].mode);
2487 2767
2488 /* Handle calls that pass values in multiple non-contiguous 2768 /* Handle calls that pass values in multiple non-contiguous
2500 emit_move_insn (reg, args[i].value); 2780 emit_move_insn (reg, args[i].value);
2501 #ifdef BLOCK_REG_PADDING 2781 #ifdef BLOCK_REG_PADDING
2502 /* Handle case where we have a value that needs shifting 2782 /* Handle case where we have a value that needs shifting
2503 up to the msb, e.g. a QImode value and we're padding 2783 up to the msb, e.g. a QImode value and we're padding
2504 upward on a BYTES_BIG_ENDIAN machine. */ 2784 upward on a BYTES_BIG_ENDIAN machine. */
2505 if (size < UNITS_PER_WORD 2785 if (args[i].locate.where_pad
2506 && (args[i].locate.where_pad 2786 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
2507 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2508 { 2787 {
2509 rtx x; 2788 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2510 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 2789 if (maybe_lt (size, UNITS_PER_WORD))
2511 2790 {
2512 /* Assigning REG here rather than a temp makes CALL_FUSAGE 2791 rtx x;
2513 report the whole reg as used. Strictly speaking, the 2792 poly_int64 shift
2514 call only uses SIZE bytes at the msb end, but it doesn't 2793 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2515 seem worth generating rtl to say that. */ 2794
2516 reg = gen_rtx_REG (word_mode, REGNO (reg)); 2795 /* Assigning REG here rather than a temp makes
2517 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1); 2796 CALL_FUSAGE report the whole reg as used.
2518 if (x != reg) 2797 Strictly speaking, the call only uses SIZE
2519 emit_move_insn (reg, x); 2798 bytes at the msb end, but it doesn't seem worth
2799 generating rtl to say that. */
2800 reg = gen_rtx_REG (word_mode, REGNO (reg));
2801 x = expand_shift (LSHIFT_EXPR, word_mode,
2802 reg, shift, reg, 1);
2803 if (x != reg)
2804 emit_move_insn (reg, x);
2805 }
2520 } 2806 }
2521 #endif 2807 #endif
2522 } 2808 }
2523 2809
2524 /* If we have pre-computed the values to put in the registers in 2810 /* If we have pre-computed the values to put in the registers in
2529 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j), 2815 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2530 args[i].aligned_regs[j]); 2816 args[i].aligned_regs[j]);
2531 2817
2532 else if (partial == 0 || args[i].pass_on_stack) 2818 else if (partial == 0 || args[i].pass_on_stack)
2533 { 2819 {
2820 /* SIZE and CONST_SIZE are 0 for partial arguments and
2821 the size of a BLKmode type otherwise. */
2822 gcc_checking_assert (known_eq (size, const_size));
2534 rtx mem = validize_mem (copy_rtx (args[i].value)); 2823 rtx mem = validize_mem (copy_rtx (args[i].value));
2535 2824
2536 /* Check for overlap with already clobbered argument area, 2825 /* Check for overlap with already clobbered argument area,
2537 provided that it has non-zero size. */ 2826 provided that it has non-zero size. */
2538 if (is_sibcall 2827 if (is_sibcall
2539 && size != 0 2828 && const_size != 0
2540 && (mem_overlaps_already_clobbered_arg_p 2829 && (mem_might_overlap_already_clobbered_arg_p
2541 (XEXP (args[i].value, 0), size))) 2830 (XEXP (args[i].value, 0), const_size)))
2542 *sibcall_failure = 1; 2831 *sibcall_failure = 1;
2543 2832
2544 if (size % UNITS_PER_WORD == 0 2833 if (const_size % UNITS_PER_WORD == 0
2545 || MEM_ALIGN (mem) % BITS_PER_WORD == 0) 2834 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2546 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); 2835 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2547 else 2836 else
2548 { 2837 {
2549 if (nregs > 1) 2838 if (nregs > 1)
2550 move_block_to_reg (REGNO (reg), mem, nregs - 1, 2839 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2551 args[i].mode); 2840 args[i].mode);
2552 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1); 2841 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2553 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD; 2842 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2554 unsigned int bitsize = size * BITS_PER_UNIT - bitoff; 2843 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
2555 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest, 2844 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2556 word_mode, word_mode, false, 2845 word_mode, word_mode, false,
2557 NULL); 2846 NULL);
2558 if (BYTES_BIG_ENDIAN) 2847 if (BYTES_BIG_ENDIAN)
2559 x = expand_shift (LSHIFT_EXPR, word_mode, x, 2848 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2561 if (x != dest) 2850 if (x != dest)
2562 emit_move_insn (dest, x); 2851 emit_move_insn (dest, x);
2563 } 2852 }
2564 2853
2565 /* Handle a BLKmode that needs shifting. */ 2854 /* Handle a BLKmode that needs shifting. */
2566 if (nregs == 1 && size < UNITS_PER_WORD 2855 if (nregs == 1 && const_size < UNITS_PER_WORD
2567 #ifdef BLOCK_REG_PADDING 2856 #ifdef BLOCK_REG_PADDING
2568 && args[i].locate.where_pad == PAD_DOWNWARD 2857 && args[i].locate.where_pad == PAD_DOWNWARD
2569 #else 2858 #else
2570 && BYTES_BIG_ENDIAN 2859 && BYTES_BIG_ENDIAN
2571 #endif 2860 #endif
2572 ) 2861 )
2573 { 2862 {
2574 rtx dest = gen_rtx_REG (word_mode, REGNO (reg)); 2863 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2575 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 2864 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2576 enum tree_code dir = (BYTES_BIG_ENDIAN 2865 enum tree_code dir = (BYTES_BIG_ENDIAN
2577 ? RSHIFT_EXPR : LSHIFT_EXPR); 2866 ? RSHIFT_EXPR : LSHIFT_EXPR);
2578 rtx x; 2867 rtx x;
2579 2868
2580 x = expand_shift (dir, word_mode, dest, shift, dest, 1); 2869 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2604 } 2893 }
2605 2894
2606 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments 2895 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2607 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY 2896 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2608 bytes, then we would need to push some additional bytes to pad the 2897 bytes, then we would need to push some additional bytes to pad the
2609 arguments. So, we compute an adjustment to the stack pointer for an 2898 arguments. So, we try to compute an adjustment to the stack pointer for an
2610 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE 2899 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2611 bytes. Then, when the arguments are pushed the stack will be perfectly 2900 bytes. Then, when the arguments are pushed the stack will be perfectly
2612 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should 2901 aligned.
2613 be popped after the call. Returns the adjustment. */ 2902
2614 2903 Return true if this optimization is possible, storing the adjustment
2615 static int 2904 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2616 combine_pending_stack_adjustment_and_call (int unadjusted_args_size, 2905 bytes that should be popped after the call. */
2906
2907 static bool
2908 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2909 poly_int64 unadjusted_args_size,
2617 struct args_size *args_size, 2910 struct args_size *args_size,
2618 unsigned int preferred_unit_stack_boundary) 2911 unsigned int preferred_unit_stack_boundary)
2619 { 2912 {
2620 /* The number of bytes to pop so that the stack will be 2913 /* The number of bytes to pop so that the stack will be
2621 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ 2914 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2622 HOST_WIDE_INT adjustment; 2915 poly_int64 adjustment;
2623 /* The alignment of the stack after the arguments are pushed, if we 2916 /* The alignment of the stack after the arguments are pushed, if we
2624 just pushed the arguments without adjusting the stack here. */ 2917 just pushed the arguments without adjusting the stack here. */
2625 unsigned HOST_WIDE_INT unadjusted_alignment; 2918 unsigned HOST_WIDE_INT unadjusted_alignment;
2626 2919
2627 unadjusted_alignment 2920 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2628 = ((stack_pointer_delta + unadjusted_args_size) 2921 preferred_unit_stack_boundary,
2629 % preferred_unit_stack_boundary); 2922 &unadjusted_alignment))
2923 return false;
2630 2924
2631 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes 2925 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2632 as possible -- leaving just enough left to cancel out the 2926 as possible -- leaving just enough left to cancel out the
2633 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the 2927 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2634 PENDING_STACK_ADJUST is non-negative, and congruent to 2928 PENDING_STACK_ADJUST is non-negative, and congruent to
2635 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */ 2929 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2636 2930
2637 /* Begin by trying to pop all the bytes. */ 2931 /* Begin by trying to pop all the bytes. */
2638 unadjusted_alignment 2932 unsigned HOST_WIDE_INT tmp_misalignment;
2639 = (unadjusted_alignment 2933 if (!known_misalignment (pending_stack_adjust,
2640 - (pending_stack_adjust % preferred_unit_stack_boundary)); 2934 preferred_unit_stack_boundary,
2935 &tmp_misalignment))
2936 return false;
2937 unadjusted_alignment -= tmp_misalignment;
2641 adjustment = pending_stack_adjust; 2938 adjustment = pending_stack_adjust;
2642 /* Push enough additional bytes that the stack will be aligned 2939 /* Push enough additional bytes that the stack will be aligned
2643 after the arguments are pushed. */ 2940 after the arguments are pushed. */
2644 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment) 2941 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2645 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; 2942 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2943
2944 /* We need to know whether the adjusted argument size
2945 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2946 or a deallocation. */
2947 if (!ordered_p (adjustment, unadjusted_args_size))
2948 return false;
2646 2949
2647 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of 2950 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2648 bytes after the call. The right number is the entire 2951 bytes after the call. The right number is the entire
2649 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required 2952 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2650 by the arguments in the first place. */ 2953 by the arguments in the first place. */
2651 args_size->constant 2954 args_size->constant
2652 = pending_stack_adjust - adjustment + unadjusted_args_size; 2955 = pending_stack_adjust - adjustment + unadjusted_args_size;
2653 2956
2654 return adjustment; 2957 *adjustment_out = adjustment;
2958 return true;
2655 } 2959 }
2656 2960
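The arithmetic is easier to check with concrete numbers, as in this plain-integer sketch (the real function works on poly_int64 values and returns false whenever a misalignment is not a compile-time constant; combine_adjustment is an illustrative name):

#include <cassert>
#include <cstdint>

/* Sketch: how many pending bytes to pop now so that pushing
   UNADJUSTED_ARGS_SIZE argument bytes leaves the stack aligned to
   BOUNDARY.  Assumes non-negative inputs so C++ % matches the
   unsigned modular arithmetic used above.  */
static int64_t
combine_adjustment (int64_t stack_pointer_delta,
                    int64_t unadjusted_args_size,
                    int64_t pending_stack_adjust, int64_t boundary)
{
  int64_t misalign = (stack_pointer_delta + unadjusted_args_size) % boundary;
  misalign = (misalign - pending_stack_adjust % boundary + boundary) % boundary;
  int64_t adjustment = pending_stack_adjust;
  if (boundary > 1 && misalign != 0)
    adjustment -= boundary - misalign;
  /* Postcondition: pop ADJUSTMENT, push the args, and the stack is
     once again a multiple of BOUNDARY.  */
  assert ((stack_pointer_delta - adjustment + unadjusted_args_size)
          % boundary == 0);
  return adjustment;
}

int main ()
{
  /* Boundary 16, delta 8, args 20, pending 20: pop 12 of the 20
     pending bytes, since 8 - 12 + 20 = 16 is 16-byte aligned.  */
  assert (combine_adjustment (8, 20, 20, 16) == 12);
}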
2657 /* Scan expression X to check that it does not dereference any argument 2961 /* Scan expression X to check that it does not dereference any argument
2658 slots already clobbered by tail call arguments (as noted in the 2962 slots already clobbered by tail call arguments (as noted in the
2659 stored_args_map bitmap). 2963 stored_args_map bitmap).
2675 /* We need not check the operands of the CALL expression itself. */ 2979 /* We need not check the operands of the CALL expression itself. */
2676 if (code == CALL) 2980 if (code == CALL)
2677 return 0; 2981 return 0;
2678 2982
2679 if (code == MEM) 2983 if (code == MEM)
2680 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0), 2984 return (mem_might_overlap_already_clobbered_arg_p
2681 GET_MODE_SIZE (GET_MODE (x))); 2985 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2682 2986
2683 /* Scan all subexpressions. */ 2987 /* Scan all subexpressions. */
2684 fmt = GET_RTX_FORMAT (code); 2988 fmt = GET_RTX_FORMAT (code);
2685 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) 2989 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2686 { 2990 {
2708 3012
2709 static int 3013 static int
2710 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg, 3014 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2711 int mark_stored_args_map) 3015 int mark_stored_args_map)
2712 { 3016 {
2713 int low, high; 3017 poly_uint64 low, high;
3018 unsigned HOST_WIDE_INT const_low, const_high;
2714 3019
2715 if (insn == NULL_RTX) 3020 if (insn == NULL_RTX)
2716 insn = get_insns (); 3021 insn = get_insns ();
2717 else 3022 else
2718 insn = NEXT_INSN (insn); 3023 insn = NEXT_INSN (insn);
2726 { 3031 {
2727 if (ARGS_GROW_DOWNWARD) 3032 if (ARGS_GROW_DOWNWARD)
2728 low = -arg->locate.slot_offset.constant - arg->locate.size.constant; 3033 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2729 else 3034 else
2730 low = arg->locate.slot_offset.constant; 3035 low = arg->locate.slot_offset.constant;
2731 3036 high = low + arg->locate.size.constant;
2732 for (high = low + arg->locate.size.constant; low < high; low++) 3037
2733 bitmap_set_bit (stored_args_map, low); 3038 const_low = constant_lower_bound (low);
3039 if (high.is_constant (&const_high))
3040 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3041 bitmap_set_bit (stored_args_map, i);
3042 else
3043 stored_args_watermark = MIN (stored_args_watermark, const_low);
2734 } 3044 }
2735 return insn != NULL_RTX; 3045 return insn != NULL_RTX;
2736 } 3046 }
2737 3047
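The marking step above is the writer side of the bitmap-plus-watermark scheme used by mem_might_overlap_already_clobbered_arg_p: a range with a constant end sets individual bits, while a range whose end is not a compile-time constant lowers stored_args_watermark so that every higher offset is conservatively treated as stored. A matching sketch, using the same illustrative types as the query sketch earlier:

#include <algorithm>
#include <cstdint>
#include <optional>
#include <vector>

/* Sketch: record that argument bytes [LOW, HIGH) have been stored.
   A non-constant HIGH is modeled as std::nullopt; in that case the
   watermark is lowered instead of setting bits.  (The clamp to the
   bitmap size is a safety net specific to this sketch.)  */
static void
record_stored_range (uint64_t low, std::optional<uint64_t> high,
                     std::vector<bool> &stored_args, uint64_t &watermark)
{
  if (high)
    for (uint64_t i = low; i < *high && i < stored_args.size (); ++i)
      stored_args[i] = true;
  else
    watermark = std::min (watermark, low);
}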
2738 /* Given that a function returns a value of mode MODE at the most 3048 /* Given that a function returns a value of mode MODE at the most
2740 as specified by LEFT_P. Return true if some action was needed. */ 3050 as specified by LEFT_P. Return true if some action was needed. */
2741 3051
2742 bool 3052 bool
2743 shift_return_value (machine_mode mode, bool left_p, rtx value) 3053 shift_return_value (machine_mode mode, bool left_p, rtx value)
2744 { 3054 {
2745 HOST_WIDE_INT shift;
2746
2747 gcc_assert (REG_P (value) && HARD_REGISTER_P (value)); 3055 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2748 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode); 3056 machine_mode value_mode = GET_MODE (value);
2749 if (shift == 0) 3057 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3058
3059 if (known_eq (shift, 0))
2750 return false; 3060 return false;
2751 3061
2752 /* Use ashr rather than lshr for right shifts. This is for the benefit 3062 /* Use ashr rather than lshr for right shifts. This is for the benefit
2753 of the MIPS port, which requires SImode values to be sign-extended 3063 of the MIPS port, which requires SImode values to be sign-extended
2754 when stored in 64-bit registers. */ 3064 when stored in 64-bit registers. */
2755 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab, 3065 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
2756 value, GEN_INT (shift), value, 1, OPTAB_WIDEN)) 3066 value, gen_int_shift_amount (value_mode, shift),
3067 value, 1, OPTAB_WIDEN))
2757 gcc_unreachable (); 3068 gcc_unreachable ();
2758 return true; 3069 return true;
2759 } 3070 }
2760 3071
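The conversion of shift_return_value leaves the underlying arithmetic intact: a value returned at the most significant end of a wider hard register must be shifted by the difference in bit sizes, and right shifts are arithmetic so that sign extension is preserved. A plain-integer sketch under the assumption that both sizes are compile-time constants (shift_msb_value is an illustrative name):

#include <cstdint>

/* Sketch: e.g. a 32-bit value in a 64-bit register shifts by
   64 - 32 = 32 bits.  LEFT_P chooses the direction, mirroring the
   ashl/ashr choice above.  */
static int64_t
shift_msb_value (int64_t reg_value, unsigned reg_bits,
                 unsigned value_bits, bool left_p)
{
  unsigned shift = reg_bits - value_bits;
  if (shift == 0)
    return reg_value;
  return left_p
         ? (int64_t) ((uint64_t) reg_value << shift) /* justify to msb */
         : reg_value >> shift; /* arithmetic right shift; sign-extends
                                  on typical targets */
}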
2761 #ifndef noCbC 3072 #ifndef noCbC
2873 3184
2874 /* If this function requires more stack slots than the current 3185 /* If this function requires more stack slots than the current
2875 function, we cannot change it into a sibling call. 3186 function, we cannot change it into a sibling call.
2876 crtl->args.pretend_args_size is not part of the 3187 crtl->args.pretend_args_size is not part of the
2877 stack allocated by our caller. */ 3188 stack allocated by our caller. */
2878 if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size)) 3189 if (maybe_gt (args_size.constant,
3190 crtl->args.size - crtl->args.pretend_args_size))
2879 { 3191 {
2880 maybe_complain_about_tail_call (exp, 3192 maybe_complain_about_tail_call (exp,
2881 "callee required more stack slots" 3193 "callee required more stack slots"
2882 " than the caller"); 3194 " than the caller");
2883 return false; 3195 return false;
2884 } 3196 }
2885 3197
2886 /* If the callee pops its own arguments, then it must pop exactly 3198 /* If the callee pops its own arguments, then it must pop exactly
2887 the same number of arguments as the current function. */ 3199 the same number of arguments as the current function. */
2888 if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant) 3200 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
2889 != targetm.calls.return_pops_args (current_function_decl, 3201 args_size.constant),
2890 TREE_TYPE (current_function_decl), 3202 targetm.calls.return_pops_args (current_function_decl,
2891 crtl->args.size)) 3203 TREE_TYPE
3204 (current_function_decl),
3205 crtl->args.size)))
2892 { 3206 {
2893 maybe_complain_about_tail_call (exp, 3207 maybe_complain_about_tail_call (exp,
2894 "inconsistent number of" 3208 "inconsistent number of"
2895 " popped arguments"); 3209 " popped arguments");
2896 return false; 3210 return false;
2939 int pass; 3253 int pass;
2940 3254
2941 /* Register in which non-BLKmode value will be returned, 3255 /* Register in which non-BLKmode value will be returned,
2942 or 0 if no value or if value is BLKmode. */ 3256 or 0 if no value or if value is BLKmode. */
2943 rtx valreg; 3257 rtx valreg;
2944 /* Register(s) in which bounds are returned. */
2945 rtx valbnd = NULL;
2946 /* Address where we should return a BLKmode value; 3258 /* Address where we should return a BLKmode value;
2947 0 if value not BLKmode. */ 3259 0 if value not BLKmode. */
2948 rtx structure_value_addr = 0; 3260 rtx structure_value_addr = 0;
2949 /* Nonzero if that address is being passed by treating it as 3261 /* Nonzero if that address is being passed by treating it as
2950 an extra, implicit first parameter. Otherwise, 3262 an extra, implicit first parameter. Otherwise,
2953 /* Holds the value of implicit argument for the struct value. */ 3265 /* Holds the value of implicit argument for the struct value. */
2954 tree structure_value_addr_value = NULL_TREE; 3266 tree structure_value_addr_value = NULL_TREE;
2955 /* Size of aggregate value wanted, or zero if none wanted 3267 /* Size of aggregate value wanted, or zero if none wanted
2956 or if we are using the non-reentrant PCC calling convention 3268 or if we are using the non-reentrant PCC calling convention
2957 or expecting the value in registers. */ 3269 or expecting the value in registers. */
2958 HOST_WIDE_INT struct_value_size = 0; 3270 poly_int64 struct_value_size = 0;
2959 /* Nonzero if called function returns an aggregate in memory PCC style, 3271 /* Nonzero if called function returns an aggregate in memory PCC style,
2960 by returning the address of where to find it. */ 3272 by returning the address of where to find it. */
2961 int pcc_struct_value = 0; 3273 int pcc_struct_value = 0;
2962 rtx struct_value = 0; 3274 rtx struct_value = 0;
2963 3275
2976 3288
2977 /* Total size in bytes of all the stack-parms scanned so far. */ 3289 /* Total size in bytes of all the stack-parms scanned so far. */
2978 struct args_size args_size; 3290 struct args_size args_size;
2979 struct args_size adjusted_args_size; 3291 struct args_size adjusted_args_size;
2980 /* Size of arguments before any adjustments (such as rounding). */ 3292 /* Size of arguments before any adjustments (such as rounding). */
2981 int unadjusted_args_size; 3293 poly_int64 unadjusted_args_size;
2982 /* Data on reg parms scanned so far. */ 3294 /* Data on reg parms scanned so far. */
2983 CUMULATIVE_ARGS args_so_far_v; 3295 CUMULATIVE_ARGS args_so_far_v;
2984 cumulative_args_t args_so_far; 3296 cumulative_args_t args_so_far;
2985 /* Nonzero if a reg parm has been scanned. */ 3297 /* Nonzero if a reg parm has been scanned. */
2986 int reg_parm_seen; 3298 int reg_parm_seen;
3009 saved, if any. */ 3321 saved, if any. */
3010 int low_to_save, high_to_save; 3322 int low_to_save, high_to_save;
3011 rtx save_area = 0; /* Place that it is saved */ 3323 rtx save_area = 0; /* Place that it is saved */
3012 #endif 3324 #endif
3013 3325
3014 int initial_highest_arg_in_use = highest_outgoing_arg_in_use; 3326 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3015 char *initial_stack_usage_map = stack_usage_map; 3327 char *initial_stack_usage_map = stack_usage_map;
3328 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3016 char *stack_usage_map_buf = NULL; 3329 char *stack_usage_map_buf = NULL;
3017 3330
3018 int old_stack_allocated; 3331 poly_int64 old_stack_allocated;
3019 3332
3020 /* State variables to track stack modifications. */ 3333 /* State variables to track stack modifications. */
3021 rtx old_stack_level = 0; 3334 rtx old_stack_level = 0;
3022 int old_stack_arg_under_construction = 0; 3335 int old_stack_arg_under_construction = 0;
3023 int old_pending_adj = 0; 3336 poly_int64 old_pending_adj = 0;
3024 int old_inhibit_defer_pop = inhibit_defer_pop; 3337 int old_inhibit_defer_pop = inhibit_defer_pop;
3025 3338
3026 /* Some stack pointer alterations we make are performed via 3339 /* Some stack pointer alterations we make are performed via
3027 allocate_dynamic_stack_space. This modifies the stack_pointer_delta, 3340 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3028 which we then also need to save/restore along the way. */ 3341 which we then also need to save/restore along the way. */
3029 int old_stack_pointer_delta = 0; 3342 poly_int64 old_stack_pointer_delta = 0;
3030 3343
3031 rtx call_fusage; 3344 rtx call_fusage;
3032 tree addr = CALL_EXPR_FN (exp); 3345 tree addr = CALL_EXPR_FN (exp);
3033 int i; 3346 int i;
3034 /* The alignment of the stack, in bits. */ 3347 /* The alignment of the stack, in bits. */
3114 { 3427 {
3115 pcc_struct_value = 1; 3428 pcc_struct_value = 1;
3116 } 3429 }
3117 #else /* not PCC_STATIC_STRUCT_RETURN */ 3430 #else /* not PCC_STATIC_STRUCT_RETURN */
3118 { 3431 {
3119 struct_value_size = int_size_in_bytes (rettype); 3432 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3433 struct_value_size = -1;
3120 3434
3121 /* Even if it is semantically safe to use the target as the return 3435 /* Even if it is semantically safe to use the target as the return
3122 slot, it may be not sufficiently aligned for the return type. */ 3436 slot, it may be not sufficiently aligned for the return type. */
3123 if (CALL_EXPR_RETURN_SLOT_OPT (exp) 3437 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3124 && target 3438 && target
3125 && MEM_P (target) 3439 && MEM_P (target)
3126 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype) 3440 /* If rettype is addressable, we may not create a temporary.
3127 && targetm.slow_unaligned_access (TYPE_MODE (rettype), 3441 If target is properly aligned at runtime and the compiler
3128 MEM_ALIGN (target)))) 3442 just doesn't know about it, it will work fine, otherwise it
3443 will be UB. */
3444 && (TREE_ADDRESSABLE (rettype)
3445 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3446 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3447 MEM_ALIGN (target)))))
3129 structure_value_addr = XEXP (target, 0); 3448 structure_value_addr = XEXP (target, 0);
3130 else 3449 else
3131 { 3450 {
3132 /* For variable-sized objects, we must be called with a target 3451 /* For variable-sized objects, we must be called with a target
3133 specified. If we were to allocate space on the stack here, 3452 specified. If we were to allocate space on the stack here,
3201 (Pmode, structure_value_addr)) 3520 (Pmode, structure_value_addr))
3202 : structure_value_addr); 3521 : structure_value_addr);
3203 3522
3204 structure_value_addr_value = 3523 structure_value_addr_value =
3205 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); 3524 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3206 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1; 3525 structure_value_addr_parm = 1;
3207 } 3526 }
3208 3527
3209 /* Count the arguments and set NUM_ACTUALS. */ 3528 /* Count the arguments and set NUM_ACTUALS. */
3210 num_actuals = 3529 num_actuals =
3211 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm; 3530 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3288 if (structure_value_addr 3607 if (structure_value_addr
3289 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) 3608 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3290 || reg_mentioned_p (virtual_outgoing_args_rtx, 3609 || reg_mentioned_p (virtual_outgoing_args_rtx,
3291 structure_value_addr)) 3610 structure_value_addr))
3292 && (args_size.var 3611 && (args_size.var
3293 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant))) 3612 || (!ACCUMULATE_OUTGOING_ARGS
3613 && maybe_ne (args_size.constant, 0))))
3294 structure_value_addr = copy_to_reg (structure_value_addr); 3614 structure_value_addr = copy_to_reg (structure_value_addr);
3295 3615
3296 /* Tail calls can make things harder to debug, and we've traditionally 3616 /* Tail calls can make things harder to debug, and we've traditionally
3297 pushed these optimizations into -O2. Don't try if we're already 3617 pushed these optimizations into -O2. Don't try if we're already
3298 expanding a call, as that means we're an argument. Don't try if 3618 expanding a call, as that means we're an argument. Don't try if
3299 there are cleanups, as we know there's code to follow the call. */ 3619 there are cleanups, as we know there's code to follow the call. */
3300 3620
3301 // Allow tail-call optimization even without the -O2 option (code segments only) 3621
3622
3302 if (currently_expanding_call++ != 0 3623 if (currently_expanding_call++ != 0
3303 #ifndef noCbC 3624 #ifndef noCbC
3304 || ((!fntype || !CbC_IS_CODE_SEGMENT (fntype)) && !flag_optimize_sibling_calls) 3625 || ((!fntype || !CbC_IS_CODE_SEGMENT (fntype)) && (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp)))
3305 #else 3626 #else
3306 || !flag_optimize_sibling_calls 3627 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
3307 #endif 3628 #endif
3308 || args_size.var 3629 || args_size.var
3309 || dbg_cnt (tail_call) == false) 3630 || dbg_cnt (tail_call) == false)
3310 try_tail_call = 0; 3631 try_tail_call = 0;
3311 3632
3463 Also, do all pending adjustments now if there is any chance 3784 Also, do all pending adjustments now if there is any chance
3464 this might be a call to alloca or if we are expanding a sibling 3785 this might be a call to alloca or if we are expanding a sibling
3465 call sequence. 3786 call sequence.
3466 Also do the adjustments before a throwing call, otherwise 3787 Also do the adjustments before a throwing call, otherwise
3467 exception handling can fail; PR 19225. */ 3788 exception handling can fail; PR 19225. */
3468 if (pending_stack_adjust >= 32 3789 if (maybe_ge (pending_stack_adjust, 32)
3469 || (pending_stack_adjust > 0 3790 || (maybe_ne (pending_stack_adjust, 0)
3470 && (flags & ECF_MAY_BE_ALLOCA)) 3791 && (flags & ECF_MAY_BE_ALLOCA))
3471 || (pending_stack_adjust > 0 3792 || (maybe_ne (pending_stack_adjust, 0)
3472 && flag_exceptions && !(flags & ECF_NOTHROW)) 3793 && flag_exceptions && !(flags & ECF_NOTHROW))
3473 || pass == 0) 3794 || pass == 0)
3474 do_pending_stack_adjust (); 3795 do_pending_stack_adjust ();
3475 3796
3476 /* Precompute any arguments as needed. */ 3797 /* Precompute any arguments as needed. */
3527 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size); 3848 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3528 else 3849 else
3529 argblock 3850 argblock
3530 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size); 3851 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3531 3852
3532 stored_args_map = sbitmap_alloc (args_size.constant); 3853 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3854 stored_args_map = sbitmap_alloc (map_size);
3533 bitmap_clear (stored_args_map); 3855 bitmap_clear (stored_args_map);
3856 stored_args_watermark = HOST_WIDE_INT_M1U;
3534 } 3857 }
3535 3858
3536 /* If we have no actual push instructions, or shouldn't use them, 3859 /* If we have no actual push instructions, or shouldn't use them,
3537 make space for all args right now. */ 3860 make space for all args right now. */
3538 else if (adjusted_args_size.var != 0) 3861 else if (adjusted_args_size.var != 0)
3558 /* Note that we must go through the motions of allocating an argument 3881 /* Note that we must go through the motions of allocating an argument
3559 block even if the size is zero because we may be storing args 3882 block even if the size is zero because we may be storing args
3560 in the area reserved for register arguments, which may be part of 3883 in the area reserved for register arguments, which may be part of
3561 the stack frame. */ 3884 the stack frame. */
3562 3885
3563 int needed = adjusted_args_size.constant; 3886 poly_int64 needed = adjusted_args_size.constant;
3564 3887
3565 /* Store the maximum argument space used. It will be pushed by 3888 /* Store the maximum argument space used. It will be pushed by
3566 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow 3889 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3567 checking). */ 3890 checking). */
3568 3891
3569 if (needed > crtl->outgoing_args_size) 3892 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3570 crtl->outgoing_args_size = needed; 3893 needed);
3571 3894
3572 if (must_preallocate) 3895 if (must_preallocate)
3573 { 3896 {
3574 if (ACCUMULATE_OUTGOING_ARGS) 3897 if (ACCUMULATE_OUTGOING_ARGS)
3575 { 3898 {
3591 the map must be allocated for its entire size, not just 3914 the map must be allocated for its entire size, not just
3592 the part that is the responsibility of the caller. */ 3915 the part that is the responsibility of the caller. */
3593 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 3916 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3594 needed += reg_parm_stack_space; 3917 needed += reg_parm_stack_space;
3595 3918
3919 poly_int64 limit = needed;
3596 if (ARGS_GROW_DOWNWARD) 3920 if (ARGS_GROW_DOWNWARD)
3597 highest_outgoing_arg_in_use 3921 limit += 1;
3598 = MAX (initial_highest_arg_in_use, needed + 1); 3922
3599 else 3923 /* For polynomial sizes, this is the maximum possible
3600 highest_outgoing_arg_in_use 3924 size needed for arguments with a constant size
3601 = MAX (initial_highest_arg_in_use, needed); 3925 and offset. */
3926 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3927 highest_outgoing_arg_in_use
3928 = MAX (initial_highest_arg_in_use, const_limit);
3602 3929
3603 free (stack_usage_map_buf); 3930 free (stack_usage_map_buf);
3604 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); 3931 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3605 stack_usage_map = stack_usage_map_buf; 3932 stack_usage_map = stack_usage_map_buf;
3606 3933
3621 3948
3622 argblock = virtual_outgoing_args_rtx; 3949 argblock = virtual_outgoing_args_rtx;
3623 } 3950 }
3624 else 3951 else
3625 { 3952 {
3626 if (inhibit_defer_pop == 0) 3953 /* Try to reuse some or all of the pending_stack_adjust
3954 to get this space. */
3955 if (inhibit_defer_pop == 0
3956 && (combine_pending_stack_adjustment_and_call
3957 (&needed,
3958 unadjusted_args_size,
3959 &adjusted_args_size,
3960 preferred_unit_stack_boundary)))
3627 { 3961 {
3628 /* Try to reuse some or all of the pending_stack_adjust
3629 to get this space. */
3630 needed
3631 = (combine_pending_stack_adjustment_and_call
3632 (unadjusted_args_size,
3633 &adjusted_args_size,
3634 preferred_unit_stack_boundary));
3635
3636 /* combine_pending_stack_adjustment_and_call computes 3962 /* combine_pending_stack_adjustment_and_call computes
3637 an adjustment before the arguments are allocated. 3963 an adjustment before the arguments are allocated.
3638 Account for them and see whether or not the stack 3964 Account for them and see whether or not the stack
3639 needs to go up or down. */ 3965 needs to go up or down. */
3640 needed = unadjusted_args_size - needed; 3966 needed = unadjusted_args_size - needed;
3641 3967
3642 if (needed < 0) 3968 /* Checked by
3969 combine_pending_stack_adjustment_and_call. */
3970 gcc_checking_assert (ordered_p (needed, 0));
3971 if (maybe_lt (needed, 0))
3643 { 3972 {
3644 /* We're releasing stack space. */ 3973 /* We're releasing stack space. */
3645 /* ??? We can avoid any adjustment at all if we're 3974 /* ??? We can avoid any adjustment at all if we're
3646 already aligned. FIXME. */ 3975 already aligned. FIXME. */
3647 pending_stack_adjust = -needed; 3976 pending_stack_adjust = -needed;
3654 pending_stack_adjust = 0; 3983 pending_stack_adjust = 0;
3655 } 3984 }
3656 3985
3657 /* Special case this because overhead of `push_block' in 3986 /* Special case this because overhead of `push_block' in
3658 this case is non-trivial. */ 3987 this case is non-trivial. */
3659 if (needed == 0) 3988 if (known_eq (needed, 0))
3660 argblock = virtual_outgoing_args_rtx; 3989 argblock = virtual_outgoing_args_rtx;
3661 else 3990 else
3662 { 3991 {
3663 argblock = push_block (GEN_INT (needed), 0, 0); 3992 rtx needed_rtx = gen_int_mode (needed, Pmode);
3993 argblock = push_block (needed_rtx, 0, 0);
3664 if (ARGS_GROW_DOWNWARD) 3994 if (ARGS_GROW_DOWNWARD)
3665 argblock = plus_constant (Pmode, argblock, needed); 3995 argblock = plus_constant (Pmode, argblock, needed);
3666 } 3996 }
3667 3997
3668 /* We only really need to call `copy_to_reg' in the case 3998 /* We only really need to call `copy_to_reg' in the case
3684 function returning a BLKmode struct) to initialize 4014 function returning a BLKmode struct) to initialize
3685 an argument. */ 4015 an argument. */
3686 if (stack_arg_under_construction) 4016 if (stack_arg_under_construction)
3687 { 4017 {
3688 rtx push_size 4018 rtx push_size
3689 = GEN_INT (adjusted_args_size.constant 4019 = (gen_int_mode
3690 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype 4020 (adjusted_args_size.constant
3691 : TREE_TYPE (fndecl))) ? 0 4021 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3692 : reg_parm_stack_space)); 4022 : TREE_TYPE (fndecl))
4023 ? 0 : reg_parm_stack_space), Pmode));
3693 if (old_stack_level == 0) 4024 if (old_stack_level == 0)
3694 { 4025 {
3695 emit_stack_save (SAVE_BLOCK, &old_stack_level); 4026 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3696 old_stack_pointer_delta = stack_pointer_delta; 4027 old_stack_pointer_delta = stack_pointer_delta;
3697 old_pending_adj = pending_stack_adjust; 4028 old_pending_adj = pending_stack_adjust;
3706 /* Make a new map for the new argument list. */ 4037 /* Make a new map for the new argument list. */
3707 free (stack_usage_map_buf); 4038 free (stack_usage_map_buf);
3708 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use); 4039 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3709 stack_usage_map = stack_usage_map_buf; 4040 stack_usage_map = stack_usage_map_buf;
3710 highest_outgoing_arg_in_use = 0; 4041 highest_outgoing_arg_in_use = 0;
4042 stack_usage_watermark = HOST_WIDE_INT_M1U;
3711 } 4043 }
3712 /* We can pass TRUE as the 4th argument because we just 4044 /* We can pass TRUE as the 4th argument because we just
3713 saved the stack pointer and will restore it right after 4045 saved the stack pointer and will restore it right after
3714 the call. */ 4046 the call. */
3715 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT, 4047 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3741 4073
3742 OK_DEFER_POP; 4074 OK_DEFER_POP;
3743 4075
3744 /* Perform stack alignment before the first push (the last arg). */ 4076 /* Perform stack alignment before the first push (the last arg). */
3745 if (argblock == 0 4077 if (argblock == 0
3746 && adjusted_args_size.constant > reg_parm_stack_space 4078 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
3747 && adjusted_args_size.constant != unadjusted_args_size) 4079 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
3748 { 4080 {
3749 /* When the stack adjustment is pending, we get better code 4081 /* When the stack adjustment is pending, we get better code
3750 by combining the adjustments. */ 4082 by combining the adjustments. */
3751 if (pending_stack_adjust 4083 if (maybe_ne (pending_stack_adjust, 0)
3752 && ! inhibit_defer_pop) 4084 && ! inhibit_defer_pop
3753 { 4085 && (combine_pending_stack_adjustment_and_call
3754 pending_stack_adjust 4086 (&pending_stack_adjust,
3755 = (combine_pending_stack_adjustment_and_call 4087 unadjusted_args_size,
3756 (unadjusted_args_size, 4088 &adjusted_args_size,
3757 &adjusted_args_size, 4089 preferred_unit_stack_boundary)))
3758 preferred_unit_stack_boundary)); 4090 do_pending_stack_adjust ();
3759 do_pending_stack_adjust ();
3760 }
3761 else if (argblock == 0) 4091 else if (argblock == 0)
3762 anti_adjust_stack (GEN_INT (adjusted_args_size.constant 4092 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
3763 - unadjusted_args_size)); 4093 - unadjusted_args_size,
4094 Pmode));
3764 } 4095 }
3765 /* Now that the stack is properly aligned, pops can't safely 4096 /* Now that the stack is properly aligned, pops can't safely
3766 be deferred during the evaluation of the arguments. */ 4097 be deferred during the evaluation of the arguments. */
3767 NO_DEFER_POP; 4098 NO_DEFER_POP;
3768 4099
3772 if (flag_stack_usage_info 4103 if (flag_stack_usage_info
3773 && !ACCUMULATE_OUTGOING_ARGS 4104 && !ACCUMULATE_OUTGOING_ARGS
3774 && pass 4105 && pass
3775 && adjusted_args_size.var == 0) 4106 && adjusted_args_size.var == 0)
3776 { 4107 {
3777 int pushed = adjusted_args_size.constant + pending_stack_adjust; 4108 poly_int64 pushed = (adjusted_args_size.constant
3778 if (pushed > current_function_pushed_stack_size) 4109 + pending_stack_adjust);
3779 current_function_pushed_stack_size = pushed; 4110 current_function_pushed_stack_size
4111 = upper_bound (current_function_pushed_stack_size, pushed);
3780 } 4112 }
3781 4113
3782 funexp = rtx_for_function_call (fndecl, addr); 4114 funexp = rtx_for_function_call (fndecl, addr);
3783 4115
3784 if (CALL_EXPR_STATIC_CHAIN (exp)) 4116 if (CALL_EXPR_STATIC_CHAIN (exp))
3800 Parms which have partial registers are not stored here, 4132 Parms which have partial registers are not stored here,
3801 but we do preallocate space here if they want that. */ 4133 but we do preallocate space here if they want that. */
3802 4134
3803 for (i = 0; i < num_actuals; i++) 4135 for (i = 0; i < num_actuals; i++)
3804 { 4136 {
3805 /* Delay bounds until all other args are stored. */ 4137 if (args[i].reg == 0 || args[i].pass_on_stack)
3806 if (POINTER_BOUNDS_P (args[i].tree_value))
3807 continue;
3808 else if (args[i].reg == 0 || args[i].pass_on_stack)
3809 { 4138 {
3810 rtx_insn *before_arg = get_last_insn (); 4139 rtx_insn *before_arg = get_last_insn ();
3811 4140
3812 /* We don't allow passing huge (> 2^30 B) arguments 4141 /* We don't allow passing huge (> 2^30 B) arguments
3813 by value. It would cause an overflow later on. */ 4142 by value. It would cause an overflow later on. */
3814 if (adjusted_args_size.constant 4143 if (constant_lower_bound (adjusted_args_size.constant)
3815 >= (1 << (HOST_BITS_PER_INT - 2))) 4144 >= (1 << (HOST_BITS_PER_INT - 2)))
3816 { 4145 {
3817 sorry ("passing too large argument on stack"); 4146 sorry ("passing too large argument on stack");
3818 continue; 4147 continue;
3819 } 4148 }
3877 if (!any_regs) 4206 if (!any_regs)
3878 targetm.calls.call_args (pc_rtx, funtype); 4207 targetm.calls.call_args (pc_rtx, funtype);
3879 4208
3880 /* Figure out the register where the value, if any, will come back. */ 4209 /* Figure out the register where the value, if any, will come back. */
3881 valreg = 0; 4210 valreg = 0;
3882 valbnd = 0;
3883 if (TYPE_MODE (rettype) != VOIDmode 4211 if (TYPE_MODE (rettype) != VOIDmode
3884 && ! structure_value_addr) 4212 && ! structure_value_addr)
3885 { 4213 {
3886 if (pcc_struct_value) 4214 if (pcc_struct_value)
3887 { 4215 valreg = hard_function_value (build_pointer_type (rettype),
3888 valreg = hard_function_value (build_pointer_type (rettype), 4216 fndecl, NULL, (pass == 0));
3889 fndecl, NULL, (pass == 0));
3890 if (CALL_WITH_BOUNDS_P (exp))
3891 valbnd = targetm.calls.
3892 chkp_function_value_bounds (build_pointer_type (rettype),
3893 fndecl, (pass == 0));
3894 }
3895 else 4217 else
3896 { 4218 valreg = hard_function_value (rettype, fndecl, fntype,
3897 valreg = hard_function_value (rettype, fndecl, fntype, 4219 (pass == 0));
3898 (pass == 0));
3899 if (CALL_WITH_BOUNDS_P (exp))
3900 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
3901 fndecl,
3902 (pass == 0));
3903 }
3904 4220
3905 /* If VALREG is a PARALLEL whose first member has a zero 4221 /* If VALREG is a PARALLEL whose first member has a zero
3906 offset, use that. This is for targets such as m68k that 4222 offset, use that. This is for targets such as m68k that
3907 return the same value in multiple places. */ 4223 return the same value in multiple places. */
3908 if (GET_CODE (valreg) == PARALLEL) 4224 if (GET_CODE (valreg) == PARALLEL)
3914 && GET_MODE (where) == GET_MODE (valreg)) 4230 && GET_MODE (where) == GET_MODE (valreg))
3915 valreg = where; 4231 valreg = where;
3916 } 4232 }
3917 } 4233 }
3918 4234
3919 /* Store all bounds not passed in registers. */
3920 for (i = 0; i < num_actuals; i++)
3921 {
3922 if (POINTER_BOUNDS_P (args[i].tree_value)
3923 && !args[i].reg)
3924 store_bounds (&args[i],
3925 args[i].pointer_arg == -1
3926 ? NULL
3927 : &args[args[i].pointer_arg]);
3928 }
3929
3930 /* If register arguments require space on the stack and stack space 4235 /* If register arguments require space on the stack and stack space
3931 was not preallocated, allocate stack space here for arguments 4236 was not preallocated, allocate stack space here for arguments
3932 passed in registers. */ 4237 passed in registers. */
3933 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) 4238 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3934 && !ACCUMULATE_OUTGOING_ARGS 4239 && !ACCUMULATE_OUTGOING_ARGS
3992 /* All arguments and registers used for the call must be set up by 4297 /* All arguments and registers used for the call must be set up by
3993 now! */ 4298 now! */
3994 4299
3995 /* Stack must be properly aligned now. */ 4300 /* Stack must be properly aligned now. */
3996 gcc_assert (!pass 4301 gcc_assert (!pass
3997 || !(stack_pointer_delta % preferred_unit_stack_boundary)); 4302 || multiple_p (stack_pointer_delta,
4303 preferred_unit_stack_boundary));
3998 4304
3999 /* Generate the actual call instruction. */ 4305 /* Generate the actual call instruction. */
4000 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, 4306 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4001 adjusted_args_size.constant, struct_value_size, 4307 adjusted_args_size.constant, struct_value_size,
4002 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, 4308 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4196 /* Ensure we promote as expected, and get the new unsignedness. */ 4502 /* Ensure we promote as expected, and get the new unsignedness. */
4197 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, 4503 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4198 funtype, 1); 4504 funtype, 1);
4199 gcc_assert (GET_MODE (target) == pmode); 4505 gcc_assert (GET_MODE (target) == pmode);
4200 4506
4201 unsigned int offset = subreg_lowpart_offset (TYPE_MODE (type), 4507 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4202 GET_MODE (target)); 4508 GET_MODE (target));
4203 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); 4509 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4204 SUBREG_PROMOTED_VAR_P (target) = 1; 4510 SUBREG_PROMOTED_VAR_P (target) = 1;
4205 SUBREG_PROMOTED_SET (target, unsignedp); 4511 SUBREG_PROMOTED_SET (target, unsignedp);
4206 } 4512 }
4207 4513
4220 pending_stack_adjust = old_pending_adj; 4526 pending_stack_adjust = old_pending_adj;
4221 old_stack_allocated = stack_pointer_delta - pending_stack_adjust; 4527 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4222 stack_arg_under_construction = old_stack_arg_under_construction; 4528 stack_arg_under_construction = old_stack_arg_under_construction;
4223 highest_outgoing_arg_in_use = initial_highest_arg_in_use; 4529 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4224 stack_usage_map = initial_stack_usage_map; 4530 stack_usage_map = initial_stack_usage_map;
4531 stack_usage_watermark = initial_stack_usage_watermark;
4225 sibcall_failure = 1; 4532 sibcall_failure = 1;
4226 } 4533 }
4227 else if (ACCUMULATE_OUTGOING_ARGS && pass) 4534 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4228 { 4535 {
4229 #ifdef REG_PARM_STACK_SPACE 4536 #ifdef REG_PARM_STACK_SPACE
4244 4551
4245 if (save_mode != BLKmode) 4552 if (save_mode != BLKmode)
4246 emit_move_insn (stack_area, args[i].save_area); 4553 emit_move_insn (stack_area, args[i].save_area);
4247 else 4554 else
4248 emit_block_move (stack_area, args[i].save_area, 4555 emit_block_move (stack_area, args[i].save_area,
4249 GEN_INT (args[i].locate.size.constant), 4556 (gen_int_mode
4557 (args[i].locate.size.constant, Pmode)),
4250 BLOCK_OP_CALL_PARM); 4558 BLOCK_OP_CALL_PARM);
4251 } 4559 }
4252 4560
4253 highest_outgoing_arg_in_use = initial_highest_arg_in_use; 4561 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4254 stack_usage_map = initial_stack_usage_map; 4562 stack_usage_map = initial_stack_usage_map;
4563 stack_usage_watermark = initial_stack_usage_watermark;
4255 } 4564 }
4256 4565
4257 /* If this was alloca, record the new stack level. */ 4566 /* If this was alloca, record the new stack level. */
4258 if (flags & ECF_MAY_BE_ALLOCA) 4567 if (flags & ECF_MAY_BE_ALLOCA)
4259 record_new_stack_level (); 4568 record_new_stack_level ();
4292 { 4601 {
4293 normal_call_insns = insns; 4602 normal_call_insns = insns;
4294 4603
4295 /* Verify that we've deallocated all the stack we used. */ 4604 /* Verify that we've deallocated all the stack we used. */
4296 gcc_assert ((flags & ECF_NORETURN) 4605 gcc_assert ((flags & ECF_NORETURN)
4297 || (old_stack_allocated 4606 || known_eq (old_stack_allocated,
4298 == stack_pointer_delta - pending_stack_adjust)); 4607 stack_pointer_delta
4608 - pending_stack_adjust));
4299 } 4609 }
4300 4610
4301 /* If something prevents making this a sibling call, 4611 /* If something prevents making this a sibling call,
4302 zero out the sequence. */ 4612 zero out the sequence. */
4303 #ifndef noCbC 4613 #ifndef noCbC
4339 4649
4340 currently_expanding_call--; 4650 currently_expanding_call--;
4341 4651
4342 free (stack_usage_map_buf); 4652 free (stack_usage_map_buf);
4343 free (args); 4653 free (args);
4344
4345 /* Join result with returned bounds so caller may use them if needed. */
4346 target = chkp_join_splitted_slot (target, valbnd);
4347
4348 return target; 4654 return target;
4349 } 4655 }
4350 4656
4351 /* A sibling call sequence invalidates any REG_EQUIV notes made for 4657 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4352 this function's incoming arguments. 4658 this function's incoming arguments.
4472 int old_inhibit_defer_pop = inhibit_defer_pop; 4778 int old_inhibit_defer_pop = inhibit_defer_pop;
4473 rtx call_fusage = 0; 4779 rtx call_fusage = 0;
4474 rtx mem_value = 0; 4780 rtx mem_value = 0;
4475 rtx valreg; 4781 rtx valreg;
4476 int pcc_struct_value = 0; 4782 int pcc_struct_value = 0;
4477 int struct_value_size = 0; 4783 poly_int64 struct_value_size = 0;
4478 int flags; 4784 int flags;
4479 int reg_parm_stack_space = 0; 4785 int reg_parm_stack_space = 0;
4480 int needed; 4786 poly_int64 needed;
4481 rtx_insn *before_call; 4787 rtx_insn *before_call;
4482 bool have_push_fusage; 4788 bool have_push_fusage;
4483 tree tfom; /* type_for_mode (outmode, 0) */ 4789 tree tfom; /* type_for_mode (outmode, 0) */
4484 4790
4485 #ifdef REG_PARM_STACK_SPACE 4791 #ifdef REG_PARM_STACK_SPACE
4488 int low_to_save = 0, high_to_save = 0; 4794 int low_to_save = 0, high_to_save = 0;
4489 rtx save_area = 0; /* Place that it is saved. */ 4795 rtx save_area = 0; /* Place that it is saved. */
4490 #endif 4796 #endif
4491 4797
4492 /* Size of the stack reserved for parameter registers. */ 4798 /* Size of the stack reserved for parameter registers. */
4493 int initial_highest_arg_in_use = highest_outgoing_arg_in_use; 4799 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4494 char *initial_stack_usage_map = stack_usage_map; 4800 char *initial_stack_usage_map = stack_usage_map;
4801 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4495 char *stack_usage_map_buf = NULL; 4802 char *stack_usage_map_buf = NULL;
4496 4803
4497 rtx struct_value = targetm.calls.struct_value_rtx (0, 0); 4804 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4498 4805
4499 #ifdef REG_PARM_STACK_SPACE 4806 #ifdef REG_PARM_STACK_SPACE
4710 else 5017 else
4711 /* The argument is passed entirely in registers. See at which 5018 /* The argument is passed entirely in registers. See at which
4712 end it should be padded. */ 5019 end it should be padded. */
4713 argvec[count].locate.where_pad = 5020 argvec[count].locate.where_pad =
4714 BLOCK_REG_PADDING (mode, NULL_TREE, 5021 BLOCK_REG_PADDING (mode, NULL_TREE,
4715 GET_MODE_SIZE (mode) <= UNITS_PER_WORD); 5022 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4716 #endif 5023 #endif
4717 5024
4718 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true); 5025 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4719 } 5026 }
4720 5027
4721 /* If this machine requires an external definition for library 5028 /* If this machine requires an external definition for library
4722 functions, write one out. */ 5029 functions, write one out. */
4723 assemble_external_libcall (fun); 5030 assemble_external_libcall (fun);
4724 5031
4725 original_args_size = args_size; 5032 original_args_size = args_size;
4726 args_size.constant = (((args_size.constant 5033 args_size.constant = (aligned_upper_bound (args_size.constant
4727 + stack_pointer_delta 5034 + stack_pointer_delta,
4728 + STACK_BYTES - 1) 5035 STACK_BYTES)
4729 / STACK_BYTES 5036 - stack_pointer_delta);
4730 * STACK_BYTES) 5037
4731 - stack_pointer_delta); 5038 args_size.constant = upper_bound (args_size.constant,
4732 5039 reg_parm_stack_space);
4733 args_size.constant = MAX (args_size.constant,
4734 reg_parm_stack_space);
4735 5040
4736 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 5041 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4737 args_size.constant -= reg_parm_stack_space; 5042 args_size.constant -= reg_parm_stack_space;
4738 5043
4739 if (args_size.constant > crtl->outgoing_args_size) 5044 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4740 crtl->outgoing_args_size = args_size.constant; 5045 args_size.constant);
4741 5046
4742 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS) 5047 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4743 { 5048 {
4744 int pushed = args_size.constant + pending_stack_adjust; 5049 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4745 if (pushed > current_function_pushed_stack_size) 5050 current_function_pushed_stack_size
4746 current_function_pushed_stack_size = pushed; 5051 = upper_bound (current_function_pushed_stack_size, pushed);
4747 } 5052 }
4748 5053
4749 if (ACCUMULATE_OUTGOING_ARGS) 5054 if (ACCUMULATE_OUTGOING_ARGS)
4750 { 5055 {
4751 /* Since the stack pointer will never be pushed, it is possible for 5056 /* Since the stack pointer will never be pushed, it is possible for
4766 map must be allocated for its entire size, not just the part that 5071 map must be allocated for its entire size, not just the part that
4767 is the responsibility of the caller. */ 5072 is the responsibility of the caller. */
4768 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 5073 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4769 needed += reg_parm_stack_space; 5074 needed += reg_parm_stack_space;
4770 5075
5076 poly_int64 limit = needed;
4771 if (ARGS_GROW_DOWNWARD) 5077 if (ARGS_GROW_DOWNWARD)
4772 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, 5078 limit += 1;
4773 needed + 1); 5079
4774 else 5080 /* For polynomial sizes, this is the maximum possible size needed
4775 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed); 5081 for arguments with a constant size and offset. */
5082 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5083 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5084 const_limit);
4776 5085
4777 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); 5086 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4778 stack_usage_map = stack_usage_map_buf; 5087 stack_usage_map = stack_usage_map_buf;
4779 5088
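[Annotation: the const_limit hunk above works with the stack_usage_watermark introduced at the top of the file — constant_lower_bound keeps only the compile-time part of a poly_int size, and anything at or beyond the watermark is assumed used rather than tracked byte-by-byte in the map. A rough model of the two helpers this patch calls below (stack_region_maybe_used_p and mark_stack_region_used), assuming constant bounds; the real ones live earlier in calls.c and also handle poly_int bounds:

    #include <cstring>

    static unsigned char map_model[64];           /* stack_usage_map       */
    static unsigned long watermark_model = ~0UL;  /* stack_usage_watermark */

    /* May any byte in [lower, upper) already be in use?  The fixed
       argument area below FIXED has already been saved, so skip it,
       like the old "if (i < reg_parm_stack_space)" test.  */
    static bool
    region_maybe_used_p (unsigned long lower, unsigned long upper,
                         unsigned long fixed)
    {
      if (upper > watermark_model)
        return true;              /* beyond the watermark: assume used */
      for (unsigned long i = lower > fixed ? lower : fixed; i < upper; i++)
        if (map_model[i])
          return true;
      return false;
    }

    /* Mark [lower, upper) used; if the region does not fit in the map
       (e.g. its size involves a runtime indeterminate), lower the
       watermark instead of growing the map.  */
    static void
    mark_region_used (unsigned long lower, unsigned long upper)
    {
      if (upper > sizeof map_model)
        watermark_model = lower < watermark_model ? lower : watermark_model;
      else
        std::memset (map_model + lower, 1, upper - lower);
    }

    int
    main ()
    {
      mark_region_used (8, 16);
      return region_maybe_used_p (8, 16, 0) ? 0 : 1;  /* used -> exit 0 */
    }
]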
4780 if (initial_highest_arg_in_use) 5089 if (initial_highest_arg_in_use)
4798 argblock = virtual_outgoing_args_rtx; 5107 argblock = virtual_outgoing_args_rtx;
4799 } 5108 }
4800 else 5109 else
4801 { 5110 {
4802 if (!PUSH_ARGS) 5111 if (!PUSH_ARGS)
4803 argblock = push_block (GEN_INT (args_size.constant), 0, 0); 5112 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
4804 } 5113 }
4805 5114
4806 /* We push args individually in reverse order, perform stack alignment 5115 /* We push args individually in reverse order, perform stack alignment
4807 before the first push (the last arg). */ 5116 before the first push (the last arg). */
4808 if (argblock == 0) 5117 if (argblock == 0)
4809 anti_adjust_stack (GEN_INT (args_size.constant 5118 anti_adjust_stack (gen_int_mode (args_size.constant
4810 - original_args_size.constant)); 5119 - original_args_size.constant,
5120 Pmode));
4811 5121
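[Annotation on GEN_INT -> gen_int_mode, here and in the later hunks: GEN_INT wraps a raw HOST_WIDE_INT with no canonicalization, while gen_int_mode sign-extends the value to the mode's precision — and, after this update, its poly_int64 overload can produce a CONST_POLY_INT when the size depends on a runtime indeterminate. A sketch of the constant-case canonicalization only:

    #include <cassert>
    #include <cstdint>

    /* Sign-extend VALUE from PRECISION bits, the canonical form a
       CONST_INT must have for a given mode (what trunc_int_for_mode
       does inside gen_int_mode).  */
    static int64_t
    canonical_int (int64_t value, unsigned precision)
    {
      uint64_t mask = precision >= 64 ? ~0ULL : (1ULL << precision) - 1;
      uint64_t sign = 1ULL << (precision - 1);
      uint64_t v = (uint64_t) value & mask;
      return (int64_t) ((v ^ sign) - sign);
    }

    int
    main ()
    {
      /* 0xFFFFFFFF under a 32-bit Pmode canonicalizes to -1.  */
      assert (canonical_int (0xFFFFFFFFLL, 32) == -1);
      assert (canonical_int (42, 32) == 42);
      return 0;
    }
]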
4812 argnum = nargs - 1; 5122 argnum = nargs - 1;
4813 5123
4814 #ifdef REG_PARM_STACK_SPACE 5124 #ifdef REG_PARM_STACK_SPACE
4815 if (ACCUMULATE_OUTGOING_ARGS) 5125 if (ACCUMULATE_OUTGOING_ARGS)
4845 machine_mode mode = argvec[argnum].mode; 5155 machine_mode mode = argvec[argnum].mode;
4846 rtx val = argvec[argnum].value; 5156 rtx val = argvec[argnum].value;
4847 rtx reg = argvec[argnum].reg; 5157 rtx reg = argvec[argnum].reg;
4848 int partial = argvec[argnum].partial; 5158 int partial = argvec[argnum].partial;
4849 unsigned int parm_align = argvec[argnum].locate.boundary; 5159 unsigned int parm_align = argvec[argnum].locate.boundary;
4850 int lower_bound = 0, upper_bound = 0, i; 5160 poly_int64 lower_bound = 0, upper_bound = 0;
4851 5161
4852 if (! (reg != 0 && partial == 0)) 5162 if (! (reg != 0 && partial == 0))
4853 { 5163 {
4854 rtx use; 5164 rtx use;
4855 5165
4869 { 5179 {
4870 lower_bound = argvec[argnum].locate.slot_offset.constant; 5180 lower_bound = argvec[argnum].locate.slot_offset.constant;
4871 upper_bound = lower_bound + argvec[argnum].locate.size.constant; 5181 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4872 } 5182 }
4873 5183
4874 i = lower_bound; 5184 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4875 /* Don't worry about things in the fixed argument area; 5185 reg_parm_stack_space))
4876 it has already been saved. */
4877 if (i < reg_parm_stack_space)
4878 i = reg_parm_stack_space;
4879 while (i < upper_bound && stack_usage_map[i] == 0)
4880 i++;
4881
4882 if (i < upper_bound)
4883 { 5186 {
4884 /* We need to make a save area. */ 5187 /* We need to make a save area. */
4885 unsigned int size 5188 poly_uint64 size
4886 = argvec[argnum].locate.size.constant * BITS_PER_UNIT; 5189 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4887 machine_mode save_mode 5190 machine_mode save_mode
4888 = int_mode_for_size (size, 1).else_blk (); 5191 = int_mode_for_size (size, 1).else_blk ();
4889 rtx adr 5192 rtx adr
4890 = plus_constant (Pmode, argblock, 5193 = plus_constant (Pmode, argblock,
4900 ); 5203 );
4901 5204
4902 emit_block_move (validize_mem 5205 emit_block_move (validize_mem
4903 (copy_rtx (argvec[argnum].save_area)), 5206 (copy_rtx (argvec[argnum].save_area)),
4904 stack_area, 5207 stack_area,
4905 GEN_INT (argvec[argnum].locate.size.constant), 5208 (gen_int_mode
5209 (argvec[argnum].locate.size.constant,
5210 Pmode)),
4906 BLOCK_OP_CALL_PARM); 5211 BLOCK_OP_CALL_PARM);
4907 } 5212 }
4908 else 5213 else
4909 { 5214 {
4910 argvec[argnum].save_area = gen_reg_rtx (save_mode); 5215 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4914 } 5219 }
4915 } 5220 }
4916 5221
4917 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align, 5222 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4918 partial, reg, 0, argblock, 5223 partial, reg, 0, argblock,
4919 GEN_INT (argvec[argnum].locate.offset.constant), 5224 (gen_int_mode
5225 (argvec[argnum].locate.offset.constant, Pmode)),
4920 reg_parm_stack_space, 5226 reg_parm_stack_space,
4921 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false); 5227 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
4922 5228
4923 /* Now mark the segment we just used. */ 5229 /* Now mark the segment we just used. */
4924 if (ACCUMULATE_OUTGOING_ARGS) 5230 if (ACCUMULATE_OUTGOING_ARGS)
4925 for (i = lower_bound; i < upper_bound; i++) 5231 mark_stack_region_used (lower_bound, upper_bound);
4926 stack_usage_map[i] = 1;
4927 5232
4928 NO_DEFER_POP; 5233 NO_DEFER_POP;
4929 5234
4930 /* Indicate argument access so that alias.c knows that these 5235 /* Indicate argument access so that alias.c knows that these
4931 values are live. */ 5236 values are live. */
4963 { 5268 {
4964 machine_mode mode = argvec[argnum].mode; 5269 machine_mode mode = argvec[argnum].mode;
4965 rtx val = argvec[argnum].value; 5270 rtx val = argvec[argnum].value;
4966 rtx reg = argvec[argnum].reg; 5271 rtx reg = argvec[argnum].reg;
4967 int partial = argvec[argnum].partial; 5272 int partial = argvec[argnum].partial;
4968 #ifdef BLOCK_REG_PADDING
4969 int size = 0;
4970 #endif
4971 5273
4972 /* Handle calls that pass values in multiple non-contiguous 5274 /* Handle calls that pass values in multiple non-contiguous
4973 locations. The PA64 has examples of this for library calls. */ 5275 locations. The PA64 has examples of this for library calls. */
4974 if (reg != 0 && GET_CODE (reg) == PARALLEL) 5276 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4975 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); 5277 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4976 else if (reg != 0 && partial == 0) 5278 else if (reg != 0 && partial == 0)
4977 { 5279 {
4978 emit_move_insn (reg, val); 5280 emit_move_insn (reg, val);
4979 #ifdef BLOCK_REG_PADDING 5281 #ifdef BLOCK_REG_PADDING
4980 size = GET_MODE_SIZE (argvec[argnum].mode); 5282 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
4981 5283
4982 /* Copied from load_register_parameters. */ 5284 /* Copied from load_register_parameters. */
4983 5285
4984 /* Handle case where we have a value that needs shifting 5286 /* Handle case where we have a value that needs shifting
4985 up to the msb. eg. a QImode value and we're padding 5287 up to the msb. eg. a QImode value and we're padding
4986 upward on a BYTES_BIG_ENDIAN machine. */ 5288 upward on a BYTES_BIG_ENDIAN machine. */
4987 if (size < UNITS_PER_WORD 5289 if (known_lt (size, UNITS_PER_WORD)
4988 && (argvec[argnum].locate.where_pad 5290 && (argvec[argnum].locate.where_pad
4989 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) 5291 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
4990 { 5292 {
4991 rtx x; 5293 rtx x;
4992 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 5294 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4993 5295
4994 /* Assigning REG here rather than a temp makes CALL_FUSAGE 5296 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4995 report the whole reg as used. Strictly speaking, the 5297 report the whole reg as used. Strictly speaking, the
4996 call only uses SIZE bytes at the msb end, but it doesn't 5298 call only uses SIZE bytes at the msb end, but it doesn't
4997 seem worth generating rtl to say that. */ 5299 seem worth generating rtl to say that. */
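[Annotation: a worked instance of the shift computed above, assuming a 64-bit big-endian target (UNITS_PER_WORD == 8, BITS_PER_UNIT == 8) and a QImode argument padded upward:

    #include <cassert>
    #include <cstdint>

    int
    main ()
    {
      const long units_per_word = 8, bits_per_unit = 8;
      const long size = 1;                  /* GET_MODE_SIZE (QImode) */
      long shift = (units_per_word - size) * bits_per_unit;
      assert (shift == 56);
      /* The byte 0xAB lands at the msb end of the register word.  */
      assert ((uint64_t) 0xAB << shift == 0xAB00000000000000ULL);
      return 0;
    }
]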
5043 NO_DEFER_POP; 5345 NO_DEFER_POP;
5044 valreg = (mem_value == 0 && outmode != VOIDmode 5346 valreg = (mem_value == 0 && outmode != VOIDmode
5045 ? hard_libcall_value (outmode, orgfun) : NULL_RTX); 5347 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5046 5348
5047 /* Stack must be properly aligned now. */ 5349 /* Stack must be properly aligned now. */
5048 gcc_assert (!(stack_pointer_delta 5350 gcc_assert (multiple_p (stack_pointer_delta,
5049 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); 5351 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5050 5352
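[Annotation: the alignment assertion changes for the same poly_int reason — the old `delta & (bound - 1)` test only works when the delta is a compile-time constant, while multiple_p must hold for every value of the indeterminate. In the one-indeterminate model that means checking each coefficient; an illustrative sketch, not GCC's implementation:

    #include <cassert>

    struct poly1 { long c0, c1; };   /* value = c0 + c1 * x, x >= 0 */

    /* c0 + c1*x is a multiple of F for every x >= 0 iff both
       coefficients are multiples of F.  */
    static bool
    multiple_p (poly1 a, long f)
    {
      return a.c0 % f == 0 && a.c1 % f == 0;
    }

    int
    main ()
    {
      poly1 aligned = { 32, 16 };    /* 32 + 16x: 16-aligned for all x */
      poly1 unaligned = { 32, 8 };   /* the 8x term breaks 16-alignment */
      assert (multiple_p (aligned, 16));
      assert (!multiple_p (unaligned, 16));
      return 0;
    }
]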
5051 before_call = get_last_insn (); 5353 before_call = get_last_insn ();
5052 5354
5053 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which 5355 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5054 will set inhibit_defer_pop to that value. */ 5356 will set inhibit_defer_pop to that value. */
5182 5484
5183 if (save_mode == BLKmode) 5485 if (save_mode == BLKmode)
5184 emit_block_move (stack_area, 5486 emit_block_move (stack_area,
5185 validize_mem 5487 validize_mem
5186 (copy_rtx (argvec[count].save_area)), 5488 (copy_rtx (argvec[count].save_area)),
5187 GEN_INT (argvec[count].locate.size.constant), 5489 (gen_int_mode
5490 (argvec[count].locate.size.constant, Pmode)),
5188 BLOCK_OP_CALL_PARM); 5491 BLOCK_OP_CALL_PARM);
5189 else 5492 else
5190 emit_move_insn (stack_area, argvec[count].save_area); 5493 emit_move_insn (stack_area, argvec[count].save_area);
5191 } 5494 }
5192 5495
5193 highest_outgoing_arg_in_use = initial_highest_arg_in_use; 5496 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5194 stack_usage_map = initial_stack_usage_map; 5497 stack_usage_map = initial_stack_usage_map;
5498 stack_usage_watermark = initial_stack_usage_watermark;
5195 } 5499 }
5196 5500
5197 free (stack_usage_map_buf); 5501 free (stack_usage_map_buf);
5198 5502
5199 return value; 5503 return value;
5200 5504
5201 } 5505 }
5202 5506
5203
5204 /* Store pointer bounds argument ARG into Bounds Table entry
5205 associated with PARM. */
5206 static void
5207 store_bounds (struct arg_data *arg, struct arg_data *parm)
5208 {
5209 rtx slot = NULL, ptr = NULL, addr = NULL;
5210
5211 /* We may pass bounds not associated with any pointer. */
5212 if (!parm)
5213 {
5214 gcc_assert (arg->special_slot);
5215 slot = arg->special_slot;
5216 ptr = const0_rtx;
5217 }
5218 /* Find pointer associated with bounds and where it is
5219 passed. */
5220 else
5221 {
5222 if (!parm->reg)
5223 {
5224 gcc_assert (!arg->special_slot);
5225
5226 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
5227 }
5228 else if (REG_P (parm->reg))
5229 {
5230 gcc_assert (arg->special_slot);
5231 slot = arg->special_slot;
5232
5233 if (MEM_P (parm->value))
5234 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
5235 else if (REG_P (parm->value))
5236 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
5237 else
5238 {
5239 gcc_assert (!arg->pointer_offset);
5240 ptr = parm->value;
5241 }
5242 }
5243 else
5244 {
5245 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
5246
5247 gcc_assert (arg->special_slot);
5248 slot = arg->special_slot;
5249
5250 if (parm->parallel_value)
5251 ptr = chkp_get_value_with_offs (parm->parallel_value,
5252 GEN_INT (arg->pointer_offset));
5253 else
5254 gcc_unreachable ();
5255 }
5256 }
5257
5258 /* Expand bounds. */
5259 if (!arg->value)
5260 arg->value = expand_normal (arg->tree_value);
5261
5262 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
5263 }
5264 5507
5265 /* Store a single argument for a function call 5508 /* Store a single argument for a function call
5266 into the register or memory area where it must be passed. 5509 into the register or memory area where it must be passed.
5267 *ARG describes the argument value and where to pass it. 5510 *ARG describes the argument value and where to pass it.
5268 5511
5286 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) 5529 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5287 { 5530 {
5288 tree pval = arg->tree_value; 5531 tree pval = arg->tree_value;
5289 rtx reg = 0; 5532 rtx reg = 0;
5290 int partial = 0; 5533 int partial = 0;
5291 int used = 0; 5534 poly_int64 used = 0;
5292 int i, lower_bound = 0, upper_bound = 0; 5535 poly_int64 lower_bound = 0, upper_bound = 0;
5293 int sibcall_failure = 0; 5536 int sibcall_failure = 0;
5294 5537
5295 if (TREE_CODE (pval) == ERROR_MARK) 5538 if (TREE_CODE (pval) == ERROR_MARK)
5296 return 1; 5539 return 1;
5297 5540
5308 if (ARGS_GROW_DOWNWARD) 5551 if (ARGS_GROW_DOWNWARD)
5309 { 5552 {
5310 /* stack_slot is negative, but we want to index stack_usage_map 5553 /* stack_slot is negative, but we want to index stack_usage_map
5311 with positive values. */ 5554 with positive values. */
5312 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) 5555 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5313 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1; 5556 {
5557 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5558 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5559 }
5314 else 5560 else
5315 upper_bound = 0; 5561 upper_bound = 0;
5316 5562
5317 lower_bound = upper_bound - arg->locate.size.constant; 5563 lower_bound = upper_bound - arg->locate.size.constant;
5318 } 5564 }
5319 else 5565 else
5320 { 5566 {
5321 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) 5567 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5322 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)); 5568 {
5569 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5570 lower_bound = rtx_to_poly_int64 (offset);
5571 }
5323 else 5572 else
5324 lower_bound = 0; 5573 lower_bound = 0;
5325 5574
5326 upper_bound = lower_bound + arg->locate.size.constant; 5575 upper_bound = lower_bound + arg->locate.size.constant;
5327 } 5576 }
5328 5577
5329 i = lower_bound; 5578 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5330 /* Don't worry about things in the fixed argument area; 5579 reg_parm_stack_space))
5331 it has already been saved. */
5332 if (i < reg_parm_stack_space)
5333 i = reg_parm_stack_space;
5334 while (i < upper_bound && stack_usage_map[i] == 0)
5335 i++;
5336
5337 if (i < upper_bound)
5338 { 5580 {
5339 /* We need to make a save area. */ 5581 /* We need to make a save area. */
5340 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT; 5582 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5341 machine_mode save_mode 5583 machine_mode save_mode
5342 = int_mode_for_size (size, 1).else_blk (); 5584 = int_mode_for_size (size, 1).else_blk ();
5343 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0)); 5585 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5344 rtx stack_area = gen_rtx_MEM (save_mode, adr); 5586 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5345 5587
5348 arg->save_area 5590 arg->save_area
5349 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1); 5591 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5350 preserve_temp_slots (arg->save_area); 5592 preserve_temp_slots (arg->save_area);
5351 emit_block_move (validize_mem (copy_rtx (arg->save_area)), 5593 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5352 stack_area, 5594 stack_area,
5353 GEN_INT (arg->locate.size.constant), 5595 (gen_int_mode
5596 (arg->locate.size.constant, Pmode)),
5354 BLOCK_OP_CALL_PARM); 5597 BLOCK_OP_CALL_PARM);
5355 } 5598 }
5356 else 5599 else
5357 { 5600 {
5358 arg->save_area = gen_reg_rtx (save_mode); 5601 arg->save_area = gen_reg_rtx (save_mode);
5425 } 5668 }
5426 5669
5427 /* Check for overlap with already clobbered argument area. */ 5670 /* Check for overlap with already clobbered argument area. */
5428 if ((flags & ECF_SIBCALL) 5671 if ((flags & ECF_SIBCALL)
5429 && MEM_P (arg->value) 5672 && MEM_P (arg->value)
5430 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0), 5673 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5431 arg->locate.size.constant)) 5674 arg->locate.size.constant))
5432 sibcall_failure = 1; 5675 sibcall_failure = 1;
5433 5676
5434 /* Don't allow anything left on stack from computation 5677 /* Don't allow anything left on stack from computation
5435 of argument to alloca. */ 5678 of argument to alloca. */
5436 if (flags & ECF_MAY_BE_ALLOCA) 5679 if (flags & ECF_MAY_BE_ALLOCA)
5439 if (arg->value == arg->stack) 5682 if (arg->value == arg->stack)
5440 /* If the value is already in the stack slot, we are done. */ 5683 /* If the value is already in the stack slot, we are done. */
5441 ; 5684 ;
5442 else if (arg->mode != BLKmode) 5685 else if (arg->mode != BLKmode)
5443 { 5686 {
5444 int size;
5445 unsigned int parm_align; 5687 unsigned int parm_align;
5446 5688
5447 /* Argument is a scalar, not entirely passed in registers. 5689 /* Argument is a scalar, not entirely passed in registers.
5448 (If part is passed in registers, arg->partial says how much 5690 (If part is passed in registers, arg->partial says how much
5449 and emit_push_insn will take care of putting it there.) 5691 and emit_push_insn will take care of putting it there.)
5452 amount of space allocated to it, 5694 amount of space allocated to it,
5453 also bump stack pointer by the additional space. 5695 also bump stack pointer by the additional space.
5454 Note that in C the default argument promotions 5696 Note that in C the default argument promotions
5455 will prevent such mismatches. */ 5697 will prevent such mismatches. */
5456 5698
5457 size = GET_MODE_SIZE (arg->mode); 5699 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5700 ? 0 : GET_MODE_SIZE (arg->mode));
5701
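[Annotation: TYPE_EMPTY_P above ties into the C++17 empty-class ABI work in gcc 8, as I read it — an empty class still has sizeof 1, yet as a function argument it can occupy no bytes in the parameter area (on targets such as x86-64), so its push size is forced to 0 and the push itself is skipped by the `maybe_ne (used, 0)` guard further down. A user-level illustration, not compiler code:

    /* E is an empty class: sizeof is still 1, but on ABIs that adopted
       the C++17 empty-class rules it is passed using no registers or
       stack slots, which is what TYPE_EMPTY_P reports to the expander.  */
    struct E {};
    static_assert (sizeof (E) == 1, "empty classes still have size 1");

    int
    f (E, int x)       /* the E argument consumes no parameter space */
    {
      return x;
    }

    int
    main ()
    {
      return f (E (), 0);
    }
]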
5458 /* Compute how much space the push instruction will push. 5702 /* Compute how much space the push instruction will push.
5459 On many machines, pushing a byte will advance the stack 5703 On many machines, pushing a byte will advance the stack
5460 pointer by a halfword. */ 5704 pointer by a halfword. */
5461 #ifdef PUSH_ROUNDING 5705 #ifdef PUSH_ROUNDING
5462 size = PUSH_ROUNDING (size); 5706 size = PUSH_ROUNDING (size);
5465 5709
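[Annotation for context on PUSH_ROUNDING (unchanged here apart from the size becoming poly_int64): on targets whose push instructions keep the stack 2-byte aligned, a 1-byte push still moves the stack pointer by a halfword, as the comment above says. A hypothetical definition in that 68k-like style:

    #include <cassert>

    /* Hypothetical halfword-granular rounding: pushes advance the
       stack pointer in 2-byte units.  */
    static long
    push_rounding (long bytes)
    {
      return (bytes + 1) & ~1L;
    }

    int
    main ()
    {
      assert (push_rounding (1) == 2);   /* a byte push costs 2 bytes */
      assert (push_rounding (4) == 4);
      return 0;
    }
]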
5466 /* Compute how much space the argument should get: 5710 /* Compute how much space the argument should get:
5467 round up to a multiple of the alignment for arguments. */ 5711 round up to a multiple of the alignment for arguments. */
5468 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval)) 5712 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5469 != PAD_NONE) 5713 != PAD_NONE)
5470 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) 5714 /* At the moment we don't (need to) support ABIs for which the
5471 / (PARM_BOUNDARY / BITS_PER_UNIT)) 5715 padding isn't known at compile time. In principle it should
5472 * (PARM_BOUNDARY / BITS_PER_UNIT)); 5716 be easy to add though. */
5717 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5473 5718
5474 /* Compute the alignment of the pushed argument. */ 5719 /* Compute the alignment of the pushed argument. */
5475 parm_align = arg->locate.boundary; 5720 parm_align = arg->locate.boundary;
5476 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval)) 5721 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5477 == PAD_DOWNWARD) 5722 == PAD_DOWNWARD)
5478 { 5723 {
5479 int pad = used - size; 5724 poly_int64 pad = used - size;
5480 if (pad) 5725 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5481 { 5726 if (pad_align != 0)
5482 unsigned int pad_align = least_bit_hwi (pad) * BITS_PER_UNIT; 5727 parm_align = MIN (parm_align, pad_align);
5483 parm_align = MIN (parm_align, pad_align);
5484 }
5485 } 5728 }
5486 5729
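[Annotation: known_alignment above generalizes the old least_bit_hwi — for a compile-time constant it is the least significant set bit (the largest power of two known to divide the value), and it returns 0 for a known-zero value, which is why the separate `if (pad)` guard folds into the `pad_align != 0` test. Constant-case sketch:

    #include <cassert>

    /* Largest power of two known to divide X; 0 when X is zero.  */
    static unsigned long
    known_alignment_c (unsigned long x)
    {
      return x & -x;         /* least significant set bit */
    }

    int
    main ()
    {
      assert (known_alignment_c (24) == 8);
      assert (known_alignment_c (16) == 16);
      assert (known_alignment_c (0) == 0);
      return 0;
    }
]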
5487 /* This isn't already where we want it on the stack, so put it there. 5730 /* This isn't already where we want it on the stack, so put it there.
5488 This can either be done with push or copy insns. */ 5731 This can either be done with push or copy insns. */
5489 if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 5732 if (maybe_ne (used, 0)
5490 parm_align, partial, reg, used - size, argblock, 5733 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5491 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, 5734 NULL_RTX, parm_align, partial, reg, used - size,
5492 ARGS_SIZE_RTX (arg->locate.alignment_pad), true)) 5735 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5736 reg_parm_stack_space,
5737 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5493 sibcall_failure = 1; 5738 sibcall_failure = 1;
5494 5739
5495 /* Unless this is a partially-in-register argument, the argument is now 5740 /* Unless this is a partially-in-register argument, the argument is now
5496 in the stack. */ 5741 in the stack. */
5497 if (partial == 0) 5742 if (partial == 0)
5500 else 5745 else
5501 { 5746 {
5502 /* BLKmode, at least partly to be pushed. */ 5747 /* BLKmode, at least partly to be pushed. */
5503 5748
5504 unsigned int parm_align; 5749 unsigned int parm_align;
5505 int excess; 5750 poly_int64 excess;
5506 rtx size_rtx; 5751 rtx size_rtx;
5507 5752
5508 /* Pushing a nonscalar. 5753 /* Pushing a nonscalar.
5509 If part is passed in registers, PARTIAL says how much 5754 If part is passed in registers, PARTIAL says how much
5510 and emit_push_insn will take care of putting it there. */ 5755 and emit_push_insn will take care of putting it there. */
5520 else 5765 else
5521 { 5766 {
5522 /* PUSH_ROUNDING has no effect on us, because emit_push_insn 5767 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5523 for BLKmode is careful to avoid it. */ 5768 for BLKmode is careful to avoid it. */
5524 excess = (arg->locate.size.constant 5769 excess = (arg->locate.size.constant
5525 - int_size_in_bytes (TREE_TYPE (pval)) 5770 - arg_int_size_in_bytes (TREE_TYPE (pval))
5526 + partial); 5771 + partial);
5527 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), 5772 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
5528 NULL_RTX, TYPE_MODE (sizetype), 5773 NULL_RTX, TYPE_MODE (sizetype),
5529 EXPAND_NORMAL); 5774 EXPAND_NORMAL);
5530 } 5775 }
5531 5776
5532 parm_align = arg->locate.boundary; 5777 parm_align = arg->locate.boundary;
5536 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval)) 5781 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5537 == PAD_DOWNWARD) 5782 == PAD_DOWNWARD)
5538 { 5783 {
5539 if (arg->locate.size.var) 5784 if (arg->locate.size.var)
5540 parm_align = BITS_PER_UNIT; 5785 parm_align = BITS_PER_UNIT;
5541 else if (excess) 5786 else
5542 { 5787 {
5543 unsigned int excess_align = least_bit_hwi (excess) * BITS_PER_UNIT; 5788 unsigned int excess_align
5544 parm_align = MIN (parm_align, excess_align); 5789 = known_alignment (excess) * BITS_PER_UNIT;
5790 if (excess_align != 0)
5791 parm_align = MIN (parm_align, excess_align);
5545 } 5792 }
5546 } 5793 }
5547 5794
5548 if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) 5795 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5549 { 5796 {
5550 /* emit_push_insn might not work properly if arg->value and 5797 /* emit_push_insn might not work properly if arg->value and
5551 argblock + arg->locate.offset areas overlap. */ 5798 argblock + arg->locate.offset areas overlap. */
5552 rtx x = arg->value; 5799 rtx x = arg->value;
5553 int i = 0; 5800 poly_int64 i = 0;
5554 5801
5555 if (XEXP (x, 0) == crtl->args.internal_arg_pointer 5802 if (strip_offset (XEXP (x, 0), &i)
5556 || (GET_CODE (XEXP (x, 0)) == PLUS 5803 == crtl->args.internal_arg_pointer)
5557 && XEXP (XEXP (x, 0), 0) ==
5558 crtl->args.internal_arg_pointer
5559 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
5560 { 5804 {
5561 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
5562 i = INTVAL (XEXP (XEXP (x, 0), 1));
5563
5564 /* arg.locate doesn't contain the pretend_args_size offset, 5805 /* arg.locate doesn't contain the pretend_args_size offset,
5565 it's part of argblock. Ensure we don't count it in I. */ 5806 it's part of argblock. Ensure we don't count it in I. */
5566 if (STACK_GROWS_DOWNWARD) 5807 if (STACK_GROWS_DOWNWARD)
5567 i -= crtl->args.pretend_args_size; 5808 i -= crtl->args.pretend_args_size;
5568 else 5809 else
5569 i += crtl->args.pretend_args_size; 5810 i += crtl->args.pretend_args_size;
5570 5811
5571 /* expand_call should ensure this. */ 5812 /* expand_call should ensure this. */
5572 gcc_assert (!arg->locate.offset.var 5813 gcc_assert (!arg->locate.offset.var
5573 && arg->locate.size.var == 0 5814 && arg->locate.size.var == 0);
5574 && CONST_INT_P (size_rtx)); 5815 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
5575 5816
5576 if (arg->locate.offset.constant > i) 5817 if (known_eq (arg->locate.offset.constant, i))
5577 {
5578 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
5579 sibcall_failure = 1;
5580 }
5581 else if (arg->locate.offset.constant < i)
5582 {
5583 /* Use arg->locate.size.constant instead of size_rtx
5584 because we only care about the part of the argument
5585 on the stack. */
5586 if (i < (arg->locate.offset.constant
5587 + arg->locate.size.constant))
5588 sibcall_failure = 1;
5589 }
5590 else
5591 { 5818 {
5592 /* Even though they appear to be at the same location, 5819 /* Even though they appear to be at the same location,
5593 if part of the outgoing argument is in registers, 5820 if part of the outgoing argument is in registers,
5594 they aren't really at the same location. Check for 5821 they aren't really at the same location. Check for
5595 this by making sure that the incoming size is the 5822 this by making sure that the incoming size is the
5596 same as the outgoing size. */ 5823 same as the outgoing size. */
5597 if (arg->locate.size.constant != INTVAL (size_rtx)) 5824 if (maybe_ne (arg->locate.size.constant, size_val))
5598 sibcall_failure = 1; 5825 sibcall_failure = 1;
5599 } 5826 }
5827 else if (maybe_in_range_p (arg->locate.offset.constant,
5828 i, size_val))
5829 sibcall_failure = 1;
5830 /* Use arg->locate.size.constant instead of size_rtx
5831 because we only care about the part of the argument
5832 on the stack. */
5833 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5834 arg->locate.size.constant))
5835 sibcall_failure = 1;
5600 } 5836 }
5601 } 5837 }
5602 5838
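[Annotation: the rewritten sibcall overlap check above first uses strip_offset to split `base + offset` (what the old explicit PLUS pattern match did by hand), then replaces the three-way offset comparison with maybe_in_range_p, which for poly_int values answers "might VAL lie in [POS, POS + SIZE)". A constant-case sketch of the resulting logic, with hypothetical names:

    #include <cassert>

    static bool
    in_range_p (long val, long pos, long size)
    {
      return val >= pos && val < pos + size;
    }

    /* Does the incoming slot [i, i + incoming_size) conflict with the
       outgoing one [off, off + outgoing_size)?  */
    static bool
    regions_conflict_p (long i, long incoming_size,
                        long off, long outgoing_size)
    {
      if (i == off)
        /* Same location: only safe when the sizes match exactly,
           otherwise part of the outgoing argument is in registers.  */
        return incoming_size != outgoing_size;
      return in_range_p (off, i, incoming_size)
             || in_range_p (i, off, outgoing_size);
    }

    int
    main ()
    {
      assert (!regions_conflict_p (0, 8, 8, 8));  /* adjacent slots */
      assert (regions_conflict_p (0, 16, 8, 8));  /* overlap: no sibcall */
      return 0;
    }
]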
5603 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, 5839 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5604 parm_align, partial, reg, excess, argblock, 5840 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5605 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, 5841 parm_align, partial, reg, excess, argblock,
5606 ARGS_SIZE_RTX (arg->locate.alignment_pad), false); 5842 ARGS_SIZE_RTX (arg->locate.offset),
5843 reg_parm_stack_space,
5844 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5607 5845
5608 /* Unless this is a partially-in-register argument, the argument is now 5846 /* Unless this is a partially-in-register argument, the argument is now
5609 in the stack. 5847 in the stack.
5610 5848
5611 ??? Unlike the case above, in which we want the actual 5849 ??? Unlike the case above, in which we want the actual
5626 } 5864 }
5627 5865
5628 /* Mark all slots this store used. */ 5866 /* Mark all slots this store used. */
5629 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL) 5867 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5630 && argblock && ! variable_size && arg->stack) 5868 && argblock && ! variable_size && arg->stack)
5631 for (i = lower_bound; i < upper_bound; i++) 5869 mark_stack_region_used (lower_bound, upper_bound);
5632 stack_usage_map[i] = 1;
5633 5870
5634 /* Once we have pushed something, pops can't safely 5871 /* Once we have pushed something, pops can't safely
5635 be deferred during the rest of the arguments. */ 5872 be deferred during the rest of the arguments. */
5636 NO_DEFER_POP; 5873 NO_DEFER_POP;
5637 5874
5678 5915
5679 /* If the type is marked as addressable (it is required 5916 /* If the type is marked as addressable (it is required
5680 to be constructed into the stack)... */ 5917 to be constructed into the stack)... */
5681 if (TREE_ADDRESSABLE (type)) 5918 if (TREE_ADDRESSABLE (type))
5682 return true; 5919 return true;
5920
5921 if (TYPE_EMPTY_P (type))
5922 return false;
5683 5923
5684 /* If the padding and mode of the type is such that a copy into 5924 /* If the padding and mode of the type is such that a copy into
5685 a register would put it into the wrong part of the register. */ 5925 a register would put it into the wrong part of the register. */
5686 if (mode == BLKmode 5926 if (mode == BLKmode
5687 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT) 5927 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)