comparison gcc/calls.c @ 70:b81903832de2

merge c-decl.c
author Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp>
date Sun, 21 Aug 2011 09:24:16 +0900
parents 1b10fe6932e1
children ce75bd9117e4
comparing 69:1b10fe6932e1 with 70:b81903832de2 (lines removed in this revision are marked "-")
  97      copy in smaller-sized pieces into pseudos.  These are stored in a
  98      block pointed to by this field.  The next field says how many
  99      word-sized pseudos we made.  */
 100   rtx *aligned_regs;
 101   int n_aligned_regs;
-102 #ifndef noCbC
-103   rtx exprs;
-104 #endif
 105 };
 106
 107 /* A vector of one char per byte of stack space.  A byte is nonzero if
 108    the corresponding stack location has been used.
 109    This vector is used to prevent a function call within an argument from
 126    to make sure the object being constructed does not overlap the
 127    argument list for the constructor call.  */
 128 static int stack_arg_under_construction;
 129
 130 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
 131                          HOST_WIDE_INT, rtx, rtx, int, rtx, int,
 132                          CUMULATIVE_ARGS *);
 133 static void precompute_register_parameters (int, struct arg_data *, int *);
 134 static int store_one_arg (struct arg_data *, rtx, int, int, int);
 135 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
 136 static int finalize_must_preallocate (int, int, struct arg_data *,
 137                                       struct args_size *);
 138 static void precompute_arguments (int, struct arg_data *);
 139 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
 140 static void initialize_argument_information (int, struct arg_data *,
 141                                              struct args_size *, int,
 142                                              tree, tree,
 143                                              tree, tree, CUMULATIVE_ARGS *, int,
 144                                              rtx *, int *, int *, int *,
 145                                              bool *, bool);
 146 static void compute_argument_addresses (struct arg_data *, rtx, int);
 147 static rtx rtx_for_function_call (tree, tree);
 148 static void load_register_parameters (struct arg_data *, int, rtx *, int,
 149                                       int, int *);
 150 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
 151                                       enum machine_mode, int, va_list);
 152 static int special_function_p (const_tree, int);
 153 static int check_sibcall_argument_overlap_1 (rtx);
 154 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
 155
 156 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
 157                                                       unsigned int);
 158 static tree split_complex_types (tree);
 159
 160 #ifdef REG_PARM_STACK_SPACE
 161 static rtx save_fixed_argument_area (int, rtx, int *, int *);
 162 static void restore_fixed_argument_area (rtx, rtx, int, int);
 169    CALL_FUSAGE points to a variable holding the prospective
 170    CALL_INSN_FUNCTION_USAGE information.  */
 171
 172 rtx
 173 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
 174                       rtx *call_fusage, int reg_parm_seen, int sibcallp)
 175 {
 176   /* Make a valid memory address and copy constants through pseudo-regs,
 177      but not for a constant address if -fno-function-cse.  */
 178   if (GET_CODE (funexp) != SYMBOL_REF)
 179     /* If we are using registers for parameters, force the
 184        : memory_address (FUNCTION_MODE, funexp));
 185   else if (! sibcallp)
 186     {
 187 #ifndef NO_FUNCTION_CSE
 188       if (optimize && ! flag_no_function_cse)
 189         funexp = force_reg (Pmode, funexp);
 190 #endif
 191     }
 192
 193   if (static_chain_value != 0)
 194     {
 198       chain = targetm.calls.static_chain (fndecl, false);
 199       static_chain_value = convert_memory_address (Pmode, static_chain_value);
 200
 201       emit_move_insn (chain, static_chain_value);
 202       if (REG_P (chain))
 203         use_reg (call_fusage, chain);
 204     }
 205
 206   return funexp;
 207 }
 208
 248    CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
 249    denote registers used by the called function.  */
 250
 251 static void
 252 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
 253              tree funtype ATTRIBUTE_UNUSED,
 254              HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
 255              HOST_WIDE_INT rounded_stack_size,
 256              HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
 257              rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
 258              int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
 259              CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
 260 {
 261   rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
 262   rtx call_insn;
 263   int already_popped = 0;
 264   HOST_WIDE_INT n_popped
 281     {
 282       rtx n_pop = GEN_INT (n_popped);
 283       rtx pat;
 284
 285       /* If this subroutine pops its own args, record that in the call insn
 286          if possible, for the sake of frame pointer elimination.  */
 287
 288       if (valreg)
 289         pat = GEN_SIBCALL_VALUE_POP (valreg,
 290                                      gen_rtx_MEM (FUNCTION_MODE, funexp),
 291                                      rounded_stack_size_rtx, next_arg_reg,
 292                                      n_pop);
 293       else
 294         pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
 295                                rounded_stack_size_rtx, next_arg_reg, n_pop);
 296
 297       emit_call_insn (pat);
 298       already_popped = 1;
 299     }
 300   else
 314     {
 315       rtx n_pop = GEN_INT (n_popped);
 316       rtx pat;
 317
 318       /* If this subroutine pops its own args, record that in the call insn
 319          if possible, for the sake of frame pointer elimination.  */
 320
 321       if (valreg)
 322         pat = GEN_CALL_VALUE_POP (valreg,
 323                                   gen_rtx_MEM (FUNCTION_MODE, funexp),
 324                                   rounded_stack_size_rtx, next_arg_reg, n_pop);
 325       else
 326         pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
 327                             rounded_stack_size_rtx, next_arg_reg, n_pop);
 328
 329       emit_call_insn (pat);
 330       already_popped = 1;
 331     }
 332   else
 335 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
 336   if ((ecf_flags & ECF_SIBCALL)
 337       && HAVE_sibcall && HAVE_sibcall_value)
 338     {
 339       if (valreg)
 340         emit_call_insn (GEN_SIBCALL_VALUE (valreg,
 341                                            gen_rtx_MEM (FUNCTION_MODE, funexp),
 342                                            rounded_stack_size_rtx,
 343                                            next_arg_reg, NULL_RTX));
 344       else
 345         emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
 346                                      rounded_stack_size_rtx, next_arg_reg,
 347                                      GEN_INT (struct_value_size)));
 348     }
 349   else
 350 #endif
 351
 352 #if defined (HAVE_call) && defined (HAVE_call_value)
 353   if (HAVE_call && HAVE_call_value)
 354     {
 355       if (valreg)
 356         emit_call_insn (GEN_CALL_VALUE (valreg,
 357                                         gen_rtx_MEM (FUNCTION_MODE, funexp),
 358                                         rounded_stack_size_rtx, next_arg_reg,
 359                                         NULL_RTX));
 360       else
 361         emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
 362                                   rounded_stack_size_rtx, next_arg_reg,
 363                                   GEN_INT (struct_value_size)));
 364     }
 365   else
 366 #endif
 367     gcc_unreachable ();
 368
 408   inhibit_defer_pop = old_inhibit_defer_pop;
 409
 410   if (n_popped > 0)
 411     {
 412       if (!already_popped)
 413         CALL_INSN_FUNCTION_USAGE (call_insn)
 414           = gen_rtx_EXPR_LIST (VOIDmode,
 415                                gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
 416                                CALL_INSN_FUNCTION_USAGE (call_insn));
 417       rounded_stack_size -= n_popped;
 418       rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
 419       stack_pointer_delta -= n_popped;
 420
 421       /* If popping is needed, stack realign must use DRAP  */
 424     }
 425
 426   if (!ACCUMULATE_OUTGOING_ARGS)
 427     {
 428       /* If returning from the subroutine does not automatically pop the args,
 429          we need an instruction to pop them sooner or later.
 430          Perhaps do it now; perhaps just record how much space to pop later.
 431
 432          If returning from the subroutine does pop the args, indicate that the
 433          stack pointer will be changed.  */
 434
 435       if (rounded_stack_size != 0)
 436         {
 437           if (ecf_flags & ECF_NORETURN)
 438             /* Just pretend we did the pop.  */
 439             stack_pointer_delta -= rounded_stack_size;
 440           else if (flag_defer_pop && inhibit_defer_pop == 0
 441                    && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
 442             pending_stack_adjust += rounded_stack_size;
 443           else
 444             adjust_stack (rounded_stack_size_rtx);
 445         }
 446     }
 447   /* When we accumulate outgoing args, we must avoid any stack manipulations.
 448      Restore the stack pointer to its original value now.  Usually
 449      ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
 450      On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
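
As an aside, the n_popped bookkeeping above is plain arithmetic and can be checked in isolation.  A minimal standalone sketch (all names below are illustrative, not GCC internals): when the callee pops part of the argument block itself, the caller subtracts that amount from both the remaining adjustment and the running stack-pointer delta before deciding what adjust_stack would still have to free.

#include <assert.h>

/* Hypothetical mirror of the accounting in emit_call_1: the callee
   already popped `n_popped' bytes, so the caller only needs to
   release the remainder of the argument block.  */
static long
remaining_pop (long rounded_stack_size, long n_popped,
               long *stack_pointer_delta)
{
  rounded_stack_size -= n_popped;     /* the callee did this part for us */
  *stack_pointer_delta -= n_popped;   /* track the net SP movement */
  return rounded_stack_size;          /* what would still be popped later */
}

int main (void)
{
  long delta = 32;
  assert (remaining_pop (32, 8, &delta) == 24 && delta == 24);
  return 0;
}
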
 474 special_function_p (const_tree fndecl, int flags)
 475 {
 476   if (fndecl && DECL_NAME (fndecl)
 477       && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
 478       /* Exclude functions not at the file scope, or not `extern',
 479          since they are not the magic functions we would otherwise
 480          think they are.
 481          FIXME: this should be handled with attributes, not with this
 482          hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
 483          because you can declare fork() inside a function if you
 484          wish.  */
 485       && (DECL_CONTEXT (fndecl) == NULL_TREE
 486           || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
 487       && TREE_PUBLIC (fndecl))
 488     {
 489       const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
 490       const char *tname = name;
 491
 492       /* We assume that alloca will always be called by name.  It
 493          makes no sense to pass it as a pointer-to-function to
 494          anything that does not understand its behavior.  */
 495       if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
 496             && name[0] == 'a'
 497             && ! strcmp (name, "alloca"))
 498            || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
 499                && name[0] == '_'
 500                && ! strcmp (name, "__builtin_alloca"))))
 501         flags |= ECF_MAY_BE_ALLOCA;
 502
 503       /* Disregard prefix _, __, __x or __builtin_.  */
 504       if (name[0] == '_')
 505         {
 506           if (name[1] == '_'
 507               && name[2] == 'b'
 508               && !strncmp (name + 3, "uiltin_", 7))
 509             tname += 10;
 510           else if (name[1] == '_' && name[2] == 'x')
 511             tname += 3;
 512           else if (name[1] == '_')
 513             tname += 2;
 514           else
 515             tname += 1;
 516         }
 517
 518       if (tname[0] == 's')
 519         {
 520           if ((tname[1] == 'e'
 521                && (! strcmp (tname, "setjmp")
 522                    || ! strcmp (tname, "setjmp_syscall")))
 523               || (tname[1] == 'i'
 524                   && ! strcmp (tname, "sigsetjmp"))
 525               || (tname[1] == 'a'
 526                   && ! strcmp (tname, "savectx")))
 527             flags |= ECF_RETURNS_TWICE;
 528
 529           if (tname[1] == 'i'
 530               && ! strcmp (tname, "siglongjmp"))
 531             flags |= ECF_NORETURN;
 532         }
 533       else if ((tname[0] == 'q' && tname[1] == 's'
 534                 && ! strcmp (tname, "qsetjmp"))
 535                || (tname[0] == 'v' && tname[1] == 'f'
 536                    && ! strcmp (tname, "vfork"))
 537                || (tname[0] == 'g' && tname[1] == 'e'
 538                    && !strcmp (tname, "getcontext")))
 539         flags |= ECF_RETURNS_TWICE;
 540
 541       else if (tname[0] == 'l' && tname[1] == 'o'
 542                && ! strcmp (tname, "longjmp"))
 543         flags |= ECF_NORETURN;
 544     }
 545
 546   return flags;
 547 }
 548
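The prefix handling above can be exercised outside the compiler.  A minimal standalone sketch of the same heuristic (strip_magic_prefix is a made-up name, not a GCC function):

#include <stdio.h>
#include <string.h>

/* Illustrative re-implementation of the prefix stripping done by
   special_function_p: skip `__builtin_', `__x', `__' or `_' before
   comparing against the magic names.  */
static const char *
strip_magic_prefix (const char *name)
{
  if (name[0] != '_')
    return name;
  if (!strncmp (name, "__builtin_", 10))
    return name + 10;
  if (name[1] == '_' && name[2] == 'x')
    return name + 3;
  if (name[1] == '_')
    return name + 2;
  return name + 1;
}

int main (void)
{
  /* Both spellings reduce to "setjmp", so both would be flagged
     ECF_RETURNS_TWICE by the real heuristic.  */
  printf ("%s\n", strip_magic_prefix ("__builtin_setjmp"));
  printf ("%s\n", strip_magic_prefix ("_setjmp"));
  return 0;
}
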
 579 {
 580   if (TREE_CODE (exp) == CALL_EXPR
 581       && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
 582       && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
 583       && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
 584           & ECF_MAY_BE_ALLOCA))
 585     return true;
 586   return false;
 587 }
 588
 589 /* Detect flags (function attributes) from the function decl or type node.  */
 595
 596   if (DECL_P (exp))
 597     {
 598       /* The function exp may have the `malloc' attribute.  */
 599       if (DECL_IS_MALLOC (exp))
 600         flags |= ECF_MALLOC;
 601
 602       /* The function exp may have the `returns_twice' attribute.  */
 603       if (DECL_IS_RETURNS_TWICE (exp))
 604         flags |= ECF_RETURNS_TWICE;
 605
 606       /* Process the pure and const attributes.  */
 607       if (TREE_READONLY (exp))
 608         flags |= ECF_CONST;
 609       if (DECL_PURE_P (exp))
 610         flags |= ECF_PURE;
 611       if (DECL_LOOPING_CONST_OR_PURE_P (exp))
 612         flags |= ECF_LOOPING_CONST_OR_PURE;
 613
 614       if (DECL_IS_NOVOPS (exp))
 615         flags |= ECF_NOVOPS;
 616       if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
 617         flags |= ECF_LEAF;
 618
 619       if (TREE_NOTHROW (exp))
 620         flags |= ECF_NOTHROW;
 621
 622       flags = special_function_p (exp, flags);
 623     }
 624   else if (TYPE_P (exp) && TYPE_READONLY (exp))
 625     flags |= ECF_CONST;
 646     flags = flags_from_decl_or_type (decl);
 647   else
 648     {
 649       t = TREE_TYPE (CALL_EXPR_FN (t));
 650       if (t && TREE_CODE (t) == POINTER_TYPE)
 651         flags = flags_from_decl_or_type (TREE_TYPE (t));
 652       else
 653         flags = 0;
 654     }
 655
 656   return flags;
 657 }
 658
 663
 664    Set REG_PARM_SEEN if we encounter a register parameter.  */
 665
 666 static void
 667 precompute_register_parameters (int num_actuals, struct arg_data *args,
 668                                 int *reg_parm_seen)
 669 {
 670   int i;
 671
 672   *reg_parm_seen = 0;
 673
 752   high = highest_outgoing_arg_in_use;
 753
 754   for (low = 0; low < high; low++)
 755     if (stack_usage_map[low] != 0)
 756       {
 757         int num_to_save;
 758         enum machine_mode save_mode;
 759         int delta;
 760         rtx stack_area;
 761         rtx save_area;
 762
 763         while (stack_usage_map[--high] == 0)
 764           ;
 765
 766         *low_to_save = low;
 767         *high_to_save = high;
 768
 769         num_to_save = high - low + 1;
 770         save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
 771
 772         /* If we don't have the required alignment, must do this
 773            in BLKmode.  */
 774         if ((low & (MIN (GET_MODE_SIZE (save_mode),
 775                          BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
 776           save_mode = BLKmode;
 777
 778 #ifdef ARGS_GROW_DOWNWARD
 779         delta = -high;
 780 #else
 781         delta = low;
 782 #endif
 783         stack_area = gen_rtx_MEM (save_mode,
 784                                   memory_address (save_mode,
 785                                                   plus_constant (argblock,
 786                                                                  delta)));
 787
 788         set_mem_align (stack_area, PARM_BOUNDARY);
 789         if (save_mode == BLKmode)
 790           {
 791             save_area = assign_stack_temp (BLKmode, num_to_save, 0);
 792             emit_block_move (validize_mem (save_area), stack_area,
 793                              GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
 794           }
 795         else
 796           {
 797             save_area = gen_reg_rtx (save_mode);
 798             emit_move_insn (save_area, stack_area);
 799           }
 800
 801         return save_area;
 802       }
 803
 804   return NULL_RTX;
 805 }
 806
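The mode_for_size/BLKmode choice above follows a simple rule: use one wide move when the span fits an aligned integer mode, otherwise fall back to a block copy.  A hedged standalone analogue (the widths and the 8-byte cap are assumptions of this sketch, not target parameters):

#include <stdio.h>

/* Illustrative analogue of the save-area decision: pick the smallest
   power-of-two width that covers `bytes' starting at `offset', but
   fall back to a byte-wise copy ("BLKmode", returned as -1) when the
   start is not aligned for that width.  */
static int
save_width (unsigned offset, unsigned bytes)
{
  unsigned width;
  for (width = 1; width <= 8; width *= 2)
    if (bytes <= width)
      return (offset & (width - 1)) == 0 ? (int) width : -1;
  return -1;   /* wider than a register: block copy */
}

int main (void)
{
  printf ("%d\n", save_width (0, 4));   /* 4: one 32-bit move */
  printf ("%d\n", save_width (3, 4));   /* -1: misaligned, block copy */
  return 0;
}
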
 815   delta = -high_to_save;
 816 #else
 817   delta = low_to_save;
 818 #endif
 819   stack_area = gen_rtx_MEM (save_mode,
 820                             memory_address (save_mode,
 821                                             plus_constant (argblock, delta)));
 822   set_mem_align (stack_area, PARM_BOUNDARY);
 823
 824   if (save_mode != BLKmode)
 825     emit_move_insn (stack_area, save_area);
 826   else
 827     emit_block_move (stack_area, validize_mem (save_area),
 828                      GEN_INT (high_to_save - low_to_save + 1),
 829                      BLOCK_OP_CALL_PARM);
 830 }
 831 #endif /* REG_PARM_STACK_SPACE */
 832
 833 /* If any elements in ARGS refer to parameters that are to be passed in
 834    registers, but not in memory, and whose alignment does not permit a
 844 {
 845   int i, j;
 846
 847   for (i = 0; i < num_actuals; i++)
 848     if (args[i].reg != 0 && ! args[i].pass_on_stack
 849         && args[i].mode == BLKmode
 850         && MEM_P (args[i].value)
 851         && (MEM_ALIGN (args[i].value)
 852             < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
 853       {
 854         int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
 855         int endian_correction = 0;
 856
 857         if (args[i].partial)
 858           {
 859             gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
 860             args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
 861           }
 862         else
 863           {
 864             args[i].n_aligned_regs
 865               = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
 866           }
 867
 868         args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
 869
 870         /* Structures smaller than a word are normally aligned to the
 871            least significant byte.  On a BYTES_BIG_ENDIAN machine,
 872            this means we must skip the empty high order bytes when
 873            calculating the bit offset.  */
 874         if (bytes < UNITS_PER_WORD
 875 #ifdef BLOCK_REG_PADDING
 876             && (BLOCK_REG_PADDING (args[i].mode,
 877                                    TREE_TYPE (args[i].tree_value), 1)
 878                 == downward)
 879 #else
 880             && BYTES_BIG_ENDIAN
 881 #endif
 882             )
 883           endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
 884
 885         for (j = 0; j < args[i].n_aligned_regs; j++)
 942    CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
 943    the thunked-to function.  */
 944
 945 static void
 946 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
 947                                  struct arg_data *args,
 948                                  struct args_size *args_size,
 949                                  int n_named_args ATTRIBUTE_UNUSED,
 950                                  tree exp, tree struct_value_addr_value,
 951                                  tree fndecl, tree fntype,
 952                                  CUMULATIVE_ARGS *args_so_far,
 953                                  int reg_parm_stack_space,
 954                                  rtx *old_stack_level, int *old_pending_adj,
 955                                  int *must_preallocate, int *ecf_flags,
 956                                  bool *may_tailcall, bool call_from_thunk_p)
 957 {
 958   location_t loc = EXPR_LOCATION (exp);
 959   /* 1 if scanning parms front to back, -1 if scanning back to front.  */
 960   int inc;
 961
 973
 974   if (PUSH_ARGS_REVERSED)
 975     {
 976       i = num_actuals - 1, inc = -1;
 977       /* In this case, must reverse order of args
 978          so that we compute and push the last arg first.  */
 979     }
 980   else
 981     {
 982       i = 0, inc = 1;
 983     }
 989     call_expr_arg_iterator iter;
 990     tree arg;
 991
 992     if (struct_value_addr_value)
 993       {
 994         args[j].tree_value = struct_value_addr_value;
 995         j += inc;
 996       }
 997     FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
 998       {
 999         tree argtype = TREE_TYPE (arg);
1000         if (targetm.calls.split_complex_arg
1001             && argtype
1002             && TREE_CODE (argtype) == COMPLEX_TYPE
1003             && targetm.calls.split_complex_arg (argtype))
1004           {
1005             tree subtype = TREE_TYPE (argtype);
1006             args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1007             j += inc;
1008             args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1009           }
1010         else
1011           args[j].tree_value = arg;
1012         j += inc;
1013       }
1014   }
1015
1016   /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
1017   for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1020       int unsignedp;
1021       enum machine_mode mode;
1022
1023       /* Replace erroneous argument with constant zero.  */
1024       if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1025         args[i].tree_value = integer_zero_node, type = integer_type_node;
1026
1027       /* If TYPE is a transparent union or record, pass things the way
1028          we would pass the first field of the union or record.  We have
1029          already verified that the modes are the same.  */
1030       if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1031           && TYPE_TRANSPARENT_AGGR (type))
1032         type = TREE_TYPE (first_field (type));
1033
1034       /* Decide where to pass this arg.
1035
1036          args[i].reg is nonzero if all or part is passed in registers.
1037
1038          args[i].partial is nonzero if part but not all is passed in registers,
1039          and the exact value says how many bytes are passed in registers.
1040
1041          args[i].pass_on_stack is nonzero if the argument must at least be
1042          computed on the stack.  It may then be loaded back into registers
1043          if args[i].reg is nonzero.
1044
1045          These decisions are driven by the FUNCTION_... macros and must agree
1046          with those made by function.c.  */
1047
1048       /* See if this argument should be passed by invisible reference.  */
1049       if (pass_by_reference (args_so_far, TYPE_MODE (type),
1050                              type, argpos < n_named_args))
1051         {
1134           }
1135       }
1136
1137       unsignedp = TYPE_UNSIGNED (type);
1138       mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1139                                     fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1140
1141       args[i].unsignedp = unsignedp;
1142       args[i].mode = mode;
1143
1144       args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1153                                                  argpos < n_named_args);
1154       else
1155         args[i].tail_call_reg = args[i].reg;
1156
1157       if (args[i].reg)
1158         args[i].partial
1159           = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1160                                              argpos < n_named_args);
1161
1162       args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1163
1164       /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1165          it means that we are to pass this arg in the register(s) designated
1166          by the PARALLEL, but also to pass it in the stack.  */
1167       if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1168           && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1169         args[i].pass_on_stack = 1;
1170
1171       /* If this is an addressable type, we must preallocate the stack
1172          since we must evaluate the object into its final location.
1173
1174          If this is to be passed in both registers and the stack, it is simpler
1175          to preallocate.  */
1176       if (TREE_ADDRESSABLE (type)
1177           || (args[i].pass_on_stack && args[i].reg != 0))
1178         *must_preallocate = 1;
1179
1180       /* Compute the stack-size of this argument.  */
1181       if (args[i].reg == 0 || args[i].partial != 0
1182           || reg_parm_stack_space > 0
1183           || args[i].pass_on_stack)
1184         locate_and_pad_parm (mode, type,
1185 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1186                              1,
1187 #else
1188                              args[i].reg != 0,
1189 #endif
1190                              args[i].pass_on_stack ? 0 : args[i].partial,
1191                              fndecl, args_size, &args[i].locate);
1192 #ifdef BLOCK_REG_PADDING
1193       else
1194         /* The argument is passed entirely in registers.  See at which
1195            end it should be padded.  */
1196         args[i].locate.where_pad =
1197           BLOCK_REG_PADDING (mode, type,
1198                              int_size_in_bytes (type) <= UNITS_PER_WORD);
1199 #endif
1200
1201       /* Update ARGS_SIZE, the total stack space for args so far.  */
1202
1203       args_size->constant += args[i].locate.size.constant;
1204       if (args[i].locate.size.var)
1205         ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1206
1207       /* Increment ARGS_SO_FAR, which has info about which arg-registers
1208          have been used, etc.  */
1209
1210       targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1211                                           type, argpos < n_named_args);
1212     }
1213 }
1218    REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1219    for arguments passed in registers.  */
1220
1221 static int
1222 compute_argument_block_size (int reg_parm_stack_space,
1223                              struct args_size *args_size,
1224                              tree fndecl ATTRIBUTE_UNUSED,
1225                              tree fntype ATTRIBUTE_UNUSED,
1226                              int preferred_stack_boundary ATTRIBUTE_UNUSED)
1227 {
1228   int unadjusted_args_size = args_size->constant;
1229
1230   /* For accumulate outgoing args mode we don't need to align, since the frame
1231      will be already aligned.  Align to STACK_BOUNDARY in order to prevent
1242       args_size->var = ARGS_SIZE_TREE (*args_size);
1243       args_size->constant = 0;
1244
1245       preferred_stack_boundary /= BITS_PER_UNIT;
1246       if (preferred_stack_boundary > 1)
1247         {
1248           /* We don't handle this case yet.  To handle it correctly we have
1249              to add the delta, round and subtract the delta.
1250              Currently no machine description requires this support.  */
1251           gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1252           args_size->var = round_up (args_size->var, preferred_stack_boundary);
1253         }
1254
1255       if (reg_parm_stack_space > 0)
1256         {
1257           args_size->var
1258             = size_binop (MAX_EXPR, args_size->var,
1259                           ssize_int (reg_parm_stack_space));
1260
1261           /* The area corresponding to register parameters is not to count in
1262              the size of the block we need.  So make the adjustment.  */
1263           if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1264             args_size->var
1265               = size_binop (MINUS_EXPR, args_size->var,
1266                             ssize_int (reg_parm_stack_space));
1267         }
1268     }
1269   else
1270     {
1271       preferred_stack_boundary /= BITS_PER_UNIT;
1272       if (preferred_stack_boundary < 1)
1273         preferred_stack_boundary = 1;
1274       args_size->constant = (((args_size->constant
1275                                + stack_pointer_delta
1276                                + preferred_stack_boundary - 1)
1277                               / preferred_stack_boundary
1278                               * preferred_stack_boundary)
1279                              - stack_pointer_delta);
1280
1281       args_size->constant = MAX (args_size->constant,
1282                                  reg_parm_stack_space);
1283
1284       if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1285         args_size->constant -= reg_parm_stack_space;
1286     }
1287   return unadjusted_args_size;
1288 }
1289
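The constant-size branch above rounds so that the stack pointer, including the pending delta, lands on the preferred boundary.  A minimal standalone check of the same formula (round_args_size is an illustrative name):

#include <assert.h>

/* Illustrative form of the rounding in compute_argument_block_size:
   pad `size' so that (size + delta) is a multiple of `boundary',
   i.e. the stack pointer ends up aligned after the pending delta.  */
static long
round_args_size (long size, long delta, long boundary)
{
  return ((size + delta + boundary - 1) / boundary) * boundary - delta;
}

int main (void)
{
  /* 20 bytes of args with 4 bytes already pending, 16-byte boundary:
     pad to 28 so that 28 + 4 == 32 is a multiple of 16.  */
  assert (round_args_size (20, 4, 16) == 28);
  return 0;
}
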
1290 /* Precompute parameters as needed for a function call.
1319     {
1320       tree type;
1321       enum machine_mode mode;
1322
1323       if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1324         continue;
1325
1326       /* If this is an addressable type, we cannot pre-evaluate it.  */
1327       type = TREE_TYPE (args[i].tree_value);
1328       gcc_assert (!TREE_ADDRESSABLE (type));
1329
1330       args[i].initial_value = args[i].value
1331         = expand_normal (args[i].tree_value);
1332
1333       mode = TYPE_MODE (type);
1334       if (mode != args[i].mode)
1335         {
1336           int unsignedp = args[i].unsignedp;
1337           args[i].value
1338             = convert_modes (args[i].mode, mode,
1339                              args[i].value, args[i].unsignedp);
1340
1341           /* CSE will replace this only if it contains args[i].value
1342              pseudo, so convert it down to the declared mode using
1343              a SUBREG.  */
1344           if (REG_P (args[i].value)
1345               && GET_MODE_CLASS (args[i].mode) == MODE_INT
1346               && promote_mode (type, mode, &unsignedp) != args[i].mode)
1347             {
1348               args[i].initial_value
1349                 = gen_lowpart_SUBREG (mode, args[i].value);
1350               SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1351               SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1352                                             args[i].unsignedp);
1353             }
1354         }
1355     }
1356 }
1357
1358 /* Given the current state of MUST_PREALLOCATE and information about
1359    arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1360    compute and return the final value for MUST_PREALLOCATE.  */
1361
1362 static int
1363 finalize_must_preallocate (int must_preallocate, int num_actuals,
1364                            struct arg_data *args, struct args_size *args_size)
1365 {
1366   /* See if we have or want to preallocate stack space.
1367
1368      If we would have to push a partially-in-regs parm
1369      before other stack parms, preallocate stack space instead.
1387       int partial_seen = 0;
1388       int copy_to_evaluate_size = 0;
1389       int i;
1390
1391       for (i = 0; i < num_actuals && ! must_preallocate; i++)
1392         {
1393           if (args[i].partial > 0 && ! args[i].pass_on_stack)
1394             partial_seen = 1;
1395           else if (partial_seen && args[i].reg == 0)
1396             must_preallocate = 1;
1397
1398           if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1399               && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1400                   || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1401                   || TREE_CODE (args[i].tree_value) == COND_EXPR
1402                   || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1403             copy_to_evaluate_size
1404               += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1405         }
1406
1407       if (copy_to_evaluate_size * 2 >= args_size->constant
1408           && args_size->constant > 0)
1409         must_preallocate = 1;
1410     }
1411   return must_preallocate;
1412 }
1413
1414 /* If we preallocated stack space, compute the address of each argument
1426 {
1427   rtx arg_reg = argblock;
1428   int i, arg_offset = 0;
1429
1430   if (GET_CODE (argblock) == PLUS)
1431     arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1432
1433   for (i = 0; i < num_actuals; i++)
1434     {
1435       rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1436       rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1437       rtx addr;
1438       unsigned int align, boundary;
1439       unsigned int units_on_stack = 0;
1440       enum machine_mode partial_mode = VOIDmode;
1441
1442       /* Skip this parm if it will not be passed on the stack.  */
1443       if (! args[i].pass_on_stack
1444           && args[i].reg != 0
1445           && args[i].partial == 0)
1446         continue;
1447
1448       if (CONST_INT_P (offset))
1449         addr = plus_constant (arg_reg, INTVAL (offset));
1450       else
1451         addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1452
1453       addr = plus_constant (addr, arg_offset);
1454
1455       if (args[i].partial != 0)
1456         {
1457           /* Only part of the parameter is being passed on the stack.
1458              Generate a simple memory reference of the correct size.  */
1459           units_on_stack = args[i].locate.size.constant;
1460           partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1461                                         MODE_INT, 1);
1462           args[i].stack = gen_rtx_MEM (partial_mode, addr);
1463           set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1464         }
1465       else
1466         {
1467           args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1468           set_mem_attributes (args[i].stack,
1469                               TREE_TYPE (args[i].tree_value), 1);
1470         }
1471       align = BITS_PER_UNIT;
1472       boundary = args[i].locate.boundary;
1473       if (args[i].locate.where_pad != downward)
1474         align = boundary;
1475       else if (CONST_INT_P (offset))
1476         {
1477           align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1478           align = align & -align;
1479         }
1480       set_mem_align (args[i].stack, align);
1481
1482       if (CONST_INT_P (slot_offset))
1483         addr = plus_constant (arg_reg, INTVAL (slot_offset));
1484       else
1485         addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1486
1487       addr = plus_constant (addr, arg_offset);
1488
1489       if (args[i].partial != 0)
1490         {
1491           /* Only part of the parameter is being passed on the stack.
1492              Generate a simple memory reference of the correct size.
1493           */
1494           args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1495           set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1496         }
1497       else
1498         {
1499           args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1500           set_mem_attributes (args[i].stack_slot,
1501                               TREE_TYPE (args[i].tree_value), 1);
1502         }
1503       set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1504
1505       /* Function incoming arguments may overlap with sibling call
1506          outgoing arguments and we cannot allow reordering of reads
1507          from function arguments with stores to outgoing arguments
1508          of sibling calls.  */
1509       set_mem_alias_set (args[i].stack, 0);
1510       set_mem_alias_set (args[i].stack_slot, 0);
1511       }
1512   }
1513 }
1514
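The align & -align idiom above isolates the lowest set bit, i.e. the largest power of two known to divide both the offset and the base boundary.  A standalone illustration (slot_align is a made-up helper, not GCC API):

#include <assert.h>

/* Illustrative version of the alignment computation above: a slot at
   byte offset `off_bytes' from a base aligned to `boundary_bits' is
   provably aligned to the lowest set bit of (offset_in_bits | boundary).  */
static unsigned
slot_align (unsigned off_bytes, unsigned boundary_bits)
{
  unsigned align = off_bytes * 8 | boundary_bits;
  return align & -align;   /* isolate the least significant set bit */
}

int main (void)
{
  assert (slot_align (4, 64) == 32);   /* 32-bit offset under a 64-bit base */
  assert (slot_align (0, 64) == 64);   /* offset 0 keeps the full boundary */
  return 0;
}
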
1515 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1516    in a call instruction.
1527
1528   /* Get the function to call, in the form of RTL.  */
1529   if (fndecl)
1530     {
1531       /* If this is the first use of the function, see if we need to
1532          make an external definition for it.  */
1533       if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1534         {
1535           assemble_external (fndecl);
1536           TREE_USED (fndecl) = 1;
1537         }
1538
1539       /* Get a SYMBOL_REF rtx for the function address.  */
1540       funexp = XEXP (DECL_RTL (fndecl), 0);
1541     }
1542   else
1543     /* Generate an rtx (probably a pseudo-register) for the address.  */
1544     {
1545       push_temp_slots ();
1546       funexp = expand_normal (addr);
1547       pop_temp_slots ();  /* FUNEXP can't be BLKmode.  */
1548     }
1549   return funexp;
1550 }
1551
1552 /* Return true if and only if SIZE storage units (usually bytes)
1560   HOST_WIDE_INT i;
1561
1562   if (addr == crtl->args.internal_arg_pointer)
1563     i = 0;
1564   else if (GET_CODE (addr) == PLUS
1565            && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1566            && CONST_INT_P (XEXP (addr, 1)))
1567     i = INTVAL (XEXP (addr, 1));
1568   /* Return true for arg pointer based indexed addressing.  */
1569   else if (GET_CODE (addr) == PLUS
1570            && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1571                || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
1572     return true;
1573   else
1574     return false;
1575
1576 #ifdef ARGS_GROW_DOWNWARD
1579   if (size > 0)
1580     {
1581       unsigned HOST_WIDE_INT k;
1582
1583       for (k = 0; k < size; k++)
1584         if (i + k < stored_args_map->n_bits
1585             && TEST_BIT (stored_args_map, i + k))
1586           return true;
1587     }
1588
1589   return false;
1590 }
1591
1599 When IS_SIBCALL, perform the check_sibcall_argument_overlap 1596 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1600 checking, setting *SIBCALL_FAILURE if appropriate. */ 1597 checking, setting *SIBCALL_FAILURE if appropriate. */
1601 1598
1602 static void 1599 static void
1603 load_register_parameters (struct arg_data *args, int num_actuals, 1600 load_register_parameters (struct arg_data *args, int num_actuals,
1604 rtx *call_fusage, int flags, int is_sibcall, 1601 rtx *call_fusage, int flags, int is_sibcall,
1605 int *sibcall_failure) 1602 int *sibcall_failure)
1606 { 1603 {
1607 int i, j; 1604 int i, j;
1608 1605
1609 for (i = 0; i < num_actuals; i++) 1606 for (i = 0; i < num_actuals; i++)
1610 { 1607 {
1611 rtx reg = ((flags & ECF_SIBCALL) 1608 rtx reg = ((flags & ECF_SIBCALL)
1612 ? args[i].tail_call_reg : args[i].reg); 1609 ? args[i].tail_call_reg : args[i].reg);
1613 if (reg) 1610 if (reg)
1614 { 1611 {
1615 int partial = args[i].partial; 1612 int partial = args[i].partial;
1616 int nregs; 1613 int nregs;
1617 int size = 0; 1614 int size = 0;
1618 rtx before_arg = get_last_insn (); 1615 rtx before_arg = get_last_insn ();
1619 /* Set non-negative if we must move a word at a time, even if 1616 /* Set non-negative if we must move a word at a time, even if
1620 just one word (e.g., partial == 4 && mode == DFmode). Set 1617 just one word (e.g., partial == 4 && mode == DFmode). Set
1621 to -1 if we just use a normal move insn. This value can be 1618 to -1 if we just use a normal move insn. This value can be
1622 zero if the argument is a zero size structure. */ 1619 zero if the argument is a zero size structure. */
1623 nregs = -1; 1620 nregs = -1;
1624 if (GET_CODE (reg) == PARALLEL) 1621 if (GET_CODE (reg) == PARALLEL)
1625 ; 1622 ;
1626 else if (partial) 1623 else if (partial)
1627 { 1624 {
1628 gcc_assert (partial % UNITS_PER_WORD == 0); 1625 gcc_assert (partial % UNITS_PER_WORD == 0);
1629 nregs = partial / UNITS_PER_WORD; 1626 nregs = partial / UNITS_PER_WORD;
1630 } 1627 }
1631 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) 1628 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1632 { 1629 {
1633 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); 1630 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1634 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; 1631 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1635 } 1632 }
1636 else 1633 else
1637 size = GET_MODE_SIZE (args[i].mode); 1634 size = GET_MODE_SIZE (args[i].mode);
1638 1635
1639 /* Handle calls that pass values in multiple non-contiguous 1636 /* Handle calls that pass values in multiple non-contiguous
1640 locations. The Irix 6 ABI has examples of this. */ 1637 locations. The Irix 6 ABI has examples of this. */
1641 1638
1642 if (GET_CODE (reg) == PARALLEL) 1639 if (GET_CODE (reg) == PARALLEL)
1643 emit_group_move (reg, args[i].parallel_value); 1640 emit_group_move (reg, args[i].parallel_value);
1644 1641
1645 /* If simple case, just do move. If normal partial, store_one_arg 1642 /* If simple case, just do move. If normal partial, store_one_arg
1646 has already loaded the register for us. In all other cases, 1643 has already loaded the register for us. In all other cases,
1647 load the register(s) from memory. */ 1644 load the register(s) from memory. */
1648 1645
1649 else if (nregs == -1) 1646 else if (nregs == -1)
1650 { 1647 {
1651 emit_move_insn (reg, args[i].value); 1648 emit_move_insn (reg, args[i].value);
1652 #ifdef BLOCK_REG_PADDING 1649 #ifdef BLOCK_REG_PADDING
1653 /* Handle case where we have a value that needs shifting 1650 /* Handle case where we have a value that needs shifting
1654 up to the msb, e.g. a QImode value and we're padding 1651 up to the msb, e.g. a QImode value and we're padding
1655 upward on a BYTES_BIG_ENDIAN machine. */ 1652 upward on a BYTES_BIG_ENDIAN machine. */
1656 if (size < UNITS_PER_WORD 1653 if (size < UNITS_PER_WORD
1657 && (args[i].locate.where_pad 1654 && (args[i].locate.where_pad
1658 == (BYTES_BIG_ENDIAN ? upward : downward))) 1655 == (BYTES_BIG_ENDIAN ? upward : downward)))
1659 { 1656 {
1660 rtx x; 1657 rtx x;
1661 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 1658 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1662 1659
1663 /* Assigning REG here rather than a temp makes CALL_FUSAGE 1660 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1664 report the whole reg as used. Strictly speaking, the 1661 report the whole reg as used. Strictly speaking, the
1665 call only uses SIZE bytes at the msb end, but it doesn't 1662 call only uses SIZE bytes at the msb end, but it doesn't
1666 seem worth generating rtl to say that. */ 1663 seem worth generating rtl to say that. */
1667 reg = gen_rtx_REG (word_mode, REGNO (reg)); 1664 reg = gen_rtx_REG (word_mode, REGNO (reg));
1668 x = expand_shift (LSHIFT_EXPR, word_mode, reg, 1665 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1669 build_int_cst (NULL_TREE, shift), 1666 build_int_cst (NULL_TREE, shift),
1670 reg, 1); 1667 reg, 1);
1671 if (x != reg) 1668 if (x != reg)
1672 emit_move_insn (reg, x); 1669 emit_move_insn (reg, x);
1673 } 1670 }
1674 #endif 1671 #endif
1675 } 1672 }
1676 1673
1677 /* If we have pre-computed the values to put in the registers in 1674 /* If we have pre-computed the values to put in the registers in
1678 the case of non-aligned structures, copy them in now. */ 1675 the case of non-aligned structures, copy them in now. */
1695 *sibcall_failure = 1; 1692 *sibcall_failure = 1;
1696 1693
1697 /* Handle a BLKmode that needs shifting. */ 1694 /* Handle a BLKmode that needs shifting. */
1698 if (nregs == 1 && size < UNITS_PER_WORD 1695 if (nregs == 1 && size < UNITS_PER_WORD
1699 #ifdef BLOCK_REG_PADDING 1696 #ifdef BLOCK_REG_PADDING
1700 && args[i].locate.where_pad == downward 1697 && args[i].locate.where_pad == downward
1701 #else 1698 #else
1702 && BYTES_BIG_ENDIAN 1699 && BYTES_BIG_ENDIAN
1703 #endif 1700 #endif
1704 ) 1701 )
1705 { 1702 {
1706 rtx tem = operand_subword_force (mem, 0, args[i].mode); 1703 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1707 rtx ri = gen_rtx_REG (word_mode, REGNO (reg)); 1704 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1708 rtx x = gen_reg_rtx (word_mode); 1705 rtx x = gen_reg_rtx (word_mode);
1709 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 1706 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1710 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR 1707 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1711 : LSHIFT_EXPR; 1708 : LSHIFT_EXPR;
1712 1709
1713 emit_move_insn (x, tem); 1710 emit_move_insn (x, tem);
1714 x = expand_shift (dir, word_mode, x, 1711 x = expand_shift (dir, word_mode, x,
1715 build_int_cst (NULL_TREE, shift), 1712 build_int_cst (NULL_TREE, shift),
1716 ri, 1); 1713 ri, 1);
1717 if (x != ri) 1714 if (x != ri)
1718 emit_move_insn (ri, x); 1715 emit_move_insn (ri, x);
1719 } 1716 }
1720 else 1717 else
1721 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); 1718 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1722 } 1719 }
1723 1720
1724 /* When a parameter is a block, and perhaps in other cases, it is 1721 /* When a parameter is a block, and perhaps in other cases, it is
1725 possible that it did a load from an argument slot that was 1722 possible that it did a load from an argument slot that was
1726 already clobbered. */ 1723 already clobbered. */
1727 if (is_sibcall 1724 if (is_sibcall
1728 && check_sibcall_argument_overlap (before_arg, &args[i], 0)) 1725 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1729 *sibcall_failure = 1; 1726 *sibcall_failure = 1;
1730 1727
1731 /* Handle calls that pass values in multiple non-contiguous 1728 /* Handle calls that pass values in multiple non-contiguous
1732 locations. The Irix 6 ABI has examples of this. */ 1729 locations. The Irix 6 ABI has examples of this. */
1733 if (GET_CODE (reg) == PARALLEL) 1730 if (GET_CODE (reg) == PARALLEL)
1734 use_group_regs (call_fusage, reg); 1731 use_group_regs (call_fusage, reg);
1735 else if (nregs == -1) 1732 else if (nregs == -1)
1736 use_reg (call_fusage, reg); 1733 use_reg (call_fusage, reg);
1737 else if (nregs > 0) 1734 else if (nregs > 0)
1738 use_regs (call_fusage, REGNO (reg), nregs); 1735 use_regs (call_fusage, REGNO (reg), nregs);
1739 } 1736 }
1740 } 1737 }
1741 } 1738 }
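
A worked instance of the padding shift used above, assuming a 32-bit big-endian target (UNITS_PER_WORD == 4, BITS_PER_UNIT == 8):

    /* A 3-byte BLKmode argument loaded into a word register occupies the
       three most significant bytes on a big-endian machine; with
       where_pad == downward it belongs at the least significant end, so
           shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT
                 = (4 - 3) * 8 = 8
       and the register is shifted right (RSHIFT_EXPR) by 8 bits.  */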
1742 1739
1743 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments 1740 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1744 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY 1741 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1749 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should 1746 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1750 be popped after the call. Returns the adjustment. */ 1747 be popped after the call. Returns the adjustment. */
1751 1748
1752 static int 1749 static int
1753 combine_pending_stack_adjustment_and_call (int unadjusted_args_size, 1750 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1754 struct args_size *args_size, 1751 struct args_size *args_size,
1755 unsigned int preferred_unit_stack_boundary) 1752 unsigned int preferred_unit_stack_boundary)
1756 { 1753 {
1757 /* The number of bytes to pop so that the stack will be 1754 /* The number of bytes to pop so that the stack will be
1758 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ 1755 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1759 HOST_WIDE_INT adjustment; 1756 HOST_WIDE_INT adjustment;
1760 /* The alignment of the stack after the arguments are pushed, if we 1757 /* The alignment of the stack after the arguments are pushed, if we
1779 /* Push enough additional bytes that the stack will be aligned 1776 /* Push enough additional bytes that the stack will be aligned
1780 after the arguments are pushed. */ 1777 after the arguments are pushed. */
1781 if (preferred_unit_stack_boundary > 1) 1778 if (preferred_unit_stack_boundary > 1)
1782 { 1779 {
1783 if (unadjusted_alignment > 0) 1780 if (unadjusted_alignment > 0)
1784 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; 1781 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1785 else 1782 else
1786 adjustment += unadjusted_alignment; 1783 adjustment += unadjusted_alignment;
1787 } 1784 }
1788 1785
1789 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of 1786 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1790 bytes after the call. The right number is the entire 1787 bytes after the call. The right number is the entire
1791 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required 1788 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1814 1811
1815 code = GET_CODE (x); 1812 code = GET_CODE (x);
1816 1813
1817 if (code == MEM) 1814 if (code == MEM)
1818 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0), 1815 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1819 GET_MODE_SIZE (GET_MODE (x))); 1816 GET_MODE_SIZE (GET_MODE (x)));
1820 1817
1821 /* Scan all subexpressions. */ 1818 /* Scan all subexpressions. */
1822 fmt = GET_RTX_FORMAT (code); 1819 fmt = GET_RTX_FORMAT (code);
1823 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) 1820 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1824 { 1821 {
1825 if (*fmt == 'e') 1822 if (*fmt == 'e')
1826 { 1823 {
1827 if (check_sibcall_argument_overlap_1 (XEXP (x, i))) 1824 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1828 return 1; 1825 return 1;
1829 } 1826 }
1830 else if (*fmt == 'E') 1827 else if (*fmt == 'E')
1831 { 1828 {
1832 for (j = 0; j < XVECLEN (x, i); j++) 1829 for (j = 0; j < XVECLEN (x, i); j++)
1833 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j))) 1830 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1834 return 1; 1831 return 1;
1835 } 1832 }
1836 } 1833 }
1837 return 0; 1834 return 0;
1838 } 1835 }
1839 1836
1840 /* Scan sequence after INSN if it does not dereference any argument slots 1837 /* Scan sequence after INSN if it does not dereference any argument slots
1854 else 1851 else
1855 insn = NEXT_INSN (insn); 1852 insn = NEXT_INSN (insn);
1856 1853
1857 for (; insn; insn = NEXT_INSN (insn)) 1854 for (; insn; insn = NEXT_INSN (insn))
1858 if (INSN_P (insn) 1855 if (INSN_P (insn)
1859 && check_sibcall_argument_overlap_1 (PATTERN (insn))) 1856 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1860 break; 1857 break;
1861 1858
1862 if (mark_stored_args_map) 1859 if (mark_stored_args_map)
1863 { 1860 {
1864 #ifdef ARGS_GROW_DOWNWARD 1861 #ifdef ARGS_GROW_DOWNWARD
1866 #else 1863 #else
1867 low = arg->locate.slot_offset.constant; 1864 low = arg->locate.slot_offset.constant;
1868 #endif 1865 #endif
1869 1866
1870 for (high = low + arg->locate.size.constant; low < high; low++) 1867 for (high = low + arg->locate.size.constant; low < high; low++)
1871 SET_BIT (stored_args_map, low); 1868 SET_BIT (stored_args_map, low);
1872 } 1869 }
1873 return insn != NULL_RTX; 1870 return insn != NULL_RTX;
1874 } 1871 }
1875 1872
1876 /* Given that a function returns a value of mode MODE at the most 1873 /* Given that a function returns a value of mode MODE at the most
1889 1886
1890 /* Use ashr rather than lshr for right shifts. This is for the benefit 1887 /* Use ashr rather than lshr for right shifts. This is for the benefit
1891 of the MIPS port, which requires SImode values to be sign-extended 1888 of the MIPS port, which requires SImode values to be sign-extended
1892 when stored in 64-bit registers. */ 1889 when stored in 64-bit registers. */
1893 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab, 1890 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1894 value, GEN_INT (shift), value, 1, OPTAB_WIDEN)) 1891 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1895 gcc_unreachable (); 1892 gcc_unreachable ();
1896 return true; 1893 return true;
1897 } 1894 }
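
The ashr choice above in one line, assuming a 64-bit register holding a sign-extended SImode value:

    /* Sketch: -1 as a sign-extended SImode value in a 64-bit register.
       An arithmetic right shift keeps the high bits all-ones, so the
       MIPS invariant (SImode values stored sign-extended) still holds;
       a logical shift would zero them.  */
    static long long
    ashr_demo (long long reg /* e.g. -1 */)
    {
      return reg >> 16;  /* GCC shifts signed values arithmetically */
    }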
1898
1899 #ifndef noCbC
1900 #include "cbc-tree.h"
1901 #endif
1902 1895
1903 /* If X is a likely-spilled register value, copy it to a pseudo 1896 /* If X is a likely-spilled register value, copy it to a pseudo
1904 register and return that register. Return X otherwise. */ 1897 register and return that register. Return X otherwise. */
1905 1898
1906 static rtx 1899 static rtx
1911 if (REG_P (x) 1904 if (REG_P (x)
1912 && HARD_REGISTER_P (x) 1905 && HARD_REGISTER_P (x)
1913 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x)))) 1906 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
1914 { 1907 {
1915 /* Make sure that we generate a REG rather than a CONCAT. 1908 /* Make sure that we generate a REG rather than a CONCAT.
1916 Moves into CONCATs can need nontrivial instructions, 1909 Moves into CONCATs can need nontrivial instructions,
1917 and the whole point of this function is to avoid 1910 and the whole point of this function is to avoid
1918 using the hard register directly in such a situation. */ 1911 using the hard register directly in such a situation. */
1919 generating_concat_p = 0; 1912 generating_concat_p = 0;
1920 new_rtx = gen_reg_rtx (GET_MODE (x)); 1913 new_rtx = gen_reg_rtx (GET_MODE (x));
1921 generating_concat_p = 1; 1914 generating_concat_p = 1;
1922 emit_move_insn (new_rtx, x); 1915 emit_move_insn (new_rtx, x);
1923 return new_rtx; 1916 return new_rtx;
1924 } 1917 }
1925 return x; 1918 return x;
1926 } 1919 }
1927 1920
1928 /* Generate all the code for a function call 1921 /* Generate all the code for a CALL_EXPR exp
1929 and return an rtx for its value. 1922 and return an rtx for its value.
1930 Store the value in TARGET (specified as an rtx) if convenient. 1923 Store the value in TARGET (specified as an rtx) if convenient.
1931 If the value is stored in TARGET then TARGET is returned. 1924 If the value is stored in TARGET then TARGET is returned.
1932 If IGNORE is nonzero, then we ignore the value of the function call. */ 1925 If IGNORE is nonzero, then we ignore the value of the function call. */
1933 1926
2019 int flags = 0; 2012 int flags = 0;
2020 #ifdef REG_PARM_STACK_SPACE 2013 #ifdef REG_PARM_STACK_SPACE
2021 /* Define the boundary of the register parm stack space that needs to be 2014 /* Define the boundary of the register parm stack space that needs to be
2022 saved, if any. */ 2015 saved, if any. */
2023 int low_to_save, high_to_save; 2016 int low_to_save, high_to_save;
2024 rtx save_area = 0; /* Place that it is saved */ 2017 rtx save_area = 0; /* Place that it is saved */
2025 #endif 2018 #endif
2026 2019
2027 int initial_highest_arg_in_use = highest_outgoing_arg_in_use; 2020 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2028 char *initial_stack_usage_map = stack_usage_map; 2021 char *initial_stack_usage_map = stack_usage_map;
2029 char *stack_usage_map_buf = NULL; 2022 char *stack_usage_map_buf = NULL;
2081 avoid expanding the call and just evaluate the arguments for 2074 avoid expanding the call and just evaluate the arguments for
2082 side-effects. */ 2075 side-effects. */
2083 if ((flags & (ECF_CONST | ECF_PURE)) 2076 if ((flags & (ECF_CONST | ECF_PURE))
2084 && (!(flags & ECF_LOOPING_CONST_OR_PURE)) 2077 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2085 && (ignore || target == const0_rtx 2078 && (ignore || target == const0_rtx
2086 || TYPE_MODE (rettype) == VOIDmode)) 2079 || TYPE_MODE (rettype) == VOIDmode))
2087 { 2080 {
2088 bool volatilep = false; 2081 bool volatilep = false;
2089 tree arg; 2082 tree arg;
2090 call_expr_arg_iterator iter; 2083 call_expr_arg_iterator iter;
2091 2084
2092 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) 2085 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2093 if (TREE_THIS_VOLATILE (arg)) 2086 if (TREE_THIS_VOLATILE (arg))
2094 { 2087 {
2095 volatilep = true; 2088 volatilep = true;
2096 break; 2089 break;
2097 } 2090 }
2098 2091
2099 if (! volatilep) 2092 if (! volatilep)
2100 { 2093 {
2101 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) 2094 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2102 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); 2095 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2103 return const0_rtx; 2096 return const0_rtx;
2104 } 2097 }
2105 } 2098 }
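
An example of the shortcut above (hypothetical declarations): a const/pure call whose result is ignored is not expanded at all; only a volatile-touching argument would force the arguments to be evaluated for side effects.

    extern int lookup (int) __attribute__ ((pure));
    void
    drop (int x)
    {
      lookup (x);  /* value ignored, no volatile args: expands to nothing */
    }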
2106 2099
2107 #ifdef REG_PARM_STACK_SPACE 2100 #ifdef REG_PARM_STACK_SPACE
2108 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl); 2101 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2109 #endif 2102 #endif
2120 /* This call returns a big structure. */ 2113 /* This call returns a big structure. */
2121 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); 2114 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2122 2115
2123 #ifdef PCC_STATIC_STRUCT_RETURN 2116 #ifdef PCC_STATIC_STRUCT_RETURN
2124 { 2117 {
2125 pcc_struct_value = 1; 2118 pcc_struct_value = 1;
2126 } 2119 }
2127 #else /* not PCC_STATIC_STRUCT_RETURN */ 2120 #else /* not PCC_STATIC_STRUCT_RETURN */
2128 { 2121 {
2129 struct_value_size = int_size_in_bytes (rettype); 2122 struct_value_size = int_size_in_bytes (rettype);
2130 2123
2131 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp)) 2124 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2132 structure_value_addr = XEXP (target, 0); 2125 structure_value_addr = XEXP (target, 0);
2133 else 2126 else
2134 { 2127 {
2135 /* For variable-sized objects, we must be called with a target 2128 /* For variable-sized objects, we must be called with a target
2136 specified. If we were to allocate space on the stack here, 2129 specified. If we were to allocate space on the stack here,
2137 we would have no way of knowing when to free it. */ 2130 we would have no way of knowing when to free it. */
2138 rtx d = assign_temp (rettype, 0, 1, 1); 2131 rtx d = assign_temp (rettype, 0, 1, 1);
2139 2132
2140 mark_temp_addr_taken (d); 2133 mark_temp_addr_taken (d);
2141 structure_value_addr = XEXP (d, 0); 2134 structure_value_addr = XEXP (d, 0);
2142 target = 0; 2135 target = 0;
2143 } 2136 }
2144 } 2137 }
2145 #endif /* not PCC_STATIC_STRUCT_RETURN */ 2138 #endif /* not PCC_STATIC_STRUCT_RETURN */
2146 } 2139 }
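
A sketch of the aggregate-return handling above (hypothetical types): the callee is handed an address to construct the result in, either a caller-supplied slot or, under PCC_STATIC_STRUCT_RETURN, a static area.

    struct big { int a[8]; };
    extern struct big make_big (void);
    /* is expanded roughly as if it were:
       extern void make_big (struct big *hidden_return_slot);  */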
2147 2140
2148 /* Figure out the amount to which the stack should be aligned. */ 2141 /* Figure out the amount to which the stack should be aligned. */
2149 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; 2142 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2150 if (fndecl) 2143 if (fndecl)
2151 { 2144 {
2152 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl); 2145 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2153 /* Without automatic stack alignment, we can't increase preferred 2146 /* Without automatic stack alignment, we can't increase preferred
2154 stack boundary. With automatic stack alignment, it is 2147 stack boundary. With automatic stack alignment, it is
2155 unnecessary, since unless we can guarantee that all callers will 2148 unnecessary, since unless we can guarantee that all callers will
2156 align the outgoing stack properly, the callee has to align its 2149 align the outgoing stack properly, the callee has to align its
2157 stack anyway. */ 2150 stack anyway. */
2158 if (i 2151 if (i
2159 && i->preferred_incoming_stack_boundary 2152 && i->preferred_incoming_stack_boundary
2160 && i->preferred_incoming_stack_boundary < preferred_stack_boundary) 2153 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2161 preferred_stack_boundary = i->preferred_incoming_stack_boundary; 2154 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2162 } 2155 }
2163 2156
2164 /* Operand 0 is a pointer-to-function; get the type of the function. */ 2157 /* Operand 0 is a pointer-to-function; get the type of the function. */
2165 funtype = TREE_TYPE (addr); 2158 funtype = TREE_TYPE (addr);
2166 gcc_assert (POINTER_TYPE_P (funtype)); 2159 gcc_assert (POINTER_TYPE_P (funtype));
2172 if (targetm.calls.split_complex_arg) 2165 if (targetm.calls.split_complex_arg)
2173 { 2166 {
2174 call_expr_arg_iterator iter; 2167 call_expr_arg_iterator iter;
2175 tree arg; 2168 tree arg;
2176 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) 2169 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2177 { 2170 {
2178 tree type = TREE_TYPE (arg); 2171 tree type = TREE_TYPE (arg);
2179 if (type && TREE_CODE (type) == COMPLEX_TYPE 2172 if (type && TREE_CODE (type) == COMPLEX_TYPE
2180 && targetm.calls.split_complex_arg (type)) 2173 && targetm.calls.split_complex_arg (type))
2181 num_complex_actuals++; 2174 num_complex_actuals++;
2182 } 2175 }
2183 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype)); 2176 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2184 } 2177 }
2185 else 2178 else
2186 type_arg_types = TYPE_ARG_TYPES (funtype); 2179 type_arg_types = TYPE_ARG_TYPES (funtype);
2187 2180
2192 as if it were an extra parameter. Put the argument expression 2185 as if it were an extra parameter. Put the argument expression
2193 in structure_value_addr_value. */ 2186 in structure_value_addr_value. */
2194 if (structure_value_addr && struct_value == 0) 2187 if (structure_value_addr && struct_value == 0)
2195 { 2188 {
2196 /* If structure_value_addr is a REG other than 2189 /* If structure_value_addr is a REG other than
2197 virtual_outgoing_args_rtx, we can use always use it. If it 2190 virtual_outgoing_args_rtx, we can use always use it. If it
2198 is not a REG, we must always copy it into a register. 2191 is not a REG, we must always copy it into a register.
2199 If it is virtual_outgoing_args_rtx, we must copy it to another 2192 If it is virtual_outgoing_args_rtx, we must copy it to another
2200 register in some cases. */ 2193 register in some cases. */
2201 rtx temp = (!REG_P (structure_value_addr) 2194 rtx temp = (!REG_P (structure_value_addr)
2202 || (ACCUMULATE_OUTGOING_ARGS 2195 || (ACCUMULATE_OUTGOING_ARGS
2203 && stack_arg_under_construction 2196 && stack_arg_under_construction
2204 && structure_value_addr == virtual_outgoing_args_rtx) 2197 && structure_value_addr == virtual_outgoing_args_rtx)
2205 ? copy_addr_to_reg (convert_memory_address 2198 ? copy_addr_to_reg (convert_memory_address
2206 (Pmode, structure_value_addr)) 2199 (Pmode, structure_value_addr))
2207 : structure_value_addr); 2200 : structure_value_addr);
2208 2201
2209 structure_value_addr_value = 2202 structure_value_addr_value =
2210 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); 2203 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2211 structure_value_addr_parm = 1; 2204 structure_value_addr_parm = 1;
2212 } 2205 }
2213 2206
2214 /* Count the arguments and set NUM_ACTUALS. */ 2207 /* Count the arguments and set NUM_ACTUALS. */
2215 num_actuals = 2208 num_actuals =
2219 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */ 2212 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2220 2213
2221 if (type_arg_types != 0) 2214 if (type_arg_types != 0)
2222 n_named_args 2215 n_named_args
2223 = (list_length (type_arg_types) 2216 = (list_length (type_arg_types)
2224 /* Count the struct value address, if it is passed as a parm. */ 2217 /* Count the struct value address, if it is passed as a parm. */
2225 + structure_value_addr_parm); 2218 + structure_value_addr_parm);
2226 else 2219 else
2227 /* If we know nothing, treat all args as named. */ 2220 /* If we know nothing, treat all args as named. */
2228 n_named_args = num_actuals; 2221 n_named_args = num_actuals;
2229 2222
2230 /* Start updating where the next arg would go. 2223 /* Start updating where the next arg would go.
2255 2248
2256 if (type_arg_types != 0 2249 if (type_arg_types != 0
2257 && targetm.calls.strict_argument_naming (&args_so_far)) 2250 && targetm.calls.strict_argument_naming (&args_so_far))
2258 ; 2251 ;
2259 else if (type_arg_types != 0 2252 else if (type_arg_types != 0
2260 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far)) 2253 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2261 /* Don't include the last named arg. */ 2254 /* Don't include the last named arg. */
2262 --n_named_args; 2255 --n_named_args;
2263 else 2256 else
2264 /* Treat all args as named. */ 2257 /* Treat all args as named. */
2265 n_named_args = num_actuals; 2258 n_named_args = num_actuals;
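
A concrete instance of the named-argument count just computed:

    /* Example: for a variadic call such as printf ("%d %d\n", 1, 2),
       type_arg_types lists only the format string, so n_named_args == 1
       and the two remaining actuals are unnamed, subject to the
       strict_argument_naming / pretend_outgoing_varargs_named hooks
       checked below.  */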
2269 memset (args, 0, num_actuals * sizeof (struct arg_data)); 2262 memset (args, 0, num_actuals * sizeof (struct arg_data));
2270 2263
2271 /* Build up entries in the ARGS array, compute the size of the 2264 /* Build up entries in the ARGS array, compute the size of the
2272 arguments into ARGS_SIZE, etc. */ 2265 arguments into ARGS_SIZE, etc. */
2273 initialize_argument_information (num_actuals, args, &args_size, 2266 initialize_argument_information (num_actuals, args, &args_size,
2274 n_named_args, exp, 2267 n_named_args, exp,
2275 structure_value_addr_value, fndecl, fntype, 2268 structure_value_addr_value, fndecl, fntype,
2276 &args_so_far, reg_parm_stack_space, 2269 &args_so_far, reg_parm_stack_space,
2277 &old_stack_level, &old_pending_adj, 2270 &old_stack_level, &old_pending_adj,
2278 &must_preallocate, &flags, 2271 &must_preallocate, &flags,
2279 &try_tail_call, CALL_FROM_THUNK_P (exp)); 2272 &try_tail_call, CALL_FROM_THUNK_P (exp));
2280 2273
2281 if (args_size.var) 2274 if (args_size.var)
2282 must_preallocate = 1; 2275 must_preallocate = 1;
2283 2276
2284 /* Now make final decision about preallocating stack space. */ 2277 /* Now make final decision about preallocating stack space. */
2285 must_preallocate = finalize_must_preallocate (must_preallocate, 2278 must_preallocate = finalize_must_preallocate (must_preallocate,
2286 num_actuals, args, 2279 num_actuals, args,
2287 &args_size); 2280 &args_size);
2288 2281
2289 /* If the structure value address will reference the stack pointer, we 2282 /* If the structure value address will reference the stack pointer, we
2290 must stabilize it. We don't need to do this if we know that we are 2283 must stabilize it. We don't need to do this if we know that we are
2291 not going to adjust the stack pointer in processing this call. */ 2284 not going to adjust the stack pointer in processing this call. */
2292 2285
2293 if (structure_value_addr 2286 if (structure_value_addr
2294 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) 2287 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2295 || reg_mentioned_p (virtual_outgoing_args_rtx, 2288 || reg_mentioned_p (virtual_outgoing_args_rtx,
2296 structure_value_addr)) 2289 structure_value_addr))
2297 && (args_size.var 2290 && (args_size.var
2298 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant))) 2291 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2299 structure_value_addr = copy_to_reg (structure_value_addr); 2292 structure_value_addr = copy_to_reg (structure_value_addr);
2300 2293
2301 /* Tail calls can make things harder to debug, and we've traditionally 2294 /* Tail calls can make things harder to debug, and we've traditionally
2302 pushed these optimizations into -O2. Don't try if we're already 2295 pushed these optimizations into -O2. Don't try if we're already
2303 expanding a call, as that means we're an argument. Don't try if 2296 expanding a call, as that means we're an argument. Don't try if
2304 there are cleanups, as we know there's code to follow the call. */ 2297 there are cleanups, as we know there's code to follow the call. */
2305 2298
2306 // Perform tail call optimization even when -O2 is not given (code segments only).
2307 if (currently_expanding_call++ != 0 2299 if (currently_expanding_call++ != 0
2308 #ifndef noCbC
2309 || ((!fndecl || !CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl))) && !flag_optimize_sibling_calls)
2310 #else
2311 || !flag_optimize_sibling_calls 2300 || !flag_optimize_sibling_calls
2312 #endif
2313 || args_size.var 2301 || args_size.var
2314 || dbg_cnt (tail_call) == false) 2302 || dbg_cnt (tail_call) == false)
2315 try_tail_call = 0; 2303 try_tail_call = 0;
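
The #ifndef noCbC clause above keeps sibcall expansion enabled for code segments even without -foptimize-sibling-calls; a hedged sketch in CbC syntax (assuming this fork's __code keyword and parametrized goto, names hypothetical):

    __code loop (int i, int limit)
    {
      if (i < limit)
        goto loop (i + 1, limit);  /* must be a sibcall at any -O level */
      goto finish (i);             /* hypothetical next code segment */
    }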
2316 2304
2317 /* Remaining reasons for tail call optimization to fail. */ 2305 /* Remaining reasons for tail call optimization to fail. */
2321 #else 2309 #else
2322 1 2310 1
2323 #endif 2311 #endif
2324 || !try_tail_call 2312 || !try_tail_call
2325 /* Doing sibling call optimization needs some work, since 2313 /* Doing sibling call optimization needs some work, since
2326 structure_value_addr can be allocated on the stack. 2314 structure_value_addr can be allocated on the stack.
2327 It does not seem worth the effort since few optimizable 2315 It does not seem worth the effort since few optimizable
2328 sibling calls will return a structure. */ 2316 sibling calls will return a structure. */
2329 || structure_value_addr != NULL_RTX 2317 || structure_value_addr != NULL_RTX
2330 #ifdef REG_PARM_STACK_SPACE 2318 #ifdef REG_PARM_STACK_SPACE
2331 /* If outgoing reg parm stack space changes, we can not do sibcall. */ 2319 /* If outgoing reg parm stack space changes, we can not do sibcall. */
2332 || (OUTGOING_REG_PARM_STACK_SPACE (funtype) 2320 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2333 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))) 2321 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2334 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl)) 2322 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2335 #endif 2323 #endif
2336 /* Check whether the target is able to optimize the call 2324 /* Check whether the target is able to optimize the call
2337 into a sibcall. */ 2325 into a sibcall. */
2338 || !targetm.function_ok_for_sibcall (fndecl, exp) 2326 || !targetm.function_ok_for_sibcall (fndecl, exp)
2339 /* Functions that do not return exactly once may not be sibcall 2327 /* Functions that do not return exactly once may not be sibcall
2340 optimized. */ 2328 optimized. */
2341 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN)) 2329 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2342 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))) 2330 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2343 /* If the called function is nested in the current one, it might access 2331 /* If the called function is nested in the current one, it might access
2344 some of the caller's arguments, but could clobber them beforehand if 2332 some of the caller's arguments, but could clobber them beforehand if
2345 the argument areas are shared. */ 2333 the argument areas are shared. */
2346 || (fndecl && decl_function_context (fndecl) == current_function_decl) 2334 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2347 /* If this function requires more stack slots than the current 2335 /* If this function requires more stack slots than the current
2348 function, we cannot change it into a sibling call. 2336 function, we cannot change it into a sibling call.
2349 crtl->args.pretend_args_size is not part of the 2337 crtl->args.pretend_args_size is not part of the
2350 stack allocated by our caller. */ 2338 stack allocated by our caller. */
2351 || args_size.constant > (crtl->args.size 2339 || args_size.constant > (crtl->args.size
2352 - crtl->args.pretend_args_size) 2340 - crtl->args.pretend_args_size)
2353 /* If the callee pops its own arguments, then it must pop exactly 2341 /* If the callee pops its own arguments, then it must pop exactly
2354 the same number of arguments as the current function. */ 2342 the same number of arguments as the current function. */
2355 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant) 2343 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2356 != targetm.calls.return_pops_args (current_function_decl, 2344 != targetm.calls.return_pops_args (current_function_decl,
2357 TREE_TYPE (current_function_decl), 2345 TREE_TYPE (current_function_decl),
2371 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res)); 2359 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2372 caller_mode = DECL_MODE (caller_res); 2360 caller_mode = DECL_MODE (caller_res);
2373 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); 2361 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2374 callee_mode = TYPE_MODE (TREE_TYPE (funtype)); 2362 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2375 caller_promoted_mode 2363 caller_promoted_mode
2376 = promote_function_mode (TREE_TYPE (caller_res), caller_mode, 2364 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2377 &caller_unsignedp, 2365 &caller_unsignedp,
2378 TREE_TYPE (current_function_decl), 1); 2366 TREE_TYPE (current_function_decl), 1);
2379 callee_promoted_mode 2367 callee_promoted_mode
2380 = promote_function_mode (TREE_TYPE (funtype), callee_mode, 2368 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2381 &callee_unsignedp, 2369 &callee_unsignedp,
2382 funtype, 1); 2370 funtype, 1);
2383 if (caller_mode != VOIDmode 2371 if (caller_mode != VOIDmode
2384 && (caller_promoted_mode != callee_promoted_mode 2372 && (caller_promoted_mode != callee_promoted_mode
2385 || ((caller_mode != caller_promoted_mode 2373 || ((caller_mode != caller_promoted_mode
2386 || callee_mode != callee_promoted_mode) 2374 || callee_mode != callee_promoted_mode)
2387 && (caller_unsignedp != callee_unsignedp 2375 && (caller_unsignedp != callee_unsignedp
2388 || GET_MODE_BITSIZE (caller_mode) 2376 || GET_MODE_BITSIZE (caller_mode)
2389 < GET_MODE_BITSIZE (callee_mode))))) 2377 < GET_MODE_BITSIZE (callee_mode)))))
2390 try_tail_call = 0; 2378 try_tail_call = 0;
2391 } 2379 }
2392 2380
2393 /* Ensure current function's preferred stack boundary is at least 2381 /* Ensure current function's preferred stack boundary is at least
2394 what we need. Stack alignment may also increase preferred stack 2382 what we need. Stack alignment may also increase preferred stack
2395 boundary. */ 2383 boundary. */
2398 else 2386 else
2399 preferred_stack_boundary = crtl->preferred_stack_boundary; 2387 preferred_stack_boundary = crtl->preferred_stack_boundary;
2400 2388
2401 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT; 2389 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
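
In byte terms, for example:

    /* E.g. PREFERRED_STACK_BOUNDARY == 128 bits gives 128 / 8 == 16
       byte units -- the usual 16-byte stack alignment on x86-64.  */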
2402 2390
2403 #ifndef noCbC
2404 if ( fntype
2405 && CbC_IS_CbC_GOTO (exp) // perhaps better than CALL_EXPR_TAILCALL()
2406 && CbC_IS_CODE_SEGMENT (TREE_TYPE (current_function_decl))
2407 )
2408 {
2409
2410 args_size.constant = CbC_PRETENDED_STACK_SIZE;
2411 // Force try_tail_call so that tail call optimization is always performed.
2412 // -> expand_cbc_goto is no longer needed.
2413 /* return expand_cbc_goto(exp, target, fndecl, funtype, fntype,
2414 * addr, ignore, flags, num_actuals, args, &args_size,
2415 * args_so_far,
2416 * old_stack_level, reg_parm_stack_space, old_pending_adj,
2417 * preferred_stack_boundary, preferred_unit_stack_boundary,
2418 * structure_value_addr, old_inhibit_defer_pop); */
2419 }
2420 else if ( CbC_IS_CbC_GOTO (exp) )
2421 {
2422 // TODO: transition from a function to a code segment
2423 /*
2424 if (fndecl)
2425 {
2426 char *name_callee = IDENTIFIER_POINTER(DECL_NAME(fndecl));
2427 warning(0, "no warning: code segment `%s' has been called from a function.", name_callee);
2428 }
2429 else
2430 {
2431 warning(0, "no warning: unnamed code segment has been called from a function.");
2432 }
2433 */
2434 args_size.constant = CbC_PRETENDED_STACK_SIZE;
2435 }
2436 else if ( fndecl && CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl)) )
2437 {
2438 // Warn: a code segment is being called as a function.
2439 //char *name= IDENTIFIER_POINTER(DECL_NAME(fndecl));
2440 //warning (0, "code segment `%s' has been \"called\" instead of \"goto\"-ed.", name);
2441 }
2442 else if (CbC_IS_CODE_SEGMENT(TREE_TYPE (current_function_decl)) )
2443 {
2444 // A normal function call from inside a code segment: nothing wrong here.
2445 //warning (0, "no warning: normal call from a code segment.");
2446 }
2447 #endif
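
Taken together, the branches above classify the caller/callee combinations; a hedged sketch of the two goto forms (CbC syntax as in this fork, names hypothetical):

    __code cs (int a, int b);  /* code segment declaration */

    void
    from_function (void)
    {
      goto cs (1, 2);    /* function -> code segment: second branch above,
                            args_size pinned to CbC_PRETENDED_STACK_SIZE */
    }

    __code from_segment (int a, int b)
    {
      goto cs (b, a);    /* code segment -> code segment: first branch */
    }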
2448
2449 // When the tail call optimization flag has been cleared, warn about it
2450 // and raise the flag again to force a tail call optimization.
2451 #ifndef noCbC
2452 if (fndecl && CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl))
2453 && CbC_IS_CODE_SEGMENT (TREE_TYPE (current_function_decl))
2454 && try_tail_call == 0)
2455 {
2456 location_t loc = EXPR_LOCATION (exp);
2457 const char *name_callee = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2458 warning_at (loc, 0, "transition to code segment \"%s\" by CbC goto, but tail call optimization was not applied; forcing it.",
2459 name_callee);
2460 try_tail_call = 1;
2461 }
2462 #endif
2463
2464 /* We want to make two insn chains; one for a sibling call, the other 2391 /* We want to make two insn chains; one for a sibling call, the other
2465 for a normal call. We will select one of the two chains after 2392 for a normal call. We will select one of the two chains after
2466 initial RTL generation is complete. */ 2393 initial RTL generation is complete. */
2467 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++) 2394 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2468 { 2395 {
2469 int sibcall_failure = 0; 2396 int sibcall_failure = 0;
2470 /* We want to emit any pending stack adjustments before the tail 2397 /* We want to emit any pending stack adjustments before the tail
2471 recursion "call". That way we know any adjustment after the tail 2398 recursion "call". That way we know any adjustment after the tail
2472 recursion call can be ignored if we indeed use the tail 2399 recursion call can be ignored if we indeed use the tail
2473 call expansion. */ 2400 call expansion. */
2474 int save_pending_stack_adjust = 0; 2401 int save_pending_stack_adjust = 0;
2475 int save_stack_pointer_delta = 0; 2402 int save_stack_pointer_delta = 0;
2476 rtx insns; 2403 rtx insns;
2477 rtx before_call, next_arg_reg, after_args; 2404 rtx before_call, next_arg_reg, after_args;
2478 2405
2479 if (pass == 0) 2406 if (pass == 0)
2480 { 2407 {
2481 /* State variables we need to save and restore between 2408 /* State variables we need to save and restore between
2482 iterations. */ 2409 iterations. */
2483 save_pending_stack_adjust = pending_stack_adjust; 2410 save_pending_stack_adjust = pending_stack_adjust;
2484 save_stack_pointer_delta = stack_pointer_delta; 2411 save_stack_pointer_delta = stack_pointer_delta;
2485 } 2412 }
2486 if (pass) 2413 if (pass)
2487 flags &= ~ECF_SIBCALL; 2414 flags &= ~ECF_SIBCALL;
2488 else 2415 else
2489 flags |= ECF_SIBCALL; 2416 flags |= ECF_SIBCALL;
2490 2417
2491 /* Other state variables that we must reinitialize each time 2418 /* Other state variables that we must reinitialize each time
2492 through the loop (that are not initialized by the loop itself). */ 2419 through the loop (that are not initialized by the loop itself). */
2493 argblock = 0; 2420 argblock = 0;
2494 call_fusage = 0; 2421 call_fusage = 0;
2495 2422
2496 /* Start a new sequence for the normal call case. 2423 /* Start a new sequence for the normal call case.
2497 2424
2498 From this point on, if the sibling call fails, we want to set 2425 From this point on, if the sibling call fails, we want to set
2499 sibcall_failure instead of continuing the loop. */ 2426 sibcall_failure instead of continuing the loop. */
2500 start_sequence (); 2427 start_sequence ();
2501 2428
2502 /* Don't let pending stack adjusts add up to too much. 2429 /* Don't let pending stack adjusts add up to too much.
2503 Also, do all pending adjustments now if there is any chance 2430 Also, do all pending adjustments now if there is any chance
2504 this might be a call to alloca or if we are expanding a sibling 2431 this might be a call to alloca or if we are expanding a sibling
2505 call sequence. 2432 call sequence.
2506 Also do the adjustments before a throwing call, otherwise 2433 Also do the adjustments before a throwing call, otherwise
2507 exception handling can fail; PR 19225. */ 2434 exception handling can fail; PR 19225. */
2508 if (pending_stack_adjust >= 32 2435 if (pending_stack_adjust >= 32
2509 || (pending_stack_adjust > 0 2436 || (pending_stack_adjust > 0
2510 && (flags & ECF_MAY_BE_ALLOCA)) 2437 && (flags & ECF_MAY_BE_ALLOCA))
2511 || (pending_stack_adjust > 0 2438 || (pending_stack_adjust > 0
2512 && flag_exceptions && !(flags & ECF_NOTHROW)) 2439 && flag_exceptions && !(flags & ECF_NOTHROW))
2513 || pass == 0) 2440 || pass == 0)
2514 do_pending_stack_adjust (); 2441 do_pending_stack_adjust ();
2515 2442
2516 /* Precompute any arguments as needed. */ 2443 /* Precompute any arguments as needed. */
2517 if (pass) 2444 if (pass)
2518 precompute_arguments (num_actuals, args); 2445 precompute_arguments (num_actuals, args);
2519 2446
2520 /* Now we are about to start emitting insns that can be deleted 2447 /* Now we are about to start emitting insns that can be deleted
2521 if a libcall is deleted. */ 2448 if a libcall is deleted. */
2522 if (pass && (flags & ECF_MALLOC)) 2449 if (pass && (flags & ECF_MALLOC))
2523 start_sequence (); 2450 start_sequence ();
2524 2451
2525 if (pass == 0 && crtl->stack_protect_guard) 2452 if (pass == 0 && crtl->stack_protect_guard)
2526 stack_protect_epilogue (); 2453 stack_protect_epilogue ();
2527 2454
2528 adjusted_args_size = args_size; 2455 adjusted_args_size = args_size;
2529 /* Compute the actual size of the argument block required. The variable 2456 /* Compute the actual size of the argument block required. The variable
2530 and constant sizes must be combined, the size may have to be rounded, 2457 and constant sizes must be combined, the size may have to be rounded,
2531 and there may be a minimum required size. When generating a sibcall 2458 and there may be a minimum required size. When generating a sibcall
2532 pattern, do not round up, since we'll be re-using whatever space our 2459 pattern, do not round up, since we'll be re-using whatever space our
2533 caller provided. */ 2460 caller provided. */
2534 #ifndef noCbC 2461 unadjusted_args_size
2535 if ( fntype && CbC_IS_CODE_SEGMENT(fntype) ) 2462 = compute_argument_block_size (reg_parm_stack_space,
2536 { 2463 &adjusted_args_size,
2537 unadjusted_args_size = args_size.constant; 2464 fndecl, fntype,
2538 adjusted_args_size.constant = CbC_PRETENDED_STACK_SIZE; 2465 (pass == 0 ? 0
2539 compute_argument_block_size (reg_parm_stack_space, 2466 : preferred_stack_boundary));
2540 &adjusted_args_size, 2467
2541 fndecl, fntype, 2468 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2542 (pass == 0 ? 0 2469
2543 : preferred_stack_boundary)); 2470 /* The argument block when performing a sibling call is the
2544 } 2471 incoming argument block. */
2545 else 2472 if (pass == 0)
2473 {
2474 argblock = crtl->args.internal_arg_pointer;
2475 argblock
2476 #ifdef STACK_GROWS_DOWNWARD
2477 = plus_constant (argblock, crtl->args.pretend_args_size);
2478 #else
2479 = plus_constant (argblock, -crtl->args.pretend_args_size);
2546 #endif 2480 #endif
2547 { 2481 stored_args_map = sbitmap_alloc (args_size.constant);
2548 unadjusted_args_size 2482 sbitmap_zero (stored_args_map);
2549 = compute_argument_block_size (reg_parm_stack_space, 2483 }
2550 &adjusted_args_size,
2551 fndecl, fntype,
2552 (pass == 0 ? 0
2553 : preferred_stack_boundary));
2554 }
2555
2556 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2557
2558 /* The argument block when performing a sibling call is the
2559 incoming argument block. */
2560 if (pass == 0)
2561 {
2562 argblock = crtl->args.internal_arg_pointer;
2563 argblock
2564 #ifdef STACK_GROWS_DOWNWARD
2565 = plus_constant (argblock, crtl->args.pretend_args_size);
2566 #else
2567 = plus_constant (argblock, -crtl->args.pretend_args_size);
2568 #endif
2569 stored_args_map = sbitmap_alloc (args_size.constant);
2570 sbitmap_zero (stored_args_map);
2571 }
2572 2484
2573 /* If we have no actual push instructions, or shouldn't use them, 2485 /* If we have no actual push instructions, or shouldn't use them,
2574 make space for all args right now. */ 2486 make space for all args right now. */
2575 else if (adjusted_args_size.var != 0) 2487 else if (adjusted_args_size.var != 0)
2576 { 2488 {
2577 if (old_stack_level == 0) 2489 if (old_stack_level == 0)
2578 { 2490 {
2579 emit_stack_save (SAVE_BLOCK, &old_stack_level); 2491 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2589 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0); 2501 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2590 if (flag_stack_usage) 2502 if (flag_stack_usage)
2591 current_function_has_unbounded_dynamic_stack_size = 1; 2503 current_function_has_unbounded_dynamic_stack_size = 1;
2592 } 2504 }
2593 else 2505 else
2594 { 2506 {
2595 /* Note that we must go through the motions of allocating an argument 2507 /* Note that we must go through the motions of allocating an argument
2596 block even if the size is zero because we may be storing args 2508 block even if the size is zero because we may be storing args
2597 in the area reserved for register arguments, which may be part of 2509 in the area reserved for register arguments, which may be part of
2598 the stack frame. */ 2510 the stack frame. */
2599 2511
2600 int needed = adjusted_args_size.constant; 2512 int needed = adjusted_args_size.constant;
2601 2513
2602 /* Store the maximum argument space used. It will be pushed by 2514 /* Store the maximum argument space used. It will be pushed by
2603 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow 2515 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2604 checking). */ 2516 checking). */
2605 2517
2606 if (needed > crtl->outgoing_args_size) 2518 if (needed > crtl->outgoing_args_size)
2607 crtl->outgoing_args_size = needed; 2519 crtl->outgoing_args_size = needed;
2608 2520
2609 if (must_preallocate) 2521 if (must_preallocate)
2610 { 2522 {
2611 if (ACCUMULATE_OUTGOING_ARGS) 2523 if (ACCUMULATE_OUTGOING_ARGS)
2612 { 2524 {
2613 /* Since the stack pointer will never be pushed, it is 2525 /* Since the stack pointer will never be pushed, it is
2614 possible for the evaluation of a parm to clobber 2526 possible for the evaluation of a parm to clobber
2615 something we have already written to the stack. 2527 something we have already written to the stack.
2616 Since most function calls on RISC machines do not use 2528 Since most function calls on RISC machines do not use
2617 the stack, this is uncommon, but must work correctly. 2529 the stack, this is uncommon, but must work correctly.
2618 2530
2619 Therefore, we save any area of the stack that was already 2531 Therefore, we save any area of the stack that was already
2620 written and that we are using. Here we set up to do this 2532 written and that we are using. Here we set up to do this
2621 by making a new stack usage map from the old one. The 2533 by making a new stack usage map from the old one. The
2622 actual save will be done by store_one_arg. 2534 actual save will be done by store_one_arg.
2623 2535
2624 Another approach might be to try to reorder the argument 2536 Another approach might be to try to reorder the argument
2625 evaluations to avoid this conflicting stack usage. */ 2537 evaluations to avoid this conflicting stack usage. */
2626 2538
2627 /* Since we will be writing into the entire argument area, 2539 /* Since we will be writing into the entire argument area,
2628 the map must be allocated for its entire size, not just 2540 the map must be allocated for its entire size, not just
2629 the part that is the responsibility of the caller. */ 2541 the part that is the responsibility of the caller. */
2630 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 2542 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2631 needed += reg_parm_stack_space; 2543 needed += reg_parm_stack_space;
2632 2544
2633 #ifdef ARGS_GROW_DOWNWARD 2545 #ifdef ARGS_GROW_DOWNWARD
2634 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, 2546 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2635 needed + 1); 2547 needed + 1);
2636 #else 2548 #else
2637 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, 2549 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2638 needed); 2550 needed);
2639 #endif 2551 #endif
2640 if (stack_usage_map_buf) 2552 if (stack_usage_map_buf)
2641 free (stack_usage_map_buf); 2553 free (stack_usage_map_buf);
2642 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); 2554 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2643 stack_usage_map = stack_usage_map_buf; 2555 stack_usage_map = stack_usage_map_buf;
2644 2556
2645 if (initial_highest_arg_in_use) 2557 if (initial_highest_arg_in_use)
2646 memcpy (stack_usage_map, initial_stack_usage_map, 2558 memcpy (stack_usage_map, initial_stack_usage_map,
2647 initial_highest_arg_in_use); 2559 initial_highest_arg_in_use);
2648 2560
2649 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) 2561 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2650 memset (&stack_usage_map[initial_highest_arg_in_use], 0, 2562 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2651 (highest_outgoing_arg_in_use 2563 (highest_outgoing_arg_in_use
2652 - initial_highest_arg_in_use)); 2564 - initial_highest_arg_in_use));
2653 needed = 0; 2565 needed = 0;
2654 2566
2655 /* The address of the outgoing argument list must not be 2567 /* The address of the outgoing argument list must not be
2656 copied to a register here, because argblock would be left 2568 copied to a register here, because argblock would be left
2657 pointing to the wrong place after the call to 2569 pointing to the wrong place after the call to
2658 allocate_dynamic_stack_space below. */ 2570 allocate_dynamic_stack_space below. */
2659 2571
2660 argblock = virtual_outgoing_args_rtx; 2572 argblock = virtual_outgoing_args_rtx;
2661 } 2573 }
2662 else 2574 else
2663 { 2575 {
2664 if (inhibit_defer_pop == 0) 2576 if (inhibit_defer_pop == 0)
2665 { 2577 {
2666 /* Try to reuse some or all of the pending_stack_adjust 2578 /* Try to reuse some or all of the pending_stack_adjust
2667 to get this space. */ 2579 to get this space. */
2668 needed 2580 needed
2669 = (combine_pending_stack_adjustment_and_call 2581 = (combine_pending_stack_adjustment_and_call
2670 (unadjusted_args_size, 2582 (unadjusted_args_size,
2671 &adjusted_args_size, 2583 &adjusted_args_size,
2672 preferred_unit_stack_boundary)); 2584 preferred_unit_stack_boundary));
2673 2585
2674 /* combine_pending_stack_adjustment_and_call computes 2586 /* combine_pending_stack_adjustment_and_call computes
2675 an adjustment before the arguments are allocated. 2587 an adjustment before the arguments are allocated.
2676 Account for them and see whether or not the stack 2588 Account for them and see whether or not the stack
2677 needs to go up or down. */ 2589 needs to go up or down. */
2678 needed = unadjusted_args_size - needed; 2590 needed = unadjusted_args_size - needed;
2679 2591
2680 if (needed < 0) 2592 if (needed < 0)
2681 { 2593 {
2682 /* We're releasing stack space. */ 2594 /* We're releasing stack space. */
2683 /* ??? We can avoid any adjustment at all if we're 2595 /* ??? We can avoid any adjustment at all if we're
2684 already aligned. FIXME. */ 2596 already aligned. FIXME. */
2685 pending_stack_adjust = -needed; 2597 pending_stack_adjust = -needed;
2686 do_pending_stack_adjust (); 2598 do_pending_stack_adjust ();
2687 needed = 0; 2599 needed = 0;
2688 } 2600 }
2689 else 2601 else
2690 /* We need to allocate space. We'll do that in 2602 /* We need to allocate space. We'll do that in
2691 push_block below. */ 2603 push_block below. */
2692 pending_stack_adjust = 0; 2604 pending_stack_adjust = 0;
2693 } 2605 }
2694 2606
2695 /* Special case this because overhead of `push_block' in 2607 /* Special case this because overhead of `push_block' in
2696 this case is non-trivial. */ 2608 this case is non-trivial. */
2697 if (needed == 0) 2609 if (needed == 0)
2698 argblock = virtual_outgoing_args_rtx; 2610 argblock = virtual_outgoing_args_rtx;
2699 else 2611 else
2700 { 2612 {
2701 argblock = push_block (GEN_INT (needed), 0, 0); 2613 argblock = push_block (GEN_INT (needed), 0, 0);
2702 #ifdef ARGS_GROW_DOWNWARD 2614 #ifdef ARGS_GROW_DOWNWARD
2703 argblock = plus_constant (argblock, needed); 2615 argblock = plus_constant (argblock, needed);
2704 #endif 2616 #endif
2705 } 2617 }
2706 2618
2707 /* We only really need to call `copy_to_reg' in the case 2619 /* We only really need to call `copy_to_reg' in the case
2708 where push insns are going to be used to pass ARGBLOCK 2620 where push insns are going to be used to pass ARGBLOCK
2709 to a function call in ARGS. In that case, the stack 2621 to a function call in ARGS. In that case, the stack
2710 pointer changes value from the allocation point to the 2622 pointer changes value from the allocation point to the
2711 call point, and hence the value of 2623 call point, and hence the value of
2712 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might 2624 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2713 as well always do it. */ 2625 as well always do it. */
2714 argblock = copy_to_reg (argblock); 2626 argblock = copy_to_reg (argblock);
2715 } 2627 }
2716 } 2628 }
2717 } 2629 }
2718 2630
2719 if (ACCUMULATE_OUTGOING_ARGS) 2631 if (ACCUMULATE_OUTGOING_ARGS)
2720 { 2632 {
2721 /* The save/restore code in store_one_arg handles all 2633 /* The save/restore code in store_one_arg handles all
2722 cases except one: a constructor call (including a C 2634 cases except one: a constructor call (including a C
2767 } 2679 }
2768 2680
2769 compute_argument_addresses (args, argblock, num_actuals); 2681 compute_argument_addresses (args, argblock, num_actuals);
2770 2682
2771 /* If we push args individually in reverse order, perform stack alignment 2683 /* If we push args individually in reverse order, perform stack alignment
2772 before the first push (the last arg). */ 2684 before the first push (the last arg). */
2773 if (PUSH_ARGS_REVERSED && argblock == 0 2685 if (PUSH_ARGS_REVERSED && argblock == 0
2774 && adjusted_args_size.constant != unadjusted_args_size) 2686 && adjusted_args_size.constant != unadjusted_args_size)
2775 { 2687 {
2776 /* When the stack adjustment is pending, we get better code 2688 /* When the stack adjustment is pending, we get better code
2777 by combining the adjustments. */ 2689 by combining the adjustments. */
2778 if (pending_stack_adjust 2690 if (pending_stack_adjust
2779 && ! inhibit_defer_pop) 2691 && ! inhibit_defer_pop)
2780 { 2692 {
2781 pending_stack_adjust 2693 pending_stack_adjust
2782 = (combine_pending_stack_adjustment_and_call 2694 = (combine_pending_stack_adjustment_and_call
2783 (unadjusted_args_size, 2695 (unadjusted_args_size,
2784 &adjusted_args_size, 2696 &adjusted_args_size,
2785 preferred_unit_stack_boundary)); 2697 preferred_unit_stack_boundary));
2786 do_pending_stack_adjust (); 2698 do_pending_stack_adjust ();
2787 } 2699 }
2788 else if (argblock == 0) 2700 else if (argblock == 0)
2789 anti_adjust_stack (GEN_INT (adjusted_args_size.constant 2701 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2790 - unadjusted_args_size)); 2702 - unadjusted_args_size));
2791 } 2703 }
2792 /* Now that the stack is properly aligned, pops can't safely 2704 /* Now that the stack is properly aligned, pops can't safely
2793 be deferred during the evaluation of the arguments. */ 2705 be deferred during the evaluation of the arguments. */
2794 NO_DEFER_POP; 2706 NO_DEFER_POP;
2795 2707
2796 /* Record the maximum pushed stack space size. We need to delay 2708 /* Record the maximum pushed stack space size. We need to delay
2797 doing it this far to take into account the optimization done 2709 doing it this far to take into account the optimization done
2798 by combine_pending_stack_adjustment_and_call. */ 2710 by combine_pending_stack_adjustment_and_call. */
2809 funexp = rtx_for_function_call (fndecl, addr); 2721 funexp = rtx_for_function_call (fndecl, addr);
2810 2722
2811 /* Figure out the register where the value, if any, will come back. */ 2723 /* Figure out the register where the value, if any, will come back. */
2812 valreg = 0; 2724 valreg = 0;
2813 if (TYPE_MODE (rettype) != VOIDmode 2725 if (TYPE_MODE (rettype) != VOIDmode
2814 && ! structure_value_addr) 2726 && ! structure_value_addr)
2815 { 2727 {
2816 if (pcc_struct_value) 2728 if (pcc_struct_value)
2817 valreg = hard_function_value (build_pointer_type (rettype), 2729 valreg = hard_function_value (build_pointer_type (rettype),
2818 fndecl, NULL, (pass == 0)); 2730 fndecl, NULL, (pass == 0));
2819 else 2731 else
2820 valreg = hard_function_value (rettype, fndecl, fntype, 2732 valreg = hard_function_value (rettype, fndecl, fntype,
2821 (pass == 0)); 2733 (pass == 0));
2822 2734
2823 /* If VALREG is a PARALLEL whose first member has a zero 2735 /* If VALREG is a PARALLEL whose first member has a zero
2824 offset, use that. This is for targets such as m68k that 2736 offset, use that. This is for targets such as m68k that
2825 return the same value in multiple places. */ 2737 return the same value in multiple places. */
2826 if (GET_CODE (valreg) == PARALLEL) 2738 if (GET_CODE (valreg) == PARALLEL)
2827 { 2739 {
2828 rtx elem = XVECEXP (valreg, 0, 0); 2740 rtx elem = XVECEXP (valreg, 0, 0);
2829 rtx where = XEXP (elem, 0); 2741 rtx where = XEXP (elem, 0);
2830 rtx offset = XEXP (elem, 1); 2742 rtx offset = XEXP (elem, 1);
2831 if (offset == const0_rtx 2743 if (offset == const0_rtx
2832 && GET_MODE (where) == GET_MODE (valreg)) 2744 && GET_MODE (where) == GET_MODE (valreg))
2833 valreg = where; 2745 valreg = where;
2834 } 2746 }
2835 } 2747 }
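	  /* Illustrative aside: on a target that returns the same value in
	     two places, VALREG might look roughly like

	         (parallel [(expr_list (reg:SI %d0) (const_int 0))
	                    (expr_list (reg:SI %a0) (const_int 0))])

	     The code above notices that the first element sits at offset 0
	     in the same mode as the whole PARALLEL and simply uses that
	     register.  The register names are illustrative (m68k-style,
	     matching the comment above).  */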
2836 2748
2837 /* Precompute all register parameters. It isn't safe to compute anything 2749 /* Precompute all register parameters. It isn't safe to compute anything
2838 once we have started filling any specific hard regs. */ 2750 once we have started filling any specific hard regs. */
2839 precompute_register_parameters (num_actuals, args, &reg_parm_seen); 2751 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2840 2752
2841 if (CALL_EXPR_STATIC_CHAIN (exp)) 2753 if (CALL_EXPR_STATIC_CHAIN (exp))
2842 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); 2754 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2843 else 2755 else
2844 static_chain_value = 0; 2756 static_chain_value = 0;
2845 2757
2846 #ifdef REG_PARM_STACK_SPACE 2758 #ifdef REG_PARM_STACK_SPACE
2847 /* Save the fixed argument area if it's part of the caller's frame and 2759 /* Save the fixed argument area if it's part of the caller's frame and
2848 is clobbered by argument setup for this call. */ 2760 is clobbered by argument setup for this call. */
2849 if (ACCUMULATE_OUTGOING_ARGS && pass) 2761 if (ACCUMULATE_OUTGOING_ARGS && pass)
2850 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, 2762 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2851 &low_to_save, &high_to_save); 2763 &low_to_save, &high_to_save);
2852 #endif 2764 #endif
2853 2765
2854 /* Now store (and compute if necessary) all non-register parms. 2766 /* Now store (and compute if necessary) all non-register parms.
2855 These come before register parms, since they can require block-moves, 2767 These come before register parms, since they can require block-moves,
2856 which could clobber the registers used for register parms. 2768 which could clobber the registers used for register parms.
2857 Parms which have partial registers are not stored here, 2769 Parms which have partial registers are not stored here,
2858 but we do preallocate space here if they want that. */ 2770 but we do preallocate space here if they want that. */
2859 2771
2860 for (i = 0; i < num_actuals; i++) 2772 for (i = 0; i < num_actuals; i++)
2861 { 2773 {
2862 if (args[i].reg == 0 || args[i].pass_on_stack) 2774 if (args[i].reg == 0 || args[i].pass_on_stack)
2863 { 2775 {
2864 rtx before_arg = get_last_insn (); 2776 rtx before_arg = get_last_insn ();
2865 2777
2866 if (store_one_arg (&args[i], argblock, flags, 2778 if (store_one_arg (&args[i], argblock, flags,
2867 adjusted_args_size.var != 0, 2779 adjusted_args_size.var != 0,
2868 reg_parm_stack_space) 2780 reg_parm_stack_space)
2869 || (pass == 0 2781 || (pass == 0
2870 && check_sibcall_argument_overlap (before_arg, 2782 && check_sibcall_argument_overlap (before_arg,
2871 &args[i], 1))) 2783 &args[i], 1)))
2872 sibcall_failure = 1; 2784 sibcall_failure = 1;
2873 } 2785 }
2874 2786
2875 if (((flags & ECF_CONST) 2787 if (((flags & ECF_CONST)
2876 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) 2788 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2877 && args[i].stack) 2789 && args[i].stack)
2878 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, 2790 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2879 gen_rtx_USE (VOIDmode, 2791 gen_rtx_USE (VOIDmode,
2880 args[i].stack), 2792 args[i].stack),
2881 call_fusage); 2793 call_fusage);
2882 } 2794 }
2883 2795
2884 /* If we have a parm that is passed in registers but not in memory 2796 /* If we have a parm that is passed in registers but not in memory
2885 and whose alignment does not permit a direct copy into registers, 2797 and whose alignment does not permit a direct copy into registers,
2886 make a group of pseudos that correspond to each register that we 2798 make a group of pseudos that correspond to each register that we
2887 will later fill. */ 2799 will later fill. */
2888 if (STRICT_ALIGNMENT) 2800 if (STRICT_ALIGNMENT)
2889 store_unaligned_arguments_into_pseudos (args, num_actuals); 2801 store_unaligned_arguments_into_pseudos (args, num_actuals);
2890 2802
2891 /* Now store any partially-in-registers parm. 2803 /* Now store any partially-in-registers parm.
2892 This is the last place a block-move can happen. */ 2804 This is the last place a block-move can happen. */
2893 if (reg_parm_seen) 2805 if (reg_parm_seen)
2894 for (i = 0; i < num_actuals; i++) 2806 for (i = 0; i < num_actuals; i++)
2895 if (args[i].partial != 0 && ! args[i].pass_on_stack) 2807 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2896 { 2808 {
2897 rtx before_arg = get_last_insn (); 2809 rtx before_arg = get_last_insn ();
2898 2810
2899 if (store_one_arg (&args[i], argblock, flags, 2811 if (store_one_arg (&args[i], argblock, flags,
2900 adjusted_args_size.var != 0, 2812 adjusted_args_size.var != 0,
2901 reg_parm_stack_space) 2813 reg_parm_stack_space)
2902 || (pass == 0 2814 || (pass == 0
2903 && check_sibcall_argument_overlap (before_arg, 2815 && check_sibcall_argument_overlap (before_arg,
2904 &args[i], 1))) 2816 &args[i], 1)))
2905 sibcall_failure = 1; 2817 sibcall_failure = 1;
2906 } 2818 }
2907 2819
2908 /* If we pushed args in forward order, perform stack alignment 2820 /* If we pushed args in forward order, perform stack alignment
2909 after pushing the last arg. */ 2821 after pushing the last arg. */
2910 if (!PUSH_ARGS_REVERSED && argblock == 0) 2822 if (!PUSH_ARGS_REVERSED && argblock == 0)
2911 anti_adjust_stack (GEN_INT (adjusted_args_size.constant 2823 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2912 - unadjusted_args_size)); 2824 - unadjusted_args_size));
2913 2825
2914 /* If register arguments require space on the stack and stack space 2826 /* If register arguments require space on the stack and stack space
2915 was not preallocated, allocate stack space here for arguments 2827 was not preallocated, allocate stack space here for arguments
2916 passed in registers. */ 2828 passed in registers. */
2917 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) 2829 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2918 && !ACCUMULATE_OUTGOING_ARGS 2830 && !ACCUMULATE_OUTGOING_ARGS
2919 && must_preallocate == 0 && reg_parm_stack_space > 0) 2831 && must_preallocate == 0 && reg_parm_stack_space > 0)
2920 anti_adjust_stack (GEN_INT (reg_parm_stack_space)); 2832 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2921 2833
2922 /* Pass the function the address in which to return a 2834 /* Pass the function the address in which to return a
2923 structure value. */ 2835 structure value. */
2924 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm) 2836 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2925 { 2837 {
2926 structure_value_addr 2838 structure_value_addr
2927 = convert_memory_address (Pmode, structure_value_addr); 2839 = convert_memory_address (Pmode, structure_value_addr);
2928 emit_move_insn (struct_value, 2840 emit_move_insn (struct_value,
2929 force_reg (Pmode, 2841 force_reg (Pmode,
2930 force_operand (structure_value_addr, 2842 force_operand (structure_value_addr,
2931 NULL_RTX))); 2843 NULL_RTX)));
2932 2844
2933 if (REG_P (struct_value)) 2845 if (REG_P (struct_value))
2934 use_reg (&call_fusage, struct_value); 2846 use_reg (&call_fusage, struct_value);
2935 } 2847 }
2936 2848
2937 after_args = get_last_insn (); 2849 after_args = get_last_insn ();
2938 funexp = prepare_call_address (fndecl, funexp, static_chain_value, 2850 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
2939 &call_fusage, reg_parm_seen, pass == 0); 2851 &call_fusage, reg_parm_seen, pass == 0);
2940 2852
2941 load_register_parameters (args, num_actuals, &call_fusage, flags, 2853 load_register_parameters (args, num_actuals, &call_fusage, flags,
2942 pass == 0, &sibcall_failure); 2854 pass == 0, &sibcall_failure);
2943 2855
2944 /* Save a pointer to the last insn before the call, so that we can 2856 /* Save a pointer to the last insn before the call, so that we can
2945 later safely search backwards to find the CALL_INSN. */ 2857 later safely search backwards to find the CALL_INSN. */
2946 before_call = get_last_insn (); 2858 before_call = get_last_insn ();
2947 2859
2948 /* Set up next argument register. For sibling calls on machines 2860 /* Set up next argument register. For sibling calls on machines
2949 with register windows this should be the incoming register. */ 2861 with register windows this should be the incoming register. */
2950 if (pass == 0) 2862 if (pass == 0)
2956 next_arg_reg = targetm.calls.function_arg (&args_so_far, 2868 next_arg_reg = targetm.calls.function_arg (&args_so_far,
2957 VOIDmode, void_type_node, 2869 VOIDmode, void_type_node,
2958 true); 2870 true);
2959 2871
2960 /* All arguments and registers used for the call must be set up by 2872 /* All arguments and registers used for the call must be set up by
2961 now! */ 2873 now! */
2962 2874
2963 /* Stack must be properly aligned now. */ 2875 /* Stack must be properly aligned now. */
2964 gcc_assert (!pass 2876 gcc_assert (!pass
2965 || !(stack_pointer_delta % preferred_unit_stack_boundary)); 2877 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2966 2878
2967 /* Generate the actual call instruction. */ 2879 /* Generate the actual call instruction. */
2968 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, 2880 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2969 adjusted_args_size.constant, struct_value_size, 2881 adjusted_args_size.constant, struct_value_size,
2970 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, 2882 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2971 flags, & args_so_far); 2883 flags, & args_so_far);
2972 2884
2973 /* If the call setup or the call itself overlaps with anything 2885 /* If the call setup or the call itself overlaps with anything
2974 of the argument setup we probably clobbered our call address. 2886 of the argument setup we probably clobbered our call address.
2975 In that case we can't do sibcalls. */ 2887 In that case we can't do sibcalls. */
2976 if (pass == 0 2888 if (pass == 0
2977 && check_sibcall_argument_overlap (after_args, 0, 0)) 2889 && check_sibcall_argument_overlap (after_args, 0, 0))
2978 sibcall_failure = 1; 2890 sibcall_failure = 1;
2979 2891
2980 /* If a non-BLKmode value is returned at the most significant end 2892 /* If a non-BLKmode value is returned at the most significant end
2981 of a register, shift the register right by the appropriate amount 2893 of a register, shift the register right by the appropriate amount
2982 and update VALREG accordingly. BLKmode values are handled by the 2894 and update VALREG accordingly. BLKmode values are handled by the
2983 group load/store machinery below. */ 2895 group load/store machinery below. */
2984 if (!structure_value_addr 2896 if (!structure_value_addr
2985 && !pcc_struct_value 2897 && !pcc_struct_value
2986 && TYPE_MODE (rettype) != BLKmode 2898 && TYPE_MODE (rettype) != BLKmode
2987 && targetm.calls.return_in_msb (rettype)) 2899 && targetm.calls.return_in_msb (rettype))
2988 { 2900 {
2989 if (shift_return_value (TYPE_MODE (rettype), false, valreg)) 2901 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
2990 sibcall_failure = 1; 2902 sibcall_failure = 1;
2991 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); 2903 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
2992 } 2904 }
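	  /* Worked example (hypothetical target): a 1-byte value returned
	     in the most significant end of a 4-byte register must be
	     shifted right by (4 - 1) * 8 = 24 bits before it can be read
	     as a QImode value at the register's low end.
	     shift_return_value emits that shift, and VALREG is then
	     re-created in QImode on the same hard register.  */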
2993 2905
2994 if (pass && (flags & ECF_MALLOC)) 2906 if (pass && (flags & ECF_MALLOC))
2995 { 2907 {
2996 rtx temp = gen_reg_rtx (GET_MODE (valreg)); 2908 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2997 rtx last, insns; 2909 rtx last, insns;
2998 2910
2999 /* The return value from a malloc-like function is a pointer. */ 2911 /* The return value from a malloc-like function is a pointer. */
3000 if (TREE_CODE (rettype) == POINTER_TYPE) 2912 if (TREE_CODE (rettype) == POINTER_TYPE)
3001 mark_reg_pointer (temp, BIGGEST_ALIGNMENT); 2913 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3002 2914
3003 emit_move_insn (temp, valreg); 2915 emit_move_insn (temp, valreg);
3004 2916
3005 /* The return value from a malloc-like function cannot alias 2917 /* The return value from a malloc-like function cannot alias
3006 anything else. */ 2918 anything else. */
3007 last = get_last_insn (); 2919 last = get_last_insn ();
3008 add_reg_note (last, REG_NOALIAS, temp); 2920 add_reg_note (last, REG_NOALIAS, temp);
3009 2921
3010 /* Write out the sequence. */ 2922 /* Write out the sequence. */
3011 insns = get_insns (); 2923 insns = get_insns ();
3012 end_sequence (); 2924 end_sequence ();
3013 emit_insn (insns); 2925 emit_insn (insns);
3014 valreg = temp; 2926 valreg = temp;
3015 } 2927 }
3016 2928
3017 /* For calls to `setjmp', etc., inform 2929 /* For calls to `setjmp', etc., inform
3018 function.c:setjmp_warnings that it should complain if 2930 function.c:setjmp_warnings that it should complain if
3019 nonvolatile values are live. For functions that cannot 2931 nonvolatile values are live. For functions that cannot
3020 return, inform flow that control does not fall through. */ 2932 return, inform flow that control does not fall through. */
3021 2933
3022 if ((flags & ECF_NORETURN) || pass == 0) 2934 if ((flags & ECF_NORETURN) || pass == 0)
3023 { 2935 {
3024 /* The barrier must be emitted 2936 /* The barrier must be emitted
3025 immediately after the CALL_INSN. Some ports emit more 2937 immediately after the CALL_INSN. Some ports emit more
3026 than just a CALL_INSN above, so we must search for it here. */ 2938 than just a CALL_INSN above, so we must search for it here. */
3027 2939
3028 rtx last = get_last_insn (); 2940 rtx last = get_last_insn ();
3029 while (!CALL_P (last)) 2941 while (!CALL_P (last))
3030 { 2942 {
3031 last = PREV_INSN (last); 2943 last = PREV_INSN (last);
3032 /* There was no CALL_INSN? */ 2944 /* There was no CALL_INSN? */
3033 gcc_assert (last != before_call); 2945 gcc_assert (last != before_call);
3034 } 2946 }
3035 2947
3036 emit_barrier_after (last); 2948 emit_barrier_after (last);
3037 2949
3038 /* Stack adjustments after a noreturn call are dead code. 2950 /* Stack adjustments after a noreturn call are dead code.
3039 However when NO_DEFER_POP is in effect, we must preserve 2951 However when NO_DEFER_POP is in effect, we must preserve
3040 stack_pointer_delta. */ 2952 stack_pointer_delta. */
3041 if (inhibit_defer_pop == 0) 2953 if (inhibit_defer_pop == 0)
3042 { 2954 {
3043 stack_pointer_delta = old_stack_allocated; 2955 stack_pointer_delta = old_stack_allocated;
3044 pending_stack_adjust = 0; 2956 pending_stack_adjust = 0;
3045 } 2957 }
3046 } 2958 }
3047 2959
3048 /* If the value type is not void, return an rtx for the value. */ 2960 /* If the value type is not void, return an rtx for the value. */
3049 2961
3050 if (TYPE_MODE (rettype) == VOIDmode 2962 if (TYPE_MODE (rettype) == VOIDmode
3051 || ignore) 2963 || ignore)
3052 target = const0_rtx; 2964 target = const0_rtx;
3053 else if (structure_value_addr) 2965 else if (structure_value_addr)
3054 { 2966 {
3055 if (target == 0 || !MEM_P (target)) 2967 if (target == 0 || !MEM_P (target))
3056 { 2968 {
3057 target 2969 target
3058 = gen_rtx_MEM (TYPE_MODE (rettype), 2970 = gen_rtx_MEM (TYPE_MODE (rettype),
3059 memory_address (TYPE_MODE (rettype), 2971 memory_address (TYPE_MODE (rettype),
3060 structure_value_addr)); 2972 structure_value_addr));
3061 set_mem_attributes (target, rettype, 1); 2973 set_mem_attributes (target, rettype, 1);
3062 } 2974 }
3063 } 2975 }
3064 else if (pcc_struct_value) 2976 else if (pcc_struct_value)
3065 { 2977 {
3066 /* This is the special C++ case where we need to 2978 /* This is the special C++ case where we need to
3067 know what the true target was. We take care to 2979 know what the true target was. We take care to
3068 never use this value more than once in one expression. */ 2980 never use this value more than once in one expression. */
3069 target = gen_rtx_MEM (TYPE_MODE (rettype), 2981 target = gen_rtx_MEM (TYPE_MODE (rettype),
3070 copy_to_reg (valreg)); 2982 copy_to_reg (valreg));
3071 set_mem_attributes (target, rettype, 1); 2983 set_mem_attributes (target, rettype, 1);
3072 } 2984 }
3073 /* Handle calls that return values in multiple non-contiguous locations. 2985 /* Handle calls that return values in multiple non-contiguous locations.
3074 The Irix 6 ABI has examples of this. */ 2986 The Irix 6 ABI has examples of this. */
3075 else if (GET_CODE (valreg) == PARALLEL) 2987 else if (GET_CODE (valreg) == PARALLEL)
3076 { 2988 {
3077 if (target == 0) 2989 if (target == 0)
3078 { 2990 {
3079 /* This will only be assigned once, so it can be readonly. */ 2991 /* This will only be assigned once, so it can be readonly. */
3080 tree nt = build_qualified_type (rettype, 2992 tree nt = build_qualified_type (rettype,
3081 (TYPE_QUALS (rettype) 2993 (TYPE_QUALS (rettype)
3082 | TYPE_QUAL_CONST)); 2994 | TYPE_QUAL_CONST));
3083 2995
3084 target = assign_temp (nt, 0, 1, 1); 2996 target = assign_temp (nt, 0, 1, 1);
3085 } 2997 }
3086 2998
3087 if (! rtx_equal_p (target, valreg)) 2999 if (! rtx_equal_p (target, valreg))
3088 emit_group_store (target, valreg, rettype, 3000 emit_group_store (target, valreg, rettype,
3089 int_size_in_bytes (rettype)); 3001 int_size_in_bytes (rettype));
3090 3002
3091 /* We cannot support sibling calls for this case. */ 3003 /* We cannot support sibling calls for this case. */
3092 sibcall_failure = 1; 3004 sibcall_failure = 1;
3093 } 3005 }
3094 else if (target 3006 else if (target
3095 && GET_MODE (target) == TYPE_MODE (rettype) 3007 && GET_MODE (target) == TYPE_MODE (rettype)
3096 && GET_MODE (target) == GET_MODE (valreg)) 3008 && GET_MODE (target) == GET_MODE (valreg))
3097 { 3009 {
3098 bool may_overlap = false; 3010 bool may_overlap = false;
3099 3011
3100 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard 3012 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3101 reg to a plain register. */ 3013 reg to a plain register. */
3102 if (!REG_P (target) || HARD_REGISTER_P (target)) 3014 if (!REG_P (target) || HARD_REGISTER_P (target))
3103 valreg = avoid_likely_spilled_reg (valreg); 3015 valreg = avoid_likely_spilled_reg (valreg);
3104 3016
3105 /* If TARGET is a MEM in the argument area, and we have 3017 /* If TARGET is a MEM in the argument area, and we have
3106 saved part of the argument area, then we can't store 3018 saved part of the argument area, then we can't store
3107 directly into TARGET as it may get overwritten when we 3019 directly into TARGET as it may get overwritten when we
3108 restore the argument save area below. Don't work too 3020 restore the argument save area below. Don't work too
3109 hard though and simply force TARGET to a register if it 3021 hard though and simply force TARGET to a register if it
3110 is a MEM; the optimizer is quite likely to sort it out. */ 3022 is a MEM; the optimizer is quite likely to sort it out. */
3111 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) 3023 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3112 for (i = 0; i < num_actuals; i++) 3024 for (i = 0; i < num_actuals; i++)
3113 if (args[i].save_area) 3025 if (args[i].save_area)
3114 { 3026 {
3115 may_overlap = true; 3027 may_overlap = true;
3116 break; 3028 break;
3117 } 3029 }
3118 3030
3119 if (may_overlap) 3031 if (may_overlap)
3120 target = copy_to_reg (valreg); 3032 target = copy_to_reg (valreg);
3121 else 3033 else
3122 { 3034 {
3123 /* TARGET and VALREG cannot be equal at this point 3035 /* TARGET and VALREG cannot be equal at this point
3124 because the latter would not have 3036 because the latter would not have
3125 REG_FUNCTION_VALUE_P true, while the former would if 3037 REG_FUNCTION_VALUE_P true, while the former would if
3126 it were referring to the same register. 3038 it were referring to the same register.
3127 3039
3128 If they refer to the same register, this move will be 3040 If they refer to the same register, this move will be
3129 a no-op, except when function inlining is being 3041 a no-op, except when function inlining is being
3130 done. */ 3042 done. */
3131 emit_move_insn (target, valreg); 3043 emit_move_insn (target, valreg);
3132 3044
3133 /* If we are setting a MEM, this code must be executed. 3045 /* If we are setting a MEM, this code must be executed.
3134 Since it is emitted after the call insn, sibcall 3046 Since it is emitted after the call insn, sibcall
3135 optimization cannot be performed in that case. */ 3047 optimization cannot be performed in that case. */
3136 if (MEM_P (target)) 3048 if (MEM_P (target))
3137 sibcall_failure = 1; 3049 sibcall_failure = 1;
3138 } 3050 }
3139 } 3051 }
3140 else if (TYPE_MODE (rettype) == BLKmode) 3052 else if (TYPE_MODE (rettype) == BLKmode)
3141 { 3053 {
3142 rtx val = valreg; 3054 rtx val = valreg;
3143 if (GET_MODE (val) != BLKmode) 3055 if (GET_MODE (val) != BLKmode)
3144 val = avoid_likely_spilled_reg (val); 3056 val = avoid_likely_spilled_reg (val);
3145 target = copy_blkmode_from_reg (target, val, rettype); 3057 target = copy_blkmode_from_reg (target, val, rettype);
3146 3058
3147 /* We cannot support sibling calls for this case. */ 3059 /* We cannot support sibling calls for this case. */
3148 sibcall_failure = 1; 3060 sibcall_failure = 1;
3149 } 3061 }
3150 else 3062 else
3151 target = copy_to_reg (avoid_likely_spilled_reg (valreg)); 3063 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3152 3064
3153 /* If we promoted this return value, make the proper SUBREG. 3065 /* If we promoted this return value, make the proper SUBREG.
3154 TARGET might be const0_rtx here, so be careful. */ 3066 TARGET might be const0_rtx here, so be careful. */
3155 if (REG_P (target) 3067 if (REG_P (target)
3156 && TYPE_MODE (rettype) != BLKmode 3068 && TYPE_MODE (rettype) != BLKmode
3157 && GET_MODE (target) != TYPE_MODE (rettype)) 3069 && GET_MODE (target) != TYPE_MODE (rettype))
3158 { 3070 {
3159 tree type = rettype; 3071 tree type = rettype;
3160 int unsignedp = TYPE_UNSIGNED (type); 3072 int unsignedp = TYPE_UNSIGNED (type);
3161 int offset = 0; 3073 int offset = 0;
3162 enum machine_mode pmode; 3074 enum machine_mode pmode;
3163 3075
3164 /* Ensure we promote as expected, and get the new unsignedness. */ 3076 /* Ensure we promote as expected, and get the new unsignedness. */
3165 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, 3077 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3166 funtype, 1); 3078 funtype, 1);
3167 gcc_assert (GET_MODE (target) == pmode); 3079 gcc_assert (GET_MODE (target) == pmode);
3168 3080
3169 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) 3081 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3170 && (GET_MODE_SIZE (GET_MODE (target)) 3082 && (GET_MODE_SIZE (GET_MODE (target))
3171 > GET_MODE_SIZE (TYPE_MODE (type)))) 3083 > GET_MODE_SIZE (TYPE_MODE (type))))
3172 { 3084 {
3173 offset = GET_MODE_SIZE (GET_MODE (target)) 3085 offset = GET_MODE_SIZE (GET_MODE (target))
3174 - GET_MODE_SIZE (TYPE_MODE (type)); 3086 - GET_MODE_SIZE (TYPE_MODE (type));
3175 if (! BYTES_BIG_ENDIAN) 3087 if (! BYTES_BIG_ENDIAN)
3176 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; 3088 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3177 else if (! WORDS_BIG_ENDIAN) 3089 else if (! WORDS_BIG_ENDIAN)
3178 offset %= UNITS_PER_WORD; 3090 offset %= UNITS_PER_WORD;
3179 } 3091 }
3180 3092
3181 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); 3093 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3182 SUBREG_PROMOTED_VAR_P (target) = 1; 3094 SUBREG_PROMOTED_VAR_P (target) = 1;
3183 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); 3095 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3184 } 3096 }
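	  /* Worked example (illustrative, assuming 4-byte words): a QImode
	     (1-byte) value promoted to an SImode register gives
	     offset = 4 - 1 = 3 above.  Then:
	       - WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN: offset stays 3, the
	         value lives in the register's last byte;
	       - WORDS_BIG_ENDIAN only: offset = (3 / 4) * 4 = 0, keeping
	         just the whole-word part;
	       - BYTES_BIG_ENDIAN only: offset = 3 % 4 = 3, keeping just
	         the within-word part.
	     A target little-endian in both senses never reaches this
	     block, so its offset is simply 0.  */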
3185 3097
3186 /* If size of args is variable or this was a constructor call for a stack 3098 /* If size of args is variable or this was a constructor call for a stack
3187 argument, restore saved stack-pointer value. */ 3099 argument, restore saved stack-pointer value. */
3188 3100
3189 if (old_stack_level) 3101 if (old_stack_level)
3190 { 3102 {
3191 emit_stack_restore (SAVE_BLOCK, old_stack_level); 3103 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3192 stack_pointer_delta = old_stack_pointer_delta; 3104 stack_pointer_delta = old_stack_pointer_delta;
3196 highest_outgoing_arg_in_use = initial_highest_arg_in_use; 3108 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3197 stack_usage_map = initial_stack_usage_map; 3109 stack_usage_map = initial_stack_usage_map;
3198 sibcall_failure = 1; 3110 sibcall_failure = 1;
3199 } 3111 }
3200 else if (ACCUMULATE_OUTGOING_ARGS && pass) 3112 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3201 { 3113 {
3202 #ifdef REG_PARM_STACK_SPACE 3114 #ifdef REG_PARM_STACK_SPACE
3203 if (save_area) 3115 if (save_area)
3204 restore_fixed_argument_area (save_area, argblock, 3116 restore_fixed_argument_area (save_area, argblock,
3205 high_to_save, low_to_save); 3117 high_to_save, low_to_save);
3206 #endif 3118 #endif
3207 3119
3208 /* If we saved any argument areas, restore them. */ 3120 /* If we saved any argument areas, restore them. */
3209 for (i = 0; i < num_actuals; i++) 3121 for (i = 0; i < num_actuals; i++)
3210 if (args[i].save_area) 3122 if (args[i].save_area)
3211 { 3123 {
3212 enum machine_mode save_mode = GET_MODE (args[i].save_area); 3124 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3213 rtx stack_area 3125 rtx stack_area
3214 = gen_rtx_MEM (save_mode, 3126 = gen_rtx_MEM (save_mode,
3215 memory_address (save_mode, 3127 memory_address (save_mode,
3216 XEXP (args[i].stack_slot, 0))); 3128 XEXP (args[i].stack_slot, 0)));
3217 3129
3218 if (save_mode != BLKmode) 3130 if (save_mode != BLKmode)
3219 emit_move_insn (stack_area, args[i].save_area); 3131 emit_move_insn (stack_area, args[i].save_area);
3220 else 3132 else
3221 emit_block_move (stack_area, args[i].save_area, 3133 emit_block_move (stack_area, args[i].save_area,
3222 GEN_INT (args[i].locate.size.constant), 3134 GEN_INT (args[i].locate.size.constant),
3223 BLOCK_OP_CALL_PARM); 3135 BLOCK_OP_CALL_PARM);
3224 } 3136 }
3225 3137
3226 highest_outgoing_arg_in_use = initial_highest_arg_in_use; 3138 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3227 stack_usage_map = initial_stack_usage_map; 3139 stack_usage_map = initial_stack_usage_map;
3228 } 3140 }
3229 3141
3230 /* If this was alloca, record the new stack level for nonlocal gotos. 3142 /* If this was alloca, record the new stack level for nonlocal gotos.
3231 Check for the handler slots since we might not have a save area 3143 Check for the handler slots since we might not have a save area
3232 for non-local gotos. */ 3144 for non-local gotos. */
3233 3145
3234 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0) 3146 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3235 update_nonlocal_goto_save_area (); 3147 update_nonlocal_goto_save_area ();
3236 3148
3237 /* Free up storage we no longer need. */ 3149 /* Free up storage we no longer need. */
3238 for (i = 0; i < num_actuals; ++i) 3150 for (i = 0; i < num_actuals; ++i)
3239 if (args[i].aligned_regs) 3151 if (args[i].aligned_regs)
3240 free (args[i].aligned_regs); 3152 free (args[i].aligned_regs);
3241 3153
3242 insns = get_insns (); 3154 insns = get_insns ();
3243 end_sequence (); 3155 end_sequence ();
3244 3156
3245 if (pass == 0) 3157 if (pass == 0)
3246 { 3158 {
3247 tail_call_insns = insns; 3159 tail_call_insns = insns;
3248 3160
3249 /* Restore the pending stack adjustment now that we have 3161 /* Restore the pending stack adjustment now that we have
3250 finished generating the sibling call sequence. */ 3162 finished generating the sibling call sequence. */
3251 3163
3252 pending_stack_adjust = save_pending_stack_adjust; 3164 pending_stack_adjust = save_pending_stack_adjust;
3253 stack_pointer_delta = save_stack_pointer_delta; 3165 stack_pointer_delta = save_stack_pointer_delta;
3254 3166
3255 /* Prepare arg structure for next iteration. */ 3167 /* Prepare arg structure for next iteration. */
3256 for (i = 0; i < num_actuals; i++) 3168 for (i = 0; i < num_actuals; i++)
3257 { 3169 {
3258 args[i].value = 0; 3170 args[i].value = 0;
3259 args[i].aligned_regs = 0; 3171 args[i].aligned_regs = 0;
3260 args[i].stack = 0; 3172 args[i].stack = 0;
3261 } 3173 }
3262 3174
3263 sbitmap_free (stored_args_map); 3175 sbitmap_free (stored_args_map);
3264 } 3176 }
3265 else 3177 else
3266 { 3178 {
3267 normal_call_insns = insns; 3179 normal_call_insns = insns;
3268 3180
3269 /* Verify that we've deallocated all the stack we used. */ 3181 /* Verify that we've deallocated all the stack we used. */
3270 gcc_assert ((flags & ECF_NORETURN) 3182 gcc_assert ((flags & ECF_NORETURN)
3271 || (old_stack_allocated 3183 || (old_stack_allocated
3272 == stack_pointer_delta - pending_stack_adjust)); 3184 == stack_pointer_delta - pending_stack_adjust));
3273 } 3185 }
3274 3186
3275 /* If something prevents making this a sibling call, 3187 /* If something prevents making this a sibling call,
3276 zero out the sequence. */ 3188 zero out the sequence. */
3277 if (sibcall_failure) 3189 if (sibcall_failure)
3278 tail_call_insns = NULL_RTX; 3190 tail_call_insns = NULL_RTX;
3279 else 3191 else
3280 break; 3192 break;
3281 } 3193 }
3282 3194
3283 /* If tail call production succeeded, we need to remove REG_EQUIV notes on 3195 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3284 arguments too, as argument area is now clobbered by the call. */ 3196 arguments too, as argument area is now clobbered by the call. */
3285 if (tail_call_insns) 3197 if (tail_call_insns)
3318 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 3230 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3319 { 3231 {
3320 rtx note; 3232 rtx note;
3321 3233
3322 /* There are never REG_EQUIV notes for the incoming arguments 3234 /* There are never REG_EQUIV notes for the incoming arguments
3323 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ 3235 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3324 if (NOTE_P (insn) 3236 if (NOTE_P (insn)
3325 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) 3237 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3326 break; 3238 break;
3327 3239
3328 note = find_reg_note (insn, REG_EQUIV, 0); 3240 note = find_reg_note (insn, REG_EQUIV, 0);
3329 if (note) 3241 if (note)
3330 remove_note (insn, note); 3242 remove_note (insn, note);
3331 note = find_reg_note (insn, REG_EQUIV, 0); 3243 note = find_reg_note (insn, REG_EQUIV, 0);
3332 gcc_assert (!note); 3244 gcc_assert (!note);
3333 } 3245 }
3334 } 3246 }
3335 3247
3343 /* Before allocating memory, check for the common case of no complex arguments. */ 3255 /* Before allocating memory, check for the common case of no complex arguments. */
3344 for (p = types; p; p = TREE_CHAIN (p)) 3256 for (p = types; p; p = TREE_CHAIN (p))
3345 { 3257 {
3346 tree type = TREE_VALUE (p); 3258 tree type = TREE_VALUE (p);
3347 if (TREE_CODE (type) == COMPLEX_TYPE 3259 if (TREE_CODE (type) == COMPLEX_TYPE
3348 && targetm.calls.split_complex_arg (type)) 3260 && targetm.calls.split_complex_arg (type))
3349 goto found; 3261 goto found;
3350 } 3262 }
3351 return types; 3263 return types;
3352 3264
3353 found: 3265 found:
3354 types = copy_list (types); 3266 types = copy_list (types);
3356 for (p = types; p; p = TREE_CHAIN (p)) 3268 for (p = types; p; p = TREE_CHAIN (p))
3357 { 3269 {
3358 tree complex_type = TREE_VALUE (p); 3270 tree complex_type = TREE_VALUE (p);
3359 3271
3360 if (TREE_CODE (complex_type) == COMPLEX_TYPE 3272 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3361 && targetm.calls.split_complex_arg (complex_type)) 3273 && targetm.calls.split_complex_arg (complex_type))
3362 { 3274 {
3363 tree next, imag; 3275 tree next, imag;
3364 3276
3365 /* Rewrite complex type with component type. */ 3277 /* Rewrite complex type with component type. */
3366 TREE_VALUE (p) = TREE_TYPE (complex_type); 3278 TREE_VALUE (p) = TREE_TYPE (complex_type);
3367 next = TREE_CHAIN (p); 3279 next = TREE_CHAIN (p);
3368 3280
3369 /* Add another component type for the imaginary part. */ 3281 /* Add another component type for the imaginary part. */
3370 imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); 3282 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3371 TREE_CHAIN (p) = imag; 3283 TREE_CHAIN (p) = imag;
3372 TREE_CHAIN (imag) = next; 3284 TREE_CHAIN (imag) = next;
3373 3285
3374 /* Skip the newly created node. */ 3286 /* Skip the newly created node. */
3375 p = TREE_CHAIN (p); 3287 p = TREE_CHAIN (p);
3376 } 3288 }
3377 } 3289 }
3378 3290
3379 return types; 3291 return types;
3380 } 3292 }
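/* Illustrative aside: given an argument type list

       (complex double, int)

   on a target whose split_complex_arg hook accepts complex double, the
   loop above rewrites the (copied) list in place to

       (double, double, int)

   i.e. the complex node is narrowed to its component type and a second
   node for the imaginary part is spliced in right after it.  */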
3381 3293
3383 The RETVAL parameter specifies whether the return value needs to be saved; other 3295 The RETVAL parameter specifies whether the return value needs to be saved; other
3384 parameters are documented in the emit_library_call function below. */ 3296 parameters are documented in the emit_library_call function below. */
3385 3297
3386 static rtx 3298 static rtx
3387 emit_library_call_value_1 (int retval, rtx orgfun, rtx value, 3299 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3388 enum libcall_type fn_type, 3300 enum libcall_type fn_type,
3389 enum machine_mode outmode, int nargs, va_list p) 3301 enum machine_mode outmode, int nargs, va_list p)
3390 { 3302 {
3391 /* Total size in bytes of all the stack-parms scanned so far. */ 3303 /* Total size in bytes of all the stack-parms scanned so far. */
3392 struct args_size args_size; 3304 struct args_size args_size;
3393 /* Size of arguments before any adjustments (such as rounding). */ 3305 /* Size of arguments before any adjustments (such as rounding). */
3394 struct args_size original_args_size; 3306 struct args_size original_args_size;
3420 int struct_value_size = 0; 3332 int struct_value_size = 0;
3421 int flags; 3333 int flags;
3422 int reg_parm_stack_space = 0; 3334 int reg_parm_stack_space = 0;
3423 int needed; 3335 int needed;
3424 rtx before_call; 3336 rtx before_call;
3425 tree tfom; /* type_for_mode (outmode, 0) */ 3337 tree tfom; /* type_for_mode (outmode, 0) */
3426 3338
3427 #ifdef REG_PARM_STACK_SPACE 3339 #ifdef REG_PARM_STACK_SPACE
3428 /* Define the boundary of the register parm stack space that needs to be 3340 /* Define the boundary of the register parm stack space that needs to be
3429 saved, if any. */ 3341 saved, if any. */
3430 int low_to_save = 0, high_to_save = 0; 3342 int low_to_save = 0, high_to_save = 0;
3476 decide where in memory it should come back. */ 3388 decide where in memory it should come back. */
3477 if (outmode != VOIDmode) 3389 if (outmode != VOIDmode)
3478 { 3390 {
3479 tfom = lang_hooks.types.type_for_mode (outmode, 0); 3391 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3480 if (aggregate_value_p (tfom, 0)) 3392 if (aggregate_value_p (tfom, 0))
3481 { 3393 {
3482 #ifdef PCC_STATIC_STRUCT_RETURN 3394 #ifdef PCC_STATIC_STRUCT_RETURN
3483 rtx pointer_reg 3395 rtx pointer_reg
3484 = hard_function_value (build_pointer_type (tfom), 0, 0, 0); 3396 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3485 mem_value = gen_rtx_MEM (outmode, pointer_reg); 3397 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3486 pcc_struct_value = 1; 3398 pcc_struct_value = 1;
3487 if (value == 0) 3399 if (value == 0)
3488 value = gen_reg_rtx (outmode); 3400 value = gen_reg_rtx (outmode);
3489 #else /* not PCC_STATIC_STRUCT_RETURN */ 3401 #else /* not PCC_STATIC_STRUCT_RETURN */
3490 struct_value_size = GET_MODE_SIZE (outmode); 3402 struct_value_size = GET_MODE_SIZE (outmode);
3491 if (value != 0 && MEM_P (value)) 3403 if (value != 0 && MEM_P (value))
3492 mem_value = value; 3404 mem_value = value;
3493 else 3405 else
3494 mem_value = assign_temp (tfom, 0, 1, 1); 3406 mem_value = assign_temp (tfom, 0, 1, 1);
3495 #endif 3407 #endif
3496 /* This call returns a big structure. */ 3408 /* This call returns a big structure. */
3497 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); 3409 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3498 } 3410 }
3499 } 3411 }
3500 else 3412 else
3501 tfom = void_type_node; 3413 tfom = void_type_node;
3502 3414
3503 /* ??? Unfinished: must pass the memory address as an argument. */ 3415 /* ??? Unfinished: must pass the memory address as an argument. */
3533 3445
3534 nargs++; 3446 nargs++;
3535 3447
3536 /* Make sure it is a reasonable operand for a move or push insn. */ 3448 /* Make sure it is a reasonable operand for a move or push insn. */
3537 if (!REG_P (addr) && !MEM_P (addr) 3449 if (!REG_P (addr) && !MEM_P (addr)
3538 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr))) 3450 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3539 addr = force_operand (addr, NULL_RTX); 3451 addr = force_operand (addr, NULL_RTX);
3540 3452
3541 argvec[count].value = addr; 3453 argvec[count].value = addr;
3542 argvec[count].mode = Pmode; 3454 argvec[count].mode = Pmode;
3543 argvec[count].partial = 0; 3455 argvec[count].partial = 0;
3544 3456
3545 argvec[count].reg = targetm.calls.function_arg (&args_so_far, 3457 argvec[count].reg = targetm.calls.function_arg (&args_so_far,
3546 Pmode, NULL_TREE, true); 3458 Pmode, NULL_TREE, true);
3547 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode, 3459 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3548 NULL_TREE, 1) == 0); 3460 NULL_TREE, 1) == 0);
3549 3461
3550 locate_and_pad_parm (Pmode, NULL_TREE, 3462 locate_and_pad_parm (Pmode, NULL_TREE,
3551 #ifdef STACK_PARMS_IN_REG_PARM_AREA 3463 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3552 1, 3464 1,
3553 #else 3465 #else
3554 argvec[count].reg != 0, 3466 argvec[count].reg != 0,
3555 #endif 3467 #endif
3556 0, NULL_TREE, &args_size, &argvec[count].locate); 3468 0, NULL_TREE, &args_size, &argvec[count].locate);
3557 3469
3558 if (argvec[count].reg == 0 || argvec[count].partial != 0 3470 if (argvec[count].reg == 0 || argvec[count].partial != 0
3559 || reg_parm_stack_space > 0) 3471 || reg_parm_stack_space > 0)
3560 args_size.constant += argvec[count].locate.size.constant; 3472 args_size.constant += argvec[count].locate.size.constant;
3561 3473
3562 targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true); 3474 targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
3563 3475
3564 count++; 3476 count++;
3565 } 3477 }
3568 { 3480 {
3569 rtx val = va_arg (p, rtx); 3481 rtx val = va_arg (p, rtx);
3570 enum machine_mode mode = (enum machine_mode) va_arg (p, int); 3482 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3571 3483
3572 /* We cannot convert the arg value to the mode the library wants here; 3484 /* We cannot convert the arg value to the mode the library wants here;
3573 must do it earlier where we know the signedness of the arg. */ 3485 must do it earlier where we know the signedness of the arg. */
3574 gcc_assert (mode != BLKmode 3486 gcc_assert (mode != BLKmode
3575 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); 3487 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3576 3488
3577 /* Make sure it is a reasonable operand for a move or push insn. */ 3489 /* Make sure it is a reasonable operand for a move or push insn. */
3578 if (!REG_P (val) && !MEM_P (val) 3490 if (!REG_P (val) && !MEM_P (val)
3579 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) 3491 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3580 val = force_operand (val, NULL_RTX); 3492 val = force_operand (val, NULL_RTX);
3581 3493
3582 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1)) 3494 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3583 { 3495 {
3584 rtx slot; 3496 rtx slot;
3585 int must_copy 3497 int must_copy
3586 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1); 3498 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3587 3499
3588 /* If this was a CONST function, it is now PURE since it now 3500 /* If this was a CONST function, it is now PURE since it now
3589 reads memory. */ 3501 reads memory. */
3590 if (flags & ECF_CONST) 3502 if (flags & ECF_CONST)
3591 { 3503 {
3592 flags &= ~ECF_CONST; 3504 flags &= ~ECF_CONST;
3593 flags |= ECF_PURE; 3505 flags |= ECF_PURE;
3594 } 3506 }
3595 3507
3596 if (MEM_P (val) && !must_copy) 3508 if (MEM_P (val) && !must_copy)
3597 slot = val; 3509 slot = val;
3598 else 3510 else
3599 { 3511 {
3600 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0), 3512 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3601 0, 1, 1); 3513 0, 1, 1);
3602 emit_move_insn (slot, val); 3514 emit_move_insn (slot, val);
3603 } 3515 }
3604 3516
3605 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, 3517 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3606 gen_rtx_USE (VOIDmode, slot), 3518 gen_rtx_USE (VOIDmode, slot),
3607 call_fusage); 3519 call_fusage);
3608 if (must_copy) 3520 if (must_copy)
3609 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, 3521 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3610 gen_rtx_CLOBBER (VOIDmode, 3522 gen_rtx_CLOBBER (VOIDmode,
3611 slot), 3523 slot),
3612 call_fusage); 3524 call_fusage);
3613 3525
3614 mode = Pmode; 3526 mode = Pmode;
3615 val = force_operand (XEXP (slot, 0), NULL_RTX); 3527 val = force_operand (XEXP (slot, 0), NULL_RTX);
3616 } 3528 }
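	  /* Illustrative aside: for a libcall argument that the target
	     passes by reference, the value is spilled to a temporary stack
	     slot and the slot's Pmode address becomes the real argument.
	     Roughly, for a value VAL of mode MODE (as in the code above):

	         slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
	                             0, 1, 1);
	         emit_move_insn (slot, val);      // copy the value out
	         val = XEXP (slot, 0);            // pass the address instead

	     The USE added to CALL_FUSAGE keeps the store alive; the CLOBBER
	     is added when the callee is allowed to modify its copy, so the
	     old slot contents are not assumed to survive the call.  */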
3617 3529
3618 argvec[count].value = val; 3530 argvec[count].value = val;
3619 argvec[count].mode = mode; 3531 argvec[count].mode = mode;
3620 3532
3621 argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode, 3533 argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
3622 NULL_TREE, true); 3534 NULL_TREE, true);
3623 3535
3624 argvec[count].partial 3536 argvec[count].partial
3625 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1); 3537 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3626 3538
3627 locate_and_pad_parm (mode, NULL_TREE, 3539 locate_and_pad_parm (mode, NULL_TREE,
3628 #ifdef STACK_PARMS_IN_REG_PARM_AREA 3540 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3629 1, 3541 1,
3630 #else 3542 #else
3631 argvec[count].reg != 0, 3543 argvec[count].reg != 0,
3632 #endif 3544 #endif
3633 argvec[count].partial, 3545 argvec[count].partial,
3634 NULL_TREE, &args_size, &argvec[count].locate); 3546 NULL_TREE, &args_size, &argvec[count].locate);
3635 3547
3636 gcc_assert (!argvec[count].locate.size.var); 3548 gcc_assert (!argvec[count].locate.size.var);
3637 3549
3638 if (argvec[count].reg == 0 || argvec[count].partial != 0 3550 if (argvec[count].reg == 0 || argvec[count].partial != 0
3639 || reg_parm_stack_space > 0) 3551 || reg_parm_stack_space > 0)
3640 args_size.constant += argvec[count].locate.size.constant; 3552 args_size.constant += argvec[count].locate.size.constant;
3641 3553
3642 targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true); 3554 targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
3643 } 3555 }
3644 3556
3645 /* If this machine requires an external definition for library 3557 /* If this machine requires an external definition for library
3646 functions, write one out. */ 3558 functions, write one out. */
3647 assemble_external_libcall (fun); 3559 assemble_external_libcall (fun);
3648 3560
3649 original_args_size = args_size; 3561 original_args_size = args_size;
3650 args_size.constant = (((args_size.constant 3562 args_size.constant = (((args_size.constant
3651 + stack_pointer_delta 3563 + stack_pointer_delta
3652 + STACK_BYTES - 1) 3564 + STACK_BYTES - 1)
3653 / STACK_BYTES 3565 / STACK_BYTES
3654 * STACK_BYTES) 3566 * STACK_BYTES)
3655 - stack_pointer_delta); 3567 - stack_pointer_delta);
3656 3568
3657 args_size.constant = MAX (args_size.constant, 3569 args_size.constant = MAX (args_size.constant,
3658 reg_parm_stack_space); 3570 reg_parm_stack_space);
3659 3571
3660 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 3572 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3661 args_size.constant -= reg_parm_stack_space; 3573 args_size.constant -= reg_parm_stack_space;
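  /* Worked example (illustrative numbers): with STACK_BYTES = 16,
     args_size.constant = 20 and stack_pointer_delta = 8, the rounding
     above aligns the *total* stack offset, not just the argument block:

         ((20 + 8 + 15) / 16) * 16 - 8  =  32 - 8  =  24

     so pushing 24 bytes of arguments leaves the stack pointer 16-byte
     aligned at the call, given the 8 bytes already outstanding.  */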
3662 3574
3663 if (args_size.constant > crtl->outgoing_args_size) 3575 if (args_size.constant > crtl->outgoing_args_size)
3671 } 3583 }
3672 3584
3673 if (ACCUMULATE_OUTGOING_ARGS) 3585 if (ACCUMULATE_OUTGOING_ARGS)
3674 { 3586 {
3675 /* Since the stack pointer will never be pushed, it is possible for 3587 /* Since the stack pointer will never be pushed, it is possible for
3676 the evaluation of a parm to clobber something we have already 3588 the evaluation of a parm to clobber something we have already
3677 written to the stack. Since most function calls on RISC machines 3589 written to the stack. Since most function calls on RISC machines
3678 do not use the stack, this is uncommon, but must work correctly. 3590 do not use the stack, this is uncommon, but must work correctly.
3679 3591
3680 Therefore, we save any area of the stack that was already written 3592 Therefore, we save any area of the stack that was already written
3681 and that we are using. Here we set up to do this by making a new 3593 and that we are using. Here we set up to do this by making a new
3682 stack usage map from the old one. 3594 stack usage map from the old one.
3683 3595
3684 Another approach might be to try to reorder the argument 3596 Another approach might be to try to reorder the argument
3685 evaluations to avoid this conflicting stack usage. */ 3597 evaluations to avoid this conflicting stack usage. */
3686 3598
3687 needed = args_size.constant; 3599 needed = args_size.constant;
3688 3600
3689 /* Since we will be writing into the entire argument area, the 3601 /* Since we will be writing into the entire argument area, the
3690 map must be allocated for its entire size, not just the part that 3602 map must be allocated for its entire size, not just the part that
3691 is the responsibility of the caller. */ 3603 is the responsibility of the caller. */
3692 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) 3604 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3693 needed += reg_parm_stack_space; 3605 needed += reg_parm_stack_space;
3694 3606
3695 #ifdef ARGS_GROW_DOWNWARD 3607 #ifdef ARGS_GROW_DOWNWARD
3696 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, 3608 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3697 needed + 1); 3609 needed + 1);
3698 #else 3610 #else
3699 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, 3611 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3700 needed); 3612 needed);
3701 #endif 3613 #endif
3702 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); 3614 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3703 stack_usage_map = stack_usage_map_buf; 3615 stack_usage_map = stack_usage_map_buf;
3704 3616
3705 if (initial_highest_arg_in_use) 3617 if (initial_highest_arg_in_use)
3706 memcpy (stack_usage_map, initial_stack_usage_map, 3618 memcpy (stack_usage_map, initial_stack_usage_map,
3707 initial_highest_arg_in_use); 3619 initial_highest_arg_in_use);
3708 3620
3709 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) 3621 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3710 memset (&stack_usage_map[initial_highest_arg_in_use], 0, 3622 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3711 highest_outgoing_arg_in_use - initial_highest_arg_in_use); 3623 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3712 needed = 0; 3624 needed = 0;
3713 3625
3714 /* We must be careful to use virtual regs before they're instantiated, 3626 /* We must be careful to use virtual regs before they're instantiated,
3715 and real regs afterwards. Loop optimization, for example, can create 3627 and real regs afterwards. Loop optimization, for example, can create
3716 new libcalls after we've instantiated the virtual regs, and if we 3628 new libcalls after we've instantiated the virtual regs, and if we
3717 use virtuals anyway, they won't match the rtl patterns. */ 3629 use virtuals anyway, they won't match the rtl patterns. */
3718 3630
3719 if (virtuals_instantiated) 3631 if (virtuals_instantiated)
3720 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET); 3632 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3721 else 3633 else
3722 argblock = virtual_outgoing_args_rtx; 3634 argblock = virtual_outgoing_args_rtx;
3723 } 3635 }
3724 else 3636 else
3725 { 3637 {
3726 if (!PUSH_ARGS) 3638 if (!PUSH_ARGS)
3727 argblock = push_block (GEN_INT (args_size.constant), 0, 0); 3639 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3728 } 3640 }
3729 3641
3730 /* If we push args individually in reverse order, perform stack alignment 3642 /* If we push args individually in reverse order, perform stack alignment
3731 before the first push (the last arg). */ 3643 before the first push (the last arg). */
3732 if (argblock == 0 && PUSH_ARGS_REVERSED) 3644 if (argblock == 0 && PUSH_ARGS_REVERSED)
3733 anti_adjust_stack (GEN_INT (args_size.constant 3645 anti_adjust_stack (GEN_INT (args_size.constant
3734 - original_args_size.constant)); 3646 - original_args_size.constant));
3735 3647
3736 3648   if (PUSH_ARGS_REVERSED)
3737 3649     {
3738 3650       inc = -1;
3739 3651       argnum = nargs - 1;
3746 3658
3747 3659 #ifdef REG_PARM_STACK_SPACE
3748 3660   if (ACCUMULATE_OUTGOING_ARGS)
3749 3661     {
3750 3662       /* The argument list is the property of the called routine and it
3751 3663          may clobber it.  If the fixed area has been used for previous
3752 3664          parameters, we must save and restore it.  */
3753 3665       save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3754 3666                                             &low_to_save, &high_to_save);
3755 3667     }
3756 3668 #endif
3757 3669
3758 3670   /* Push the args that need to be pushed.  */
3759 3671
3767 3679       int partial = argvec[argnum].partial;
3768 3680       unsigned int parm_align = argvec[argnum].locate.boundary;
3769 3681       int lower_bound = 0, upper_bound = 0, i;
3770 3682
3771 3683       if (! (reg != 0 && partial == 0))
3772 3684         {
3773 3685           if (ACCUMULATE_OUTGOING_ARGS)
3774 3686             {
3775 3687               /* If this is being stored into a pre-allocated, fixed-size,
3776 3688                  stack area, save any previous data at that location.  */
3777 3689
3778 3690 #ifdef ARGS_GROW_DOWNWARD
3779 3691               /* stack_slot is negative, but we want to index stack_usage_map
3780 3692                  with positive values.  */
3781 3693               upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3782 3694               lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3783 3695 #else
3784 3696               lower_bound = argvec[argnum].locate.slot_offset.constant;
3785 3697               upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3786 3698 #endif
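              /* Illustrative note, not part of the original source: with
                 ARGS_GROW_DOWNWARD, a slot_offset.constant of -16 and a
                 size.constant of 8 give upper_bound == 17 and
                 lower_bound == 9, i.e. bytes 9..16 of stack_usage_map; the
                 negative slot offsets are flipped so the map can be indexed
                 with nonnegative values.  */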
3787 3699
3788 3700               i = lower_bound;
3789 3701               /* Don't worry about things in the fixed argument area;
3790 3702                  it has already been saved.  */
3791 3703               if (i < reg_parm_stack_space)
3792 3704                 i = reg_parm_stack_space;
3793 3705               while (i < upper_bound && stack_usage_map[i] == 0)
3794 3706                 i++;
3795 3707
3796 3708               if (i < upper_bound)
3797 3709                 {
3798 3710                   /* We need to make a save area.  */
3799 3711                   unsigned int size
3800 3712                     = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3801 3713                   enum machine_mode save_mode
3802 3714                     = mode_for_size (size, MODE_INT, 1);
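                  /* Illustrative note, not part of the original source: an
                     8-byte slot asks mode_for_size for a 64-bit integer mode
                     (DImode on most targets); if no integer mode of exactly
                     that width exists, or it exceeds MAX_FIXED_MODE_SIZE
                     (which the nonzero third argument enforces), BLKmode
                     comes back and the block-move path below is taken.  */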
3803 3715                   rtx adr
3804 3716                     = plus_constant (argblock,
3805 3717                                      argvec[argnum].locate.offset.constant);
3806 3718                   rtx stack_area
3807 3719                     = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3808 3720
3809 3721                   if (save_mode == BLKmode)
3810 3722                     {
3811 3723                       argvec[argnum].save_area
3812 3724                         = assign_stack_temp (BLKmode,
3813 3725                                              argvec[argnum].locate.size.constant,
3814 3726                                              0);
3815 3727
3816 3728                       emit_block_move (validize_mem (argvec[argnum].save_area),
3817 3729                                        stack_area,
3818 3730                                        GEN_INT (argvec[argnum].locate.size.constant),
3819 3731                                        BLOCK_OP_CALL_PARM);
3820 3732                     }
3821 3733                   else
3822 3734                     {
3823 3735                       argvec[argnum].save_area = gen_reg_rtx (save_mode);
3824 3736
3825 3737                       emit_move_insn (argvec[argnum].save_area, stack_area);
3826 3738                     }
3827 3739                 }
3828 3740             }
3829 3741
3830 3742           emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3831 3743                           partial, reg, 0, argblock,
3832 3744                           GEN_INT (argvec[argnum].locate.offset.constant),
3833 3745                           reg_parm_stack_space,
3834 3746                           ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3835 3747
3836 3748           /* Now mark the segment we just used.  */
3837 3749           if (ACCUMULATE_OUTGOING_ARGS)
3838 3750             for (i = lower_bound; i < upper_bound; i++)
3839 3751               stack_usage_map[i] = 1;
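          /* Illustrative note, not part of the original source: continuing
             the example above, bytes lower_bound..upper_bound - 1 (9..16)
             of stack_usage_map are now set, so any later argument or nested
             libcall that lands on those bytes knows it must save and
             restore them.  */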
3840 3752
3841 3753           NO_DEFER_POP;
3842 3754
3843 3755           if ((flags & ECF_CONST)
3844 3756               || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
3845 3757             {
3846 3758               rtx use;
3847 3759
3848 3760               /* Indicate argument access so that alias.c knows that these
3849 3761                  values are live.  */
3850 3762               if (argblock)
3851 3763                 use = plus_constant (argblock,
3852 3764                                      argvec[argnum].locate.offset.constant);
3853 3765               else
3854 3766                 /* When arguments are pushed, trying to tell alias.c where
3855 3767                    exactly this argument is won't work, because the
3856 3768                    auto-increment causes confusion.  So we merely indicate
3857 3769                    that we access something with a known mode somewhere on
3858 3770                    the stack.  */
3859 3771                 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3860 3772                                     gen_rtx_SCRATCH (Pmode));
3861 3773               use = gen_rtx_MEM (argvec[argnum].mode, use);
3862 3774               use = gen_rtx_USE (VOIDmode, use);
3863 3775               call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3864 3776             }
3865 3777         }
3866 3778     }
3867 3779
3868 3780   /* If we pushed args in forward order, perform stack alignment
3869 3781      after pushing the last arg.  */
3870 3782   if (argblock == 0 && !PUSH_ARGS_REVERSED)
3871 3783     anti_adjust_stack (GEN_INT (args_size.constant
3872 3784                                 - original_args_size.constant));
3873 3785
3874 3786   if (PUSH_ARGS_REVERSED)
3875 3787     argnum = nargs - 1;
3876 3788   else
3877 3789     argnum = 0;
3888 3800       rtx val = argvec[argnum].value;
3889 3801       rtx reg = argvec[argnum].reg;
3890 3802       int partial = argvec[argnum].partial;
3891 3803
3892 3804       /* Handle calls that pass values in multiple non-contiguous
3893 3805          locations.  The PA64 has examples of this for library calls.  */
3894 3806       if (reg != 0 && GET_CODE (reg) == PARALLEL)
3895 3807         emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3896 3808       else if (reg != 0 && partial == 0)
3897 3809         emit_move_insn (reg, val);
3898 3810
3899 3811       NO_DEFER_POP;
3900 3812     }
3901 3813
3902 3814   /* Any regs containing parms remain in use through the call.  */
3903 3815   for (count = 0; count < nargs; count++)
3904 3816     {
3905 3817       rtx reg = argvec[count].reg;
3906 3818       if (reg != 0 && GET_CODE (reg) == PARALLEL)
3907 3819         use_group_regs (&call_fusage, reg);
3908 3820       else if (reg != 0)
3909 3821         {
3910 3822           int partial = argvec[count].partial;
3911 3823           if (partial)
3912 3824             {
3913 3825               int nregs;
3914 3826               gcc_assert (partial % UNITS_PER_WORD == 0);
3915 3827               nregs = partial / UNITS_PER_WORD;
3916 3828               use_regs (&call_fusage, REGNO (reg), nregs);
3917 3829             }
3918 3830           else
3919 3831             use_reg (&call_fusage, reg);
3920 3832         }
3921 3833     }
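  /* Illustrative note, not part of the original source: partial counts
     bytes already passed in registers, so partial == 8 with
     UNITS_PER_WORD == 4 means two words went to registers; use_regs then
     records REGNO (reg) and REGNO (reg) + 1 in call_fusage so both hard
     registers are treated as live across the call.  */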
3922 3834
3923 3835   /* Pass the function the address in which to return a structure value.  */
3924 3836   if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3925 3837     {
3926 3838       emit_move_insn (struct_value,
3927 3839                       force_reg (Pmode,
3928 3840                                  force_operand (XEXP (mem_value, 0),
3929 3841                                                 NULL_RTX)));
3930 3842       if (REG_P (struct_value))
3931 3843         use_reg (&call_fusage, struct_value);
3932 3844     }
3933 3845
3934 3846   /* Don't allow popping to be deferred, since then
3935 3847      cse'ing of library calls could delete a call and leave the pop.  */
3936 3848   NO_DEFER_POP;
3937 3849   valreg = (mem_value == 0 && outmode != VOIDmode
3938 3850             ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3939 3851
3940 3852   /* Stack must be properly aligned now.  */
3941 3853   gcc_assert (!(stack_pointer_delta
3942 3854                 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
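  /* Illustrative note, not part of the original source: with a 128-bit
     PREFERRED_STACK_BOUNDARY the mask is 128 / 8 - 1 == 15, so the assert
     requires stack_pointer_delta to be a multiple of 16 bytes before the
     call is emitted.  */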
3943 3855
3944 3856   before_call = get_last_insn ();
3945 3857
3946 3858   /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3947 3859      will set inhibit_defer_pop to that value.  */
3966 3878      fall through.  */
3967 3879
3968 3880   if (flags & ECF_NORETURN)
3969 3881     {
3970 3882       /* The barrier note must be emitted
3971 3883          immediately after the CALL_INSN.  Some ports emit more than
3972 3884          just a CALL_INSN above, so we must search for it here.  */
3973 3885
3974 3886       rtx last = get_last_insn ();
3975 3887       while (!CALL_P (last))
3976 3888         {
3977 3889           last = PREV_INSN (last);
3978 3890           /* There was no CALL_INSN?  */
3979 3891           gcc_assert (last != before_call);
3980 3892         }
3981 3893
3982 3894       emit_barrier_after (last);
3983 3895     }
3984 3896
3985 3897   /* Now restore inhibit_defer_pop to its actual original value.  */
3989 3901
3990 3902   /* Copy the value to the right place.  */
3991 3903   if (outmode != VOIDmode && retval)
3992 3904     {
3993 3905       if (mem_value)
3994 3906         {
3995 3907           if (value == 0)
3996 3908             value = mem_value;
3997 3909           if (value != mem_value)
3998 3910             emit_move_insn (value, mem_value);
3999 3911         }
4000 3912       else if (GET_CODE (valreg) == PARALLEL)
4001 3913         {
4002 3914           if (value == 0)
4003 3915             value = gen_reg_rtx (outmode);
4004 3916           emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4005 3917         }
4006 3918       else
4007 3919         {
4008 3920           /* Convert to the proper mode if a promotion has been active.  */
4009 3921           if (GET_MODE (valreg) != outmode)
4010 3922             {
4011 3923               int unsignedp = TYPE_UNSIGNED (tfom);
4012 3924
4013 3925               gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4014 3926                                                  fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4015 3927                           == GET_MODE (valreg));
4016 3928               valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4017 3929             }
4018 3930
4019 3931           if (value != 0)
4020 3932             emit_move_insn (value, valreg);
4021 3933           else
4022 3934             value = valreg;
4023 3935         }
4024 3936     }
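  /* Illustrative note, not part of the original source: on a target that
     promotes narrow return values, a libcall whose outmode is QImode may
     really hand its result back in an SImode hard register; the assert
     checks that promote_function_mode predicts the register's mode, and
     convert_modes then narrows the value back to outmode.  */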
4025 3937
4026 3938   if (ACCUMULATE_OUTGOING_ARGS)
4027 3939     {
4028 3940 #ifdef REG_PARM_STACK_SPACE
4029 3941       if (save_area)
4030 3942         restore_fixed_argument_area (save_area, argblock,
4031 3943                                      high_to_save, low_to_save);
4032 3944 #endif
4033 3945
4034 3946       /* If we saved any argument areas, restore them.  */
4035 3947       for (count = 0; count < nargs; count++)
4036 3948         if (argvec[count].save_area)
4037 3949           {
4038 3950             enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4039 3951             rtx adr = plus_constant (argblock,
4040 3952                                      argvec[count].locate.offset.constant);
4041 3953             rtx stack_area = gen_rtx_MEM (save_mode,
4042 3954                                           memory_address (save_mode, adr));
4043 3955
4044 3956             if (save_mode == BLKmode)
4045 3957               emit_block_move (stack_area,
4046 3958                                validize_mem (argvec[count].save_area),
4047 3959                                GEN_INT (argvec[count].locate.size.constant),
4048 3960                                BLOCK_OP_CALL_PARM);
4049 3961             else
4050 3962               emit_move_insn (stack_area, argvec[count].save_area);
4051 3963           }
4052 3964
4053 3965       highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4054 3966       stack_usage_map = initial_stack_usage_map;
4055 3967     }
4056 3968
4071 3983    `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4072 3984    other types of library calls.  */
4073 3985
4074 3986 void
4075 3987 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4076 3988                    enum machine_mode outmode, int nargs, ...)
4077 3989 {
4078 3990   va_list p;
4079 3991
4080 3992   va_start (p, nargs);
4081 3993   emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4090 4002    This function returns an rtx for where the value is to be found.
4091 4003    If VALUE is nonzero, VALUE is returned.  */
4092 4004
4093 4005 rtx
4094 4006 emit_library_call_value (rtx orgfun, rtx value,
4095 4007                          enum libcall_type fn_type,
4096 4008                          enum machine_mode outmode, int nargs, ...)
4097 4009 {
4098 4010   rtx result;
4099 4011   va_list p;
4100 4012
4101 4013   va_start (p, nargs);
4102 4014   result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4103 4015                                       nargs, p);
4104 4016   va_end (p);
4105 4017
4106 4018   return result;
4107 4019 }
4108 4020
4125 4037    Return nonzero if this arg should cause sibcall failure,
4126 4038    zero otherwise.  */
4127 4039
4128 4040 static int
4129 4041 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4130 4042                int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4131 4043 {
4132 4044   tree pval = arg->tree_value;
4133 4045   rtx reg = 0;
4134 4046   int partial = 0;
4135 4047   int used = 0;
4144 4056   push_temp_slots ();
4145 4057
4146 4058   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4147 4059     {
4148 4060       /* If this is being stored into a pre-allocated, fixed-size, stack area,
4149 4061          save any previous data at that location.  */
4150 4062       if (argblock && ! variable_size && arg->stack)
4151 4063         {
4152 4064 #ifdef ARGS_GROW_DOWNWARD
4153 4065           /* stack_slot is negative, but we want to index stack_usage_map
4154 4066              with positive values.  */
4155 4067           if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4156 4068             upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4157 4069           else
4158 4070             upper_bound = 0;
4159 4071
4160 4072           lower_bound = upper_bound - arg->locate.size.constant;
4161 4073 #else
4162 4074           if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4163 4075             lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4164 4076           else
4165 4077             lower_bound = 0;
4166 4078
4167 4079           upper_bound = lower_bound + arg->locate.size.constant;
4168 4080 #endif
4169 4081
4170 4082           i = lower_bound;
4171 4083           /* Don't worry about things in the fixed argument area;
4172 4084              it has already been saved.  */
4173 4085           if (i < reg_parm_stack_space)
4174 4086             i = reg_parm_stack_space;
4175 4087           while (i < upper_bound && stack_usage_map[i] == 0)
4176 4088             i++;
4177 4089
4178 4090           if (i < upper_bound)
4179 4091             {
4180 4092               /* We need to make a save area.  */
4181 4093               unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4182 4094               enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4183 4095               rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4184 4096               rtx stack_area = gen_rtx_MEM (save_mode, adr);
4185 4097
4186 4098               if (save_mode == BLKmode)
4187 4099                 {
4188 4100                   tree ot = TREE_TYPE (arg->tree_value);
4189 4101                   tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4190 4102                                                        | TYPE_QUAL_CONST));
4191 4103
4192 4104                   arg->save_area = assign_temp (nt, 0, 1, 1);
4193 4105                   preserve_temp_slots (arg->save_area);
4194 4106                   emit_block_move (validize_mem (arg->save_area), stack_area,
4195 4107                                    GEN_INT (arg->locate.size.constant),
4196 4108                                    BLOCK_OP_CALL_PARM);
4197 4109                 }
4198 4110               else
4199 4111                 {
4200 4112                   arg->save_area = gen_reg_rtx (save_mode);
4201 4113                   emit_move_insn (arg->save_area, stack_area);
4202 4114                 }
4203 4115             }
4204 4116         }
4205 4117     }
4206 4118
4207 4119   /* If this isn't going to be placed on both the stack and in registers,
4208 4120      set up the register and number of words.  */
4209 4121   if (! arg->pass_on_stack)
4210 4122     {
4211 4123       if (flags & ECF_SIBCALL)
4212 4124         reg = arg->tail_call_reg;
4213 4125       else
4214 4126         reg = arg->reg;
4215 4127       partial = arg->partial;
4216 4128     }
4217 4129
4218 4130   /* Being passed entirely in a register.  We shouldn't be called in
4219 4131      this case.  */
4227 4139   /* If this is being passed partially in a register, we can't evaluate
4228 4140      it directly into its stack slot.  Otherwise, we can.  */
4229 4141   if (arg->value == 0)
4230 4142     {
4231 4143       /* stack_arg_under_construction is nonzero if a function argument is
4232 4144          being evaluated directly into the outgoing argument list and
4233 4145          expand_call must take special action to preserve the argument list
4234 4146          if it is called recursively.
4235 4147
4236 4148          For scalar function arguments stack_usage_map is sufficient to
4237 4149          determine which stack slots must be saved and restored.  Scalar
4238 4150          arguments in general have pass_on_stack == 0.
4239 4151
4240 4152          If this argument is initialized by a function which takes the
4241 4153          address of the argument (a C++ constructor or a C function
4242 4154          returning a BLKmode structure), then stack_usage_map is
4243 4155          insufficient and expand_call must push the stack around the
4244 4156          function call.  Such arguments have pass_on_stack == 1.
4245 4157
4246 4158          Note that it is always safe to set stack_arg_under_construction,
4247 4159          but this generates suboptimal code if set when not needed.  */
4248 4160
4249 4161       if (arg->pass_on_stack)
4250 4162         stack_arg_under_construction++;
4251 4163
4252 4164       arg->value = expand_expr (pval,
4253 4165                                 (partial
4254 4166                                  || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4255 4167                                 ? NULL_RTX : arg->stack,
4256 4168                                 VOIDmode, EXPAND_STACK_PARM);
4257 4169
4258 4170       /* If we are promoting the object (or if for any other reason the
4259 4171          mode doesn't agree), convert the mode.  */
4260 4172
4261 4173       if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4262 4174         arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4263 4175                                     arg->value, arg->unsignedp);
4264 4176
4265 4177       if (arg->pass_on_stack)
4266 4178         stack_arg_under_construction--;
4267 4179     }
4268 4180
4269 4181   /* Check for overlap with already clobbered argument area.  */
4270 4182   if ((flags & ECF_SIBCALL)
4271 4183       && MEM_P (arg->value)
4272 4184       && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4273 4185                                                arg->locate.size.constant))
4274 4186     sibcall_failure = 1;
4275 4187
4276 4188   /* Don't allow anything left on stack from computation
4277 4189      of argument to alloca.  */
4278 4190   if (flags & ECF_MAY_BE_ALLOCA)
4285 4197     {
4286 4198       int size;
4287 4199       unsigned int parm_align;
4288 4200
4289 4201       /* Argument is a scalar, not entirely passed in registers.
4290 4202          (If part is passed in registers, arg->partial says how much
4291 4203          and emit_push_insn will take care of putting it there.)
4292 4204
4293 4205          Push it, and if its size is less than the
4294 4206          amount of space allocated to it,
4295 4207          also bump stack pointer by the additional space.
4296 4208          Note that in C the default argument promotions
4297 4209          will prevent such mismatches.  */
4298 4210
4299 4211       size = GET_MODE_SIZE (arg->mode);
4300 4212       /* Compute how much space the push instruction will push.
4301 4213          On many machines, pushing a byte will advance the stack
4302 4214          pointer by a halfword.  */
4303 4215 #ifdef PUSH_ROUNDING
4304 4216       size = PUSH_ROUNDING (size);
4305 4217 #endif
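      /* Illustrative note, not part of the original source: a target whose
         push instruction always moves the stack pointer in 4-byte steps
         might define
             #define PUSH_ROUNDING(BYTES) (((BYTES) + 3) & ~3)
         so that pushing a single byte still advances the pointer by 4.  */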
4306 4218       used = size;
4307 4219
4308 4220       /* Compute how much space the argument should get:
4309 4221          round up to a multiple of the alignment for arguments.  */
4310 4222       if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4311 4223         used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4312 4224                  / (PARM_BOUNDARY / BITS_PER_UNIT))
4313 4225                 * (PARM_BOUNDARY / BITS_PER_UNIT));
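      /* Illustrative note, not part of the original source: with size == 4
         and a 64-bit PARM_BOUNDARY (8 bytes), this rounds used up to
         ((4 + 7) / 8) * 8 == 8, so the argument occupies a full parameter
         slot.  */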
4314 4226
4315 4227       /* Compute the alignment of the pushed argument.  */
4316 4228       parm_align = arg->locate.boundary;
4317 4229       if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4318 4230         {
4319 4231           int pad = used - size;
4320 4232           if (pad)
4321 4233             {
4322 4234               unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4323 4235               parm_align = MIN (parm_align, pad_align);
4324 4236             }
4325 4237         }
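      /* Illustrative note, not part of the original source: pad & -pad
         isolates the lowest set bit, a conservative bound on the padding's
         alignment; e.g. used == 8 and size == 5 give pad == 3, so
         pad_align == 1 * BITS_PER_UNIT and parm_align drops to byte
         alignment.  */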
4326 4238
4327 4239       /* This isn't already where we want it on the stack, so put it there.
4328 4240          This can either be done with push or copy insns.  */
4329 4241       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4330 4242                       parm_align, partial, reg, used - size, argblock,
4331 4243                       ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4332 4244                       ARGS_SIZE_RTX (arg->locate.alignment_pad));
4333 4245
4334 4246       /* Unless this is a partially-in-register argument, the argument is now
4335 4247          in the stack.  */
4336 4248       if (partial == 0)
4337 4249         arg->value = arg->stack;
4338 4250     }
4339 4251   else
4340 4252     {
4341 4253       /* BLKmode, at least partly to be pushed.  */
4342 4254
4343 4255       unsigned int parm_align;
4344 4256       int excess;
4345 4257       rtx size_rtx;
4346 4258
4347 4259       /* Pushing a nonscalar.
4348 4260          If part is passed in registers, PARTIAL says how much
4349 4261          and emit_push_insn will take care of putting it there.  */
4350 4262
4351 4263       /* Round its size up to a multiple
4352 4264          of the allocation unit for arguments.  */
4353 4265
4354 4266       if (arg->locate.size.var != 0)
4355 4267         {
4356 4268           excess = 0;
4357 4269           size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4358 4270         }
4359 4271       else
4360 4272         {
4361 4273           /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4362 4274              for BLKmode is careful to avoid it.  */
4363 4275           excess = (arg->locate.size.constant
4364 4276                     - int_size_in_bytes (TREE_TYPE (pval))
4365 4277                     + partial);
4366 4278           size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4367 4279                                   NULL_RTX, TYPE_MODE (sizetype),
4368 4280                                   EXPAND_NORMAL);
4369 4281         }
4370 4282
4371 4283       parm_align = arg->locate.boundary;
4372 4284
4373 4285       /* When an argument is padded down, the block is aligned to
4374 4286          PARM_BOUNDARY, but the actual argument isn't.  */
4375 4287       if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4376 4288         {
4377 4289           if (arg->locate.size.var)
4378 4290             parm_align = BITS_PER_UNIT;
4379 4291           else if (excess)
4380 4292             {
4381 4293               unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4382 4294               parm_align = MIN (parm_align, excess_align);
4383 4295             }
4384 4296         }
4385 4297
4386 4298       if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4387 4299         {
4388 4300           /* emit_push_insn might not work properly if arg->value and
4389 4301              argblock + arg->locate.offset areas overlap.  */
4390 4302           rtx x = arg->value;
4391 4303           int i = 0;
4392 4304
4393 4305           if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4394 4306               || (GET_CODE (XEXP (x, 0)) == PLUS
4395 4307                   && XEXP (XEXP (x, 0), 0) ==
4396 4308                      crtl->args.internal_arg_pointer
4397 4309                   && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4398 4310             {
4399 4311               if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4400 4312                 i = INTVAL (XEXP (XEXP (x, 0), 1));
4401 4313
4402 4314               /* expand_call should ensure this.  */
4403 4315               gcc_assert (!arg->locate.offset.var
4404 4316                           && arg->locate.size.var == 0
4405 4317                           && CONST_INT_P (size_rtx));
4406 4318
4407 4319               if (arg->locate.offset.constant > i)
4408 4320                 {
4409 4321                   if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4410 4322                     sibcall_failure = 1;
4411 4323                 }
4412 4324               else if (arg->locate.offset.constant < i)
4413 4325                 {
4414 4326                   /* Use arg->locate.size.constant instead of size_rtx
4415 4327                      because we only care about the part of the argument
4416 4328                      on the stack.  */
4417 4329                   if (i < (arg->locate.offset.constant
4418 4330                            + arg->locate.size.constant))
4419 4331                     sibcall_failure = 1;
4420 4332                 }
4421 4333               else
4422 4334                 {
4423 4335                   /* Even though they appear to be at the same location,
4424 4336                      if part of the outgoing argument is in registers,
4425 4337                      they aren't really at the same location.  Check for
4426 4338                      this by making sure that the incoming size is the
4427 4339                      same as the outgoing size.  */
4428 4340                   if (arg->locate.size.constant != INTVAL (size_rtx))
4429 4341                     sibcall_failure = 1;
4430 4342                 }
4431 4343             }
4432 4344         }
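          /* Illustrative note, not part of the original source: if the
             incoming argument sits at bytes 0..7 of the caller's argument
             area and the outgoing copy targets bytes 4..11, the ranges
             overlap without coinciding, so sibcall_failure is set and a
             normal (non-tail) call is emitted instead.  */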
4433 4345
4434 4346       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4435 4347                       parm_align, partial, reg, excess, argblock,
4436 4348                       ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4437 4349                       ARGS_SIZE_RTX (arg->locate.alignment_pad));
4438 4350
4439 4351       /* Unless this is a partially-in-register argument, the argument is now
4440 4352          in the stack.
4441 4353
4442 4354          ??? Unlike the case above, in which we want the actual
4443 4355          address of the data, so that we can load it directly into a
4444 4356          register, here we want the address of the stack slot, so that
4445 4357          it's properly aligned for word-by-word copying or something
4446 4358          like that.  It's not clear that this is always correct.  */
4447 4359       if (partial == 0)
4448 4360         arg->value = arg->stack_slot;
4449 4361     }
4450 4362
4451 4363   if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4452 4364     {
4453 4365       tree type = TREE_TYPE (arg->tree_value);
4454 4366       arg->parallel_value
4455 4367         = emit_group_load_into_temps (arg->reg, arg->value, type,
4456 4368                                       int_size_in_bytes (type));
4457 4369     }
4458 4370
4459 4371   /* Mark all slots this store used.  */
4460 4372   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4461 4373       && argblock && ! variable_size && arg->stack)
4478 4390
4479 4391 /* Nonzero if we do not know how to pass TYPE solely in registers.  */
4480 4392
4481 4393 bool
4482 4394 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4483 4395                              const_tree type)
4484 4396 {
4485 4397   if (!type)
4486 4398     return false;
4487 4399
4488 4400   /* If the type has variable size...  */
4519 4431   /* If the padding and mode of the type is such that a copy into
4520 4432      a register would put it into the wrong part of the register.  */
4521 4433   if (mode == BLKmode
4522 4434       && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4523 4435       && (FUNCTION_ARG_PADDING (mode, type)
4524 4436           == (BYTES_BIG_ENDIAN ? upward : downward)))
4525 4437     return true;
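  /* Illustrative note, not part of the original source: a 6-byte BLKmode
     aggregate with a 32-bit PARM_BOUNDARY leaves a 2-byte remainder; if
     the padding direction also matches the test above, copying it into a
     register would land the data in the wrong part of the register, so it
     must be passed on the stack.  */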
4526 4438
4527 4439   return false;
4528 4440 }