comparison gcc/function.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children caeb520cebed 58ad6c70ea60
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
68
69 /* So we can assign to cfun in this file. */
70 #undef cfun
71
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
75
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77
78 /* Some systems use __main in a way incompatible with its use in GCC; in
79 these cases use the macro NAME__MAIN to give a quoted symbol and
80 SYMBOL__MAIN to give the same symbol without quotes for an alternative
81 entry point. You must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
85
86 /* Round a value down to the largest multiple of the required alignment
87 that does not exceed it. Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90
91 /* Similar, but round up to the smallest multiple of the alignment that
92 is not less than the value. */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
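
/* Worked example, assuming ALIGN == 8 (a power of two): ~(ALIGN - 1)
   is a mask that clears the low three bits, so

       FLOOR_ROUND (13, 8)  == (13 & ~7)        ==   8
       FLOOR_ROUND (-13, 8) == (-13 & ~7)       == -16
       CEIL_ROUND (13, 8)   == ((13 + 7) & ~7)  ==  16
       CEIL_ROUND (16, 8)   == ((16 + 7) & ~7)  ==  16

   FLOOR_ROUND (-13, 8) rounds toward negative infinity, which is what a
   downward-growing frame needs; signed division would round toward zero
   instead.  */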
94
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
98 compiler passes. */
99 int current_function_is_leaf;
100
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 pass_stack_ptr_mod has run. */
104 int current_function_sp_is_unchanging;
105
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
110
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
116
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
119
120 /* This variable holds a pointer to a function to create
121 target-specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
123
124 /* The currently compiled function. */
125 struct function *cfun = 0;
126
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
130
131 /* Array to hold the INSN_UIDs of each sibcall epilogue
132 in this function. */
133 static VEC(int,heap) *sibcall_epilogue;
134
135 /* Forward declarations. */
136
137 static struct temp_slot *find_temp_slot_from_address (rtx);
138 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
139 static void pad_below (struct args_size *, enum machine_mode, tree);
140 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
141 static int all_blocks (tree, tree *);
142 static tree *get_block_vector (tree, int *);
143 extern tree debug_find_var_in_block_tree (tree, tree);
144 /* We always define `record_insns' even if it's not used so that we
145 can always export `prologue_epilogue_contains'. */
146 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
147 static int contains (const_rtx, VEC(int,heap) **);
148 #ifdef HAVE_return
149 static void emit_return_into_block (basic_block);
150 #endif
151 static void prepare_function_start (void);
152 static void do_clobber_return_reg (rtx, void *);
153 static void do_use_return_reg (rtx, void *);
154 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
155
156 /* Stack of nested functions, used to keep track of the cfun stack. */
157
158
159 typedef struct function *function_p;
160
161 DEF_VEC_P(function_p);
162 DEF_VEC_ALLOC_P(function_p,heap);
163 static VEC(function_p,heap) *function_context_stack;
164
165 /* Save the current context for compilation of a nested function.
166 This is called from language-specific code. */
167
168 void
169 push_function_context (void)
170 {
171 if (cfun == 0)
172 allocate_struct_function (NULL, false);
173
174 VEC_safe_push (function_p, heap, function_context_stack, cfun);
175 set_cfun (NULL);
176 }
177
178 /* Restore the last saved context, at the end of a nested function.
179 This function is called from language-specific code. */
180
181 void
182 pop_function_context (void)
183 {
184 struct function *p = VEC_pop (function_p, function_context_stack);
185 set_cfun (p);
186 current_function_decl = p->decl;
187
188 /* Reset variables that have known state during rtx generation. */
189 virtuals_instantiated = 0;
190 generating_concat_p = 1;
191 }
192
193 /* Clear out all parts of the state in F that can safely be discarded
194 after the function has been parsed, but not compiled, to let
195 garbage collection reclaim the memory. */
196
197 void
198 free_after_parsing (struct function *f)
199 {
200 f->language = 0;
201 }
202
203 /* Clear out all parts of the state in F that can safely be discarded
204 after the function has been compiled, to let garbage collection
205 reclaim the memory. */
206
207 void
208 free_after_compilation (struct function *f)
209 {
210 VEC_free (int, heap, prologue);
211 VEC_free (int, heap, epilogue);
212 VEC_free (int, heap, sibcall_epilogue);
213 if (crtl->emit.regno_pointer_align)
214 free (crtl->emit.regno_pointer_align);
215
216 memset (crtl, 0, sizeof (struct rtl_data));
217 f->eh = NULL;
218 f->machine = NULL;
219 f->cfg = NULL;
220
221 regno_reg_rtx = NULL;
222 insn_locators_free ();
223 }
224
225 /* Return size needed for stack frame based on slots so far allocated.
226 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
227 the caller may have to do that. */
228
229 HOST_WIDE_INT
230 get_frame_size (void)
231 {
232 if (FRAME_GROWS_DOWNWARD)
233 return -frame_offset;
234 else
235 return frame_offset;
236 }
237
238 /* Issue an error message and return TRUE if frame OFFSET overflows in
239 the signed target pointer arithmetic for function FUNC. Otherwise
240 return FALSE. */
241
242 bool
243 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
244 {
245 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
246
247 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
248 /* Leave room for the fixed part of the frame. */
249 - 64 * UNITS_PER_WORD)
250 {
251 error ("%Jtotal size of local objects too large", func);
252 return TRUE;
253 }
254
255 return FALSE;
256 }
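
/* Illustrative bound, assuming a target where Pmode is 32 bits wide and
   UNITS_PER_WORD is 4: the check above rejects any frame larger than
   (1 << 31) - 64 * 4 = 2147483392 bytes, i.e. the signed 32-bit range
   less 256 bytes reserved for the fixed part of the frame.  */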
257
258 /* Return stack slot alignment in bits for TYPE and MODE. */
259
260 static unsigned int
261 get_stack_local_alignment (tree type, enum machine_mode mode)
262 {
263 unsigned int alignment;
264
265 if (mode == BLKmode)
266 alignment = BIGGEST_ALIGNMENT;
267 else
268 alignment = GET_MODE_ALIGNMENT (mode);
269
270 /* Allow the front-end to (possibly) increase the alignment of this
271 stack slot. */
272 if (! type)
273 type = lang_hooks.types.type_for_mode (mode, 0);
274
275 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
276 }
277
278 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
279 with machine mode MODE.
280
281 ALIGN controls the amount of alignment for the address of the slot:
282 0 means according to MODE,
283 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
284 -2 means use BITS_PER_UNIT,
285 positive specifies alignment boundary in bits.
286
287 If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.
288
289 We do not round to stack_boundary here. */
290
291 rtx
292 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
293 int align,
294 bool reduce_alignment_ok ATTRIBUTE_UNUSED)
295 {
296 rtx x, addr;
297 int bigend_correction = 0;
298 unsigned int alignment, alignment_in_bits;
299 int frame_off, frame_alignment, frame_phase;
300
301 if (align == 0)
302 {
303 alignment = get_stack_local_alignment (NULL, mode);
304 alignment /= BITS_PER_UNIT;
305 }
306 else if (align == -1)
307 {
308 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
309 size = CEIL_ROUND (size, alignment);
310 }
311 else if (align == -2)
312 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
313 else
314 alignment = align / BITS_PER_UNIT;
315
316 alignment_in_bits = alignment * BITS_PER_UNIT;
317
318 if (FRAME_GROWS_DOWNWARD)
319 frame_offset -= size;
320
321 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
322 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
323 {
324 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
325 alignment = alignment_in_bits / BITS_PER_UNIT;
326 }
327
328 if (SUPPORTS_STACK_ALIGNMENT)
329 {
330 if (crtl->stack_alignment_estimated < alignment_in_bits)
331 {
332 if (!crtl->stack_realign_processed)
333 crtl->stack_alignment_estimated = alignment_in_bits;
334 else
335 {
336 /* If stack is realigned and stack alignment value
337 hasn't been finalized, it is OK not to increase
338 stack_alignment_estimated. The bigger alignment
339 requirement is recorded in stack_alignment_needed
340 below. */
341 gcc_assert (!crtl->stack_realign_finalized);
342 if (!crtl->stack_realign_needed)
343 {
344 /* It is OK to reduce the alignment as long as the
345 requested size is 0 or the estimated stack
346 alignment >= mode alignment. */
347 gcc_assert (reduce_alignment_ok
348 || size == 0
349 || (crtl->stack_alignment_estimated
350 >= GET_MODE_ALIGNMENT (mode)));
351 alignment_in_bits = crtl->stack_alignment_estimated;
352 alignment = alignment_in_bits / BITS_PER_UNIT;
353 }
354 }
355 }
356 }
357
358 if (crtl->stack_alignment_needed < alignment_in_bits)
359 crtl->stack_alignment_needed = alignment_in_bits;
360 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
361 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
362
363 /* Calculate how many bytes the start of local variables is off from
364 stack alignment. */
365 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
366 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
367 frame_phase = frame_off ? frame_alignment - frame_off : 0;
368
369 /* Round the frame offset to the specified alignment. The default is
370 to always honor requests to align the stack but a port may choose to
371 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
372 if (STACK_ALIGNMENT_NEEDED
373 || mode != BLKmode
374 || size != 0)
375 {
376 /* We must be careful here, since FRAME_OFFSET might be negative and
377 division with a negative dividend isn't as well defined as we might
378 like. So we instead assume that ALIGNMENT is a power of two and
379 use logical operations which are unambiguous. */
380 if (FRAME_GROWS_DOWNWARD)
381 frame_offset
382 = (FLOOR_ROUND (frame_offset - frame_phase,
383 (unsigned HOST_WIDE_INT) alignment)
384 + frame_phase);
385 else
386 frame_offset
387 = (CEIL_ROUND (frame_offset - frame_phase,
388 (unsigned HOST_WIDE_INT) alignment)
389 + frame_phase);
390 }
391
392 /* On a big-endian machine, if we are allocating more space than we will use,
393 use the least significant bytes of those that are allocated. */
394 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
395 bigend_correction = size - GET_MODE_SIZE (mode);
396
397 /* If we have already instantiated virtual registers, return the actual
398 address relative to the frame pointer. */
399 if (virtuals_instantiated)
400 addr = plus_constant (frame_pointer_rtx,
401 trunc_int_for_mode
402 (frame_offset + bigend_correction
403 + STARTING_FRAME_OFFSET, Pmode));
404 else
405 addr = plus_constant (virtual_stack_vars_rtx,
406 trunc_int_for_mode
407 (frame_offset + bigend_correction,
408 Pmode));
409
410 if (!FRAME_GROWS_DOWNWARD)
411 frame_offset += size;
412
413 x = gen_rtx_MEM (mode, addr);
414 set_mem_align (x, alignment_in_bits);
415 MEM_NOTRAP_P (x) = 1;
416
417 stack_slot_list
418 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
419
420 if (frame_offset_overflow (frame_offset, current_function_decl))
421 frame_offset = 0;
422
423 return x;
424 }
425
426 /* Wrapper around assign_stack_local_1, passing false for the last parameter. */
427
428 rtx
429 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
430 {
431 return assign_stack_local_1 (mode, size, align, false);
432 }
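
/* Hypothetical usage sketch: allocate a word-sized slot with the
   alignment implied by its mode (ALIGN == 0), and a 64-byte BLKmode
   buffer rounded and aligned to BIGGEST_ALIGNMENT (ALIGN == -1):

       rtx word_slot = assign_stack_local (word_mode,
                                           GET_MODE_SIZE (word_mode), 0);
       rtx buf_slot = assign_stack_local (BLKmode, 64, -1);

   Both calls return MEM rtxes whose addresses are based on
   virtual_stack_vars_rtx until instantiate_virtual_regs has run.  */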
433
434
435 /* In order to evaluate some expressions, such as function calls returning
436 structures in memory, we need to temporarily allocate stack locations.
437 We record each allocated temporary in the following structure.
438
439 Associated with each temporary slot is a nesting level. When we pop up
440 one level, all temporaries associated with the previous level are freed.
441 Normally, all temporaries are freed after the execution of the statement
442 in which they were created. However, if we are inside a ({...}) grouping,
443 the result may be in a temporary and hence must be preserved. If the
444 result could be in a temporary, we preserve it if we can determine which
445 one it is in. If we cannot determine which temporary may contain the
446 result, all temporaries are preserved. A temporary is preserved by
447 pretending it was allocated at the previous nesting level.
448
449 Automatic variables are also assigned temporary slots, at the nesting
450 level where they are defined. They are marked as "kept" so that
451 free_temp_slots will not free them. */
452
453 struct temp_slot GTY(())
454 {
455 /* Points to next temporary slot. */
456 struct temp_slot *next;
457 /* Points to previous temporary slot. */
458 struct temp_slot *prev;
459 /* The rtx used to reference the slot. */
460 rtx slot;
461 /* The size, in units, of the slot. */
462 HOST_WIDE_INT size;
463 /* The type of the object in the slot, or zero if it doesn't correspond
464 to a type. We use this to determine whether a slot can be reused.
465 It can be reused if objects of the type of the new slot will always
466 conflict with objects of the type of the old slot. */
467 tree type;
468 /* The alignment (in bits) of the slot. */
469 unsigned int align;
470 /* Nonzero if this temporary is currently in use. */
471 char in_use;
472 /* Nonzero if this temporary has its address taken. */
473 char addr_taken;
474 /* Nesting level at which this slot is being used. */
475 int level;
476 /* Nonzero if this should survive a call to free_temp_slots. */
477 int keep;
478 /* The offset of the slot from the frame_pointer, including extra space
479 for alignment. This info is for combine_temp_slots. */
480 HOST_WIDE_INT base_offset;
481 /* The size of the slot, including extra space for alignment. This
482 info is for combine_temp_slots. */
483 HOST_WIDE_INT full_size;
484 };
485
486 /* A table of addresses that represent a stack slot. The table is a mapping
487 from address RTXen to a temp slot. */
488 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
489
490 /* Entry for the above hash table. */
491 struct temp_slot_address_entry GTY(())
492 {
493 hashval_t hash;
494 rtx address;
495 struct temp_slot *temp_slot;
496 };
497
498 /* Removes temporary slot TEMP from LIST. */
499
500 static void
501 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
502 {
503 if (temp->next)
504 temp->next->prev = temp->prev;
505 if (temp->prev)
506 temp->prev->next = temp->next;
507 else
508 *list = temp->next;
509
510 temp->prev = temp->next = NULL;
511 }
512
513 /* Inserts temporary slot TEMP to LIST. */
514
515 static void
516 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
517 {
518 temp->next = *list;
519 if (*list)
520 (*list)->prev = temp;
521 temp->prev = NULL;
522 *list = temp;
523 }
524
525 /* Returns the list of used temp slots at LEVEL. */
526
527 static struct temp_slot **
528 temp_slots_at_level (int level)
529 {
530 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
531 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
532
533 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
534 }
535
536 /* Returns the maximal temporary slot level. */
537
538 static int
539 max_slot_level (void)
540 {
541 if (!used_temp_slots)
542 return -1;
543
544 return VEC_length (temp_slot_p, used_temp_slots) - 1;
545 }
546
547 /* Moves temporary slot TEMP to LEVEL. */
548
549 static void
550 move_slot_to_level (struct temp_slot *temp, int level)
551 {
552 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
553 insert_slot_to_list (temp, temp_slots_at_level (level));
554 temp->level = level;
555 }
556
557 /* Make temporary slot TEMP available. */
558
559 static void
560 make_slot_available (struct temp_slot *temp)
561 {
562 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
563 insert_slot_to_list (temp, &avail_temp_slots);
564 temp->in_use = 0;
565 temp->level = -1;
566 }
567
568 /* Compute the hash value for an address -> temp slot mapping.
569 The value is cached on the mapping entry. */
570 static hashval_t
571 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
572 {
573 int do_not_record = 0;
574 return hash_rtx (t->address, GET_MODE (t->address),
575 &do_not_record, NULL, false);
576 }
577
578 /* Return the hash value for an address -> temp slot mapping. */
579 static hashval_t
580 temp_slot_address_hash (const void *p)
581 {
582 const struct temp_slot_address_entry *t;
583 t = (const struct temp_slot_address_entry *) p;
584 return t->hash;
585 }
586
587 /* Compare two address -> temp slot mapping entries. */
588 static int
589 temp_slot_address_eq (const void *p1, const void *p2)
590 {
591 const struct temp_slot_address_entry *t1, *t2;
592 t1 = (const struct temp_slot_address_entry *) p1;
593 t2 = (const struct temp_slot_address_entry *) p2;
594 return exp_equiv_p (t1->address, t2->address, 0, true);
595 }
596
597 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
598 static void
599 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
600 {
601 void **slot;
602 struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry);
603 t->address = address;
604 t->temp_slot = temp_slot;
605 t->hash = temp_slot_address_compute_hash (t);
606 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
607 *slot = t;
608 }
609
610 /* Remove an address -> temp slot mapping entry if the temp slot is
611 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
612 static int
613 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
614 {
615 const struct temp_slot_address_entry *t;
616 t = (const struct temp_slot_address_entry *) *slot;
617 if (! t->temp_slot->in_use)
618 *slot = NULL;
619 return 1;
620 }
621
622 /* Remove all mappings of addresses to unused temp slots. */
623 static void
624 remove_unused_temp_slot_addresses (void)
625 {
626 htab_traverse (temp_slot_address_table,
627 remove_unused_temp_slot_addresses_1,
628 NULL);
629 }
630
631 /* Find the temp slot corresponding to the object at address X. */
632
633 static struct temp_slot *
634 find_temp_slot_from_address (rtx x)
635 {
636 struct temp_slot *p;
637 struct temp_slot_address_entry tmp, *t;
638
639 /* First try the easy way:
640 See if X exists in the address -> temp slot mapping. */
641 tmp.address = x;
642 tmp.temp_slot = NULL;
643 tmp.hash = temp_slot_address_compute_hash (&tmp);
644 t = (struct temp_slot_address_entry *)
645 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
646 if (t)
647 return t->temp_slot;
648
649 /* If we have a sum involving a register, see if it points to a temp
650 slot. */
651 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
652 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
653 return p;
654 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
655 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
656 return p;
657
658 /* Last resort: Address is a virtual stack var address. */
659 if (GET_CODE (x) == PLUS
660 && XEXP (x, 0) == virtual_stack_vars_rtx
661 && GET_CODE (XEXP (x, 1)) == CONST_INT)
662 {
663 int i;
664 for (i = max_slot_level (); i >= 0; i--)
665 for (p = *temp_slots_at_level (i); p; p = p->next)
666 {
667 if (INTVAL (XEXP (x, 1)) >= p->base_offset
668 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
669 return p;
670 }
671 }
672
673 return NULL;
674 }
675
676 /* Allocate a temporary stack slot and record it for possible later
677 reuse.
678
679 MODE is the machine mode to be given to the returned rtx.
680
681 SIZE is the size in units of the space required. We do no rounding here
682 since assign_stack_local will do any required rounding.
683
684 KEEP is 1 if this slot is to be retained after a call to
685 free_temp_slots. Automatic variables for a block are allocated
686 with this flag. KEEP values of 2 or 3 were needed respectively
687 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
688 or for SAVE_EXPRs, but they are now unused.
689
690 TYPE is the type that will be used for the stack slot. */
691
692 rtx
693 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
694 int keep, tree type)
695 {
696 unsigned int align;
697 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
698 rtx slot;
699
700 /* If SIZE is -1 it means that somebody tried to allocate a temporary
701 of a variable size. */
702 gcc_assert (size != -1);
703
704 /* These are now unused. */
705 gcc_assert (keep <= 1);
706
707 align = get_stack_local_alignment (type, mode);
708
709 /* Try to find an available, already-allocated temporary of the proper
710 mode which meets the size and alignment requirements. Choose the
711 smallest one with the closest alignment.
712
713 If assign_stack_temp is called outside of the tree->rtl expansion,
714 we cannot reuse the stack slots (that may still refer to
715 VIRTUAL_STACK_VARS_REGNUM). */
716 if (!virtuals_instantiated)
717 {
718 for (p = avail_temp_slots; p; p = p->next)
719 {
720 if (p->align >= align && p->size >= size
721 && GET_MODE (p->slot) == mode
722 && objects_must_conflict_p (p->type, type)
723 && (best_p == 0 || best_p->size > p->size
724 || (best_p->size == p->size && best_p->align > p->align)))
725 {
726 if (p->align == align && p->size == size)
727 {
728 selected = p;
729 cut_slot_from_list (selected, &avail_temp_slots);
730 best_p = 0;
731 break;
732 }
733 best_p = p;
734 }
735 }
736 }
737
738 /* Make our best, if any, the one to use. */
739 if (best_p)
740 {
741 selected = best_p;
742 cut_slot_from_list (selected, &avail_temp_slots);
743
744 /* If there are enough aligned bytes left over, make them into a new
745 temp_slot so that the extra bytes don't get wasted. Do this only
746 for BLKmode slots, so that we can be sure of the alignment. */
747 if (GET_MODE (best_p->slot) == BLKmode)
748 {
749 int alignment = best_p->align / BITS_PER_UNIT;
750 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
751
752 if (best_p->size - rounded_size >= alignment)
753 {
754 p = GGC_NEW (struct temp_slot);
755 p->in_use = p->addr_taken = 0;
756 p->size = best_p->size - rounded_size;
757 p->base_offset = best_p->base_offset + rounded_size;
758 p->full_size = best_p->full_size - rounded_size;
759 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
760 p->align = best_p->align;
761 p->type = best_p->type;
762 insert_slot_to_list (p, &avail_temp_slots);
763
764 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
765 stack_slot_list);
766
767 best_p->size = rounded_size;
768 best_p->full_size = rounded_size;
769 }
770 }
771 }
772
773 /* If we still didn't find one, make a new temporary. */
774 if (selected == 0)
775 {
776 HOST_WIDE_INT frame_offset_old = frame_offset;
777
778 p = GGC_NEW (struct temp_slot);
779
780 /* We are passing an explicit alignment request to assign_stack_local.
781 One side effect of that is assign_stack_local will not round SIZE
782 to ensure the frame offset remains suitably aligned.
783
784 So for requests which depended on the rounding of SIZE, we go ahead
785 and round it now. We also make sure ALIGNMENT is at least
786 BIGGEST_ALIGNMENT. */
787 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
788 p->slot = assign_stack_local (mode,
789 (mode == BLKmode
790 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
791 : size),
792 align);
793
794 p->align = align;
795
796 /* The following slot size computation is necessary because we don't
797 know the actual size of the temporary slot until assign_stack_local
798 has performed all the frame alignment and size rounding for the
799 requested temporary. Note that extra space added for alignment
800 can be either above or below this stack slot depending on which
801 way the frame grows. We include the extra space if and only if it
802 is above this slot. */
803 if (FRAME_GROWS_DOWNWARD)
804 p->size = frame_offset_old - frame_offset;
805 else
806 p->size = size;
807
808 /* Now define the fields used by combine_temp_slots. */
809 if (FRAME_GROWS_DOWNWARD)
810 {
811 p->base_offset = frame_offset;
812 p->full_size = frame_offset_old - frame_offset;
813 }
814 else
815 {
816 p->base_offset = frame_offset_old;
817 p->full_size = frame_offset - frame_offset_old;
818 }
819
820 selected = p;
821 }
822
823 p = selected;
824 p->in_use = 1;
825 p->addr_taken = 0;
826 p->type = type;
827 p->level = temp_slot_level;
828 p->keep = keep;
829
830 pp = temp_slots_at_level (p->level);
831 insert_slot_to_list (p, pp);
832 insert_temp_slot_address (XEXP (p->slot, 0), p);
833
834 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
835 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
836 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
837
838 /* If we know the alias set for the memory that will be used, use
839 it. If there's no TYPE, then we don't know anything about the
840 alias set for the memory. */
841 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
842 set_mem_align (slot, align);
843
844 /* If a type is specified, set the relevant flags. */
845 if (type != 0)
846 {
847 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
848 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
849 || TREE_CODE (type) == COMPLEX_TYPE));
850 }
851 MEM_NOTRAP_P (slot) = 1;
852
853 return slot;
854 }
855
856 /* Allocate a temporary stack slot and record it for possible later
857 reuse. The first three arguments are the same as in the preceding function. */
858
859 rtx
860 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
861 {
862 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
863 }
864
865 /* Assign a temporary.
866 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
867 and the decl should be used in error messages. In either case, we
868 allocate a temporary of the given type.
869 KEEP is as for assign_stack_temp.
870 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
871 it is 0 if a register is OK.
872 DONT_PROMOTE is 1 if we should not promote values in register
873 to wider modes. */
874
875 rtx
876 assign_temp (tree type_or_decl, int keep, int memory_required,
877 int dont_promote ATTRIBUTE_UNUSED)
878 {
879 tree type, decl;
880 enum machine_mode mode;
881 #ifdef PROMOTE_MODE
882 int unsignedp;
883 #endif
884
885 if (DECL_P (type_or_decl))
886 decl = type_or_decl, type = TREE_TYPE (decl);
887 else
888 decl = NULL, type = type_or_decl;
889
890 mode = TYPE_MODE (type);
891 #ifdef PROMOTE_MODE
892 unsignedp = TYPE_UNSIGNED (type);
893 #endif
894
895 if (mode == BLKmode || memory_required)
896 {
897 HOST_WIDE_INT size = int_size_in_bytes (type);
898 rtx tmp;
899
900 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
901 problems with allocating the stack space. */
902 if (size == 0)
903 size = 1;
904
905 /* Unfortunately, we don't yet know how to allocate variable-sized
906 temporaries. However, sometimes we can find a fixed upper limit on
907 the size, so try that instead. */
908 else if (size == -1)
909 size = max_int_size_in_bytes (type);
910
911 /* The size of the temporary may be too large to fit into an integer. */
912 /* ??? Not sure this should happen except for user silliness, so limit
913 this to things that aren't compiler-generated temporaries. The
914 rest of the time we'll die in assign_stack_temp_for_type. */
915 if (decl && size == -1
916 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
917 {
918 error ("size of variable %q+D is too large", decl);
919 size = 1;
920 }
921
922 tmp = assign_stack_temp_for_type (mode, size, keep, type);
923 return tmp;
924 }
925
926 #ifdef PROMOTE_MODE
927 if (! dont_promote)
928 mode = promote_mode (type, mode, &unsignedp, 0);
929 #endif
930
931 return gen_reg_rtx (mode);
932 }
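
/* Hypothetical usage sketch: a caller that needs an addressable
   temporary for TYPE would write

       rtx tmp = assign_temp (type, 0, 1, 0);

   that is, KEEP == 0 (freed at the next free_temp_slots),
   MEMORY_REQUIRED == 1 (must be a stack slot, never a pseudo) and
   DONT_PROMOTE == 0. With MEMORY_REQUIRED == 0 and a scalar-mode TYPE,
   the call falls through to gen_reg_rtx and may return a pseudo in a
   promoted mode.  */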
933
934 /* Combine temporary stack slots which are adjacent on the stack.
935
936 This allows for better use of already allocated stack space. This is only
937 done for BLKmode slots because we can be sure that we won't have alignment
938 problems in this case. */
939
940 static void
941 combine_temp_slots (void)
942 {
943 struct temp_slot *p, *q, *next, *next_q;
944 int num_slots;
945
946 /* We can't combine slots, because the information about which slot
947 is in which alias set will be lost. */
948 if (flag_strict_aliasing)
949 return;
950
951 /* If there are a lot of temp slots, don't do anything unless
952 high levels of optimization are enabled. */
953 if (! flag_expensive_optimizations)
954 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
955 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
956 return;
957
958 for (p = avail_temp_slots; p; p = next)
959 {
960 int delete_p = 0;
961
962 next = p->next;
963
964 if (GET_MODE (p->slot) != BLKmode)
965 continue;
966
967 for (q = p->next; q; q = next_q)
968 {
969 int delete_q = 0;
970
971 next_q = q->next;
972
973 if (GET_MODE (q->slot) != BLKmode)
974 continue;
975
976 if (p->base_offset + p->full_size == q->base_offset)
977 {
978 /* Q comes after P; combine Q into P. */
979 p->size += q->size;
980 p->full_size += q->full_size;
981 delete_q = 1;
982 }
983 else if (q->base_offset + q->full_size == p->base_offset)
984 {
985 /* P comes after Q; combine P into Q. */
986 q->size += p->size;
987 q->full_size += p->full_size;
988 delete_p = 1;
989 break;
990 }
991 if (delete_q)
992 cut_slot_from_list (q, &avail_temp_slots);
993 }
994
995 /* Either delete P or advance past it. */
996 if (delete_p)
997 cut_slot_from_list (p, &avail_temp_slots);
998 }
999 }
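
/* Concrete example, assuming a downward-growing frame: let P occupy
   bytes [-32, -16) and Q bytes [-16, 0), i.e. p->base_offset == -32,
   p->full_size == 16 and q->base_offset == -16. Then
   p->base_offset + p->full_size == q->base_offset, so Q is merged into
   P and P becomes a single 32-byte slot covering [-32, 0).  */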
1000
1001 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1002 slot that previously was known by OLD_RTX. */
1003
1004 void
1005 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1006 {
1007 struct temp_slot *p;
1008
1009 if (rtx_equal_p (old_rtx, new_rtx))
1010 return;
1011
1012 p = find_temp_slot_from_address (old_rtx);
1013
1014 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1015 NEW_RTX is a register, see if one operand of the PLUS is a
1016 temporary location; if so, NEW_RTX points into it. Otherwise,
1017 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1018 in common between them, and if so, try a recursive call on those
1019 values. */
1020 if (p == 0)
1021 {
1022 if (GET_CODE (old_rtx) != PLUS)
1023 return;
1024
1025 if (REG_P (new_rtx))
1026 {
1027 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1028 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1029 return;
1030 }
1031 else if (GET_CODE (new_rtx) != PLUS)
1032 return;
1033
1034 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1035 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1036 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1037 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1038 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1039 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1040 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1041 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1042
1043 return;
1044 }
1045
1046 /* Otherwise add an alias for the temp's address. */
1047 insert_temp_slot_address (new_rtx, p);
1048 }
1049
1050 /* If X could be a reference to a temporary slot, mark the fact that its
1051 address was taken. */
1052
1053 void
1054 mark_temp_addr_taken (rtx x)
1055 {
1056 struct temp_slot *p;
1057
1058 if (x == 0)
1059 return;
1060
1061 /* If X is not in memory or is at a constant address, it cannot be in
1062 a temporary slot. */
1063 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1064 return;
1065
1066 p = find_temp_slot_from_address (XEXP (x, 0));
1067 if (p != 0)
1068 p->addr_taken = 1;
1069 }
1070
1071 /* If X could be a reference to a temporary slot, mark that slot as
1072 belonging to the level one higher than the current level. If X
1073 matched one of our slots, just mark that one. Otherwise, we can't
1074 easily predict which it is, so upgrade all of them. Kept slots
1075 need not be touched.
1076
1077 This is called when an ({...}) construct occurs and a statement
1078 returns a value in memory. */
1079
1080 void
1081 preserve_temp_slots (rtx x)
1082 {
1083 struct temp_slot *p = 0, *next;
1084
1085 /* If there is no result, we still might have some objects whose addresses
1086 were taken, so we need to make sure they stay around. */
1087 if (x == 0)
1088 {
1089 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1090 {
1091 next = p->next;
1092
1093 if (p->addr_taken)
1094 move_slot_to_level (p, temp_slot_level - 1);
1095 }
1096
1097 return;
1098 }
1099
1100 /* If X is a register that is being used as a pointer, see if we have
1101 a temporary slot we know it points to. To be consistent with
1102 the code below, we really should preserve all non-kept slots
1103 if we can't find a match, but that seems to be much too costly. */
1104 if (REG_P (x) && REG_POINTER (x))
1105 p = find_temp_slot_from_address (x);
1106
1107 /* If X is not in memory or is at a constant address, it cannot be in
1108 a temporary slot, but it can contain something whose address was
1109 taken. */
1110 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1111 {
1112 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1113 {
1114 next = p->next;
1115
1116 if (p->addr_taken)
1117 move_slot_to_level (p, temp_slot_level - 1);
1118 }
1119
1120 return;
1121 }
1122
1123 /* First see if we can find a match. */
1124 if (p == 0)
1125 p = find_temp_slot_from_address (XEXP (x, 0));
1126
1127 if (p != 0)
1128 {
1129 /* Move everything at our level whose address was taken to our new
1130 level in case we used its address. */
1131 struct temp_slot *q;
1132
1133 if (p->level == temp_slot_level)
1134 {
1135 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1136 {
1137 next = q->next;
1138
1139 if (p != q && q->addr_taken)
1140 move_slot_to_level (q, temp_slot_level - 1);
1141 }
1142
1143 move_slot_to_level (p, temp_slot_level - 1);
1144 p->addr_taken = 0;
1145 }
1146 return;
1147 }
1148
1149 /* Otherwise, preserve all non-kept slots at this level. */
1150 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1151 {
1152 next = p->next;
1153
1154 if (!p->keep)
1155 move_slot_to_level (p, temp_slot_level - 1);
1156 }
1157 }
1158
1159 /* Free all temporaries used so far. This is normally called at the
1160 end of generating code for a statement. */
1161
1162 void
1163 free_temp_slots (void)
1164 {
1165 struct temp_slot *p, *next;
1166
1167 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1168 {
1169 next = p->next;
1170
1171 if (!p->keep)
1172 make_slot_available (p);
1173 }
1174
1175 remove_unused_temp_slot_addresses ();
1176 combine_temp_slots ();
1177 }
1178
1179 /* Push deeper into the nesting level for stack temporaries. */
1180
1181 void
1182 push_temp_slots (void)
1183 {
1184 temp_slot_level++;
1185 }
1186
1187 /* Pop a temporary nesting level. All slots in use in the current level
1188 are freed. */
1189
1190 void
1191 pop_temp_slots (void)
1192 {
1193 struct temp_slot *p, *next;
1194
1195 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1196 {
1197 next = p->next;
1198 make_slot_available (p);
1199 }
1200
1201 remove_unused_temp_slot_addresses ();
1202 combine_temp_slots ();
1203
1204 temp_slot_level--;
1205 }
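
/* Typical pairing, sketched (simplified from how expansion code drives
   these routines):

       push_temp_slots ();
       ... expand a statement, calling assign_temp as needed ...
       preserve_temp_slots (result);
       pop_temp_slots ();

   preserve_temp_slots moves any slot RESULT might live in up to the
   enclosing level; everything else becomes available for reuse when
   pop_temp_slots runs.  */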
1206
1207 /* Initialize temporary slots. */
1208
1209 void
1210 init_temp_slots (void)
1211 {
1212 /* We have not allocated any temporaries yet. */
1213 avail_temp_slots = 0;
1214 used_temp_slots = 0;
1215 temp_slot_level = 0;
1216
1217 /* Set up the table to map addresses to temp slots. */
1218 if (! temp_slot_address_table)
1219 temp_slot_address_table = htab_create_ggc (32,
1220 temp_slot_address_hash,
1221 temp_slot_address_eq,
1222 NULL);
1223 else
1224 htab_empty (temp_slot_address_table);
1225 }
1226
1227 /* These routines are responsible for converting virtual register references
1228 to the actual hard register references once RTL generation is complete.
1229
1230 The following four variables are used for communication between the
1231 routines. They contain the offsets of the virtual registers from their
1232 respective hard registers. */
1233
1234 static int in_arg_offset;
1235 static int var_offset;
1236 static int dynamic_offset;
1237 static int out_arg_offset;
1238 static int cfa_offset;
1239
1240 /* In most machines, the stack pointer register is equivalent to the bottom
1241 of the stack. */
1242
1243 #ifndef STACK_POINTER_OFFSET
1244 #define STACK_POINTER_OFFSET 0
1245 #endif
1246
1247 /* If not defined, pick an appropriate default for the offset of dynamically
1248 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1249 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1250
1251 #ifndef STACK_DYNAMIC_OFFSET
1252
1253 /* The bottom of the stack points to the actual arguments. If
1254 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1255 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1256 stack space for register parameters is not pushed by the caller, but
1257 is rather part of the fixed stack areas and hence not included in
1258 `crtl->outgoing_args_size'. Nevertheless, we must allow
1259 for it when allocating stack dynamic objects. */
1260
1261 #if defined(REG_PARM_STACK_SPACE)
1262 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1263 ((ACCUMULATE_OUTGOING_ARGS \
1264 ? (crtl->outgoing_args_size \
1265 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1266 : REG_PARM_STACK_SPACE (FNDECL))) \
1267 : 0) + (STACK_POINTER_OFFSET))
1268 #else
1269 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1270 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1271 + (STACK_POINTER_OFFSET))
1272 #endif
1273 #endif
1274
1275
1276 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1277 is a virtual register, return the equivalent hard register and set the
1278 offset indirectly through the pointer. Otherwise, return 0. */
1279
1280 static rtx
1281 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1282 {
1283 rtx new_rtx;
1284 HOST_WIDE_INT offset;
1285
1286 if (x == virtual_incoming_args_rtx)
1287 {
1288 if (stack_realign_drap)
1289 {
1290 /* Replace virtual_incoming_args_rtx with internal arg
1291 pointer if DRAP is used to realign stack. */
1292 new_rtx = crtl->args.internal_arg_pointer;
1293 offset = 0;
1294 }
1295 else
1296 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1297 }
1298 else if (x == virtual_stack_vars_rtx)
1299 new_rtx = frame_pointer_rtx, offset = var_offset;
1300 else if (x == virtual_stack_dynamic_rtx)
1301 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1302 else if (x == virtual_outgoing_args_rtx)
1303 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1304 else if (x == virtual_cfa_rtx)
1305 {
1306 #ifdef FRAME_POINTER_CFA_OFFSET
1307 new_rtx = frame_pointer_rtx;
1308 #else
1309 new_rtx = arg_pointer_rtx;
1310 #endif
1311 offset = cfa_offset;
1312 }
1313 else
1314 return NULL_RTX;
1315
1316 *poffset = offset;
1317 return new_rtx;
1318 }
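
/* Example of the substitution this enables, with illustrative offsets:
   once instantiate_virtual_regs has computed var_offset, an address such
   as

       (plus (reg virtual-stack-vars) (const_int 8))

   is rewritten into

       (plus (reg frame-pointer) (const_int (var_offset + 8)))

   i.e. each virtual register becomes its hard counterpart plus the
   offset returned through POFFSET.  */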
1319
1320 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1321 Instantiate any virtual registers present inside of *LOC. The expression
1322 is simplified, as much as possible, but is not to be considered "valid"
1323 in any sense implied by the target. If any change is made, set CHANGED
1324 to true. */
1325
1326 static int
1327 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1328 {
1329 HOST_WIDE_INT offset;
1330 bool *changed = (bool *) data;
1331 rtx x, new_rtx;
1332
1333 x = *loc;
1334 if (x == 0)
1335 return 0;
1336
1337 switch (GET_CODE (x))
1338 {
1339 case REG:
1340 new_rtx = instantiate_new_reg (x, &offset);
1341 if (new_rtx)
1342 {
1343 *loc = plus_constant (new_rtx, offset);
1344 if (changed)
1345 *changed = true;
1346 }
1347 return -1;
1348
1349 case PLUS:
1350 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1351 if (new_rtx)
1352 {
1353 new_rtx = plus_constant (new_rtx, offset);
1354 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1355 if (changed)
1356 *changed = true;
1357 return -1;
1358 }
1359
1360 /* FIXME -- from old code */
1361 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1362 we can commute the PLUS and SUBREG because pointers into the
1363 frame are well-behaved. */
1364 break;
1365
1366 default:
1367 break;
1368 }
1369
1370 return 0;
1371 }
1372
1373 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1374 matches the predicate for insn CODE operand OPERAND. */
1375
1376 static int
1377 safe_insn_predicate (int code, int operand, rtx x)
1378 {
1379 const struct insn_operand_data *op_data;
1380
1381 if (code < 0)
1382 return true;
1383
1384 op_data = &insn_data[code].operand[operand];
1385 if (op_data->predicate == NULL)
1386 return true;
1387
1388 return op_data->predicate (x, op_data->mode);
1389 }
1390
1391 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1392 registers present inside of INSN. The result will be a valid insn. */
1393
1394 static void
1395 instantiate_virtual_regs_in_insn (rtx insn)
1396 {
1397 HOST_WIDE_INT offset;
1398 int insn_code, i;
1399 bool any_change = false;
1400 rtx set, new_rtx, x, seq;
1401
1402 /* There are some special cases to be handled first. */
1403 set = single_set (insn);
1404 if (set)
1405 {
1406 /* We're allowed to assign to a virtual register. This is interpreted
1407 to mean that the underlying register gets assigned the inverse
1408 transformation. This is used, for example, in the handling of
1409 non-local gotos. */
1410 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1411 if (new_rtx)
1412 {
1413 start_sequence ();
1414
1415 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1416 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1417 GEN_INT (-offset));
1418 x = force_operand (x, new_rtx);
1419 if (x != new_rtx)
1420 emit_move_insn (new_rtx, x);
1421
1422 seq = get_insns ();
1423 end_sequence ();
1424
1425 emit_insn_before (seq, insn);
1426 delete_insn (insn);
1427 return;
1428 }
1429
1430 /* Handle a straight copy from a virtual register by generating a
1431 new add insn. The difference between this and falling through
1432 to the generic case is avoiding a new pseudo and eliminating a
1433 move insn in the initial rtl stream. */
1434 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1435 if (new_rtx && offset != 0
1436 && REG_P (SET_DEST (set))
1437 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1438 {
1439 start_sequence ();
1440
1441 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1442 new_rtx, GEN_INT (offset), SET_DEST (set),
1443 1, OPTAB_LIB_WIDEN);
1444 if (x != SET_DEST (set))
1445 emit_move_insn (SET_DEST (set), x);
1446
1447 seq = get_insns ();
1448 end_sequence ();
1449
1450 emit_insn_before (seq, insn);
1451 delete_insn (insn);
1452 return;
1453 }
1454
1455 extract_insn (insn);
1456 insn_code = INSN_CODE (insn);
1457
1458 /* Handle a plus involving a virtual register by determining if the
1459 operands remain valid if they're modified in place. */
1460 if (GET_CODE (SET_SRC (set)) == PLUS
1461 && recog_data.n_operands >= 3
1462 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1463 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1464 && GET_CODE (recog_data.operand[2]) == CONST_INT
1465 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1466 {
1467 offset += INTVAL (recog_data.operand[2]);
1468
1469 /* If the sum is zero, then replace with a plain move. */
1470 if (offset == 0
1471 && REG_P (SET_DEST (set))
1472 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1473 {
1474 start_sequence ();
1475 emit_move_insn (SET_DEST (set), new_rtx);
1476 seq = get_insns ();
1477 end_sequence ();
1478
1479 emit_insn_before (seq, insn);
1480 delete_insn (insn);
1481 return;
1482 }
1483
1484 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1485
1486 /* Using validate_change and apply_change_group here leaves
1487 recog_data in an invalid state. Since we know exactly what
1488 we want to check, do those two by hand. */
1489 if (safe_insn_predicate (insn_code, 1, new_rtx)
1490 && safe_insn_predicate (insn_code, 2, x))
1491 {
1492 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1493 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1494 any_change = true;
1495
1496 /* Fall through into the regular operand fixup loop in
1497 order to take care of operands other than 1 and 2. */
1498 }
1499 }
1500 }
1501 else
1502 {
1503 extract_insn (insn);
1504 insn_code = INSN_CODE (insn);
1505 }
1506
1507 /* In the general case, we expect virtual registers to appear only in
1508 operands, and then only as either bare registers or inside memories. */
1509 for (i = 0; i < recog_data.n_operands; ++i)
1510 {
1511 x = recog_data.operand[i];
1512 switch (GET_CODE (x))
1513 {
1514 case MEM:
1515 {
1516 rtx addr = XEXP (x, 0);
1517 bool changed = false;
1518
1519 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1520 if (!changed)
1521 continue;
1522
1523 start_sequence ();
1524 x = replace_equiv_address (x, addr);
1525 /* It may happen that the address with the virtual reg
1526 was valid (e.g. based on the virtual stack reg, which might
1527 be acceptable to the predicates with all offsets), whereas
1528 the address now isn't anymore, for instance when the address
1529 is still offsetted, but the base reg isn't virtual-stack-reg
1530 anymore. Below we would do a force_reg on the whole operand,
1531 but this insn might actually only accept memory. Hence,
1532 before doing that last resort, try to reload the address into
1533 a register, so this operand stays a MEM. */
1534 if (!safe_insn_predicate (insn_code, i, x))
1535 {
1536 addr = force_reg (GET_MODE (addr), addr);
1537 x = replace_equiv_address (x, addr);
1538 }
1539 seq = get_insns ();
1540 end_sequence ();
1541 if (seq)
1542 emit_insn_before (seq, insn);
1543 }
1544 break;
1545
1546 case REG:
1547 new_rtx = instantiate_new_reg (x, &offset);
1548 if (new_rtx == NULL)
1549 continue;
1550 if (offset == 0)
1551 x = new_rtx;
1552 else
1553 {
1554 start_sequence ();
1555
1556 /* Careful, special mode predicates may have stuff in
1557 insn_data[insn_code].operand[i].mode that isn't useful
1558 to us for computing a new value. */
1559 /* ??? Recognize address_operand and/or "p" constraints
1560 to see if (plus new offset) is a valid address before we
1561 put this through expand_simple_binop. */
1562 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1563 GEN_INT (offset), NULL_RTX,
1564 1, OPTAB_LIB_WIDEN);
1565 seq = get_insns ();
1566 end_sequence ();
1567 emit_insn_before (seq, insn);
1568 }
1569 break;
1570
1571 case SUBREG:
1572 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1573 if (new_rtx == NULL)
1574 continue;
1575 if (offset != 0)
1576 {
1577 start_sequence ();
1578 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1579 GEN_INT (offset), NULL_RTX,
1580 1, OPTAB_LIB_WIDEN);
1581 seq = get_insns ();
1582 end_sequence ();
1583 emit_insn_before (seq, insn);
1584 }
1585 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1586 GET_MODE (new_rtx), SUBREG_BYTE (x));
1587 gcc_assert (x);
1588 break;
1589
1590 default:
1591 continue;
1592 }
1593
1594 /* At this point, X contains the new value for the operand.
1595 Validate the new value vs the insn predicate. Note that
1596 asm insns will have insn_code -1 here. */
1597 if (!safe_insn_predicate (insn_code, i, x))
1598 {
1599 start_sequence ();
1600 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1601 seq = get_insns ();
1602 end_sequence ();
1603 if (seq)
1604 emit_insn_before (seq, insn);
1605 }
1606
1607 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1608 any_change = true;
1609 }
1610
1611 if (any_change)
1612 {
1613 /* Propagate operand changes into the duplicates. */
1614 for (i = 0; i < recog_data.n_dups; ++i)
1615 *recog_data.dup_loc[i]
1616 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1617
1618 /* Force re-recognition of the instruction for validation. */
1619 INSN_CODE (insn) = -1;
1620 }
1621
1622 if (asm_noperands (PATTERN (insn)) >= 0)
1623 {
1624 if (!check_asm_operands (PATTERN (insn)))
1625 {
1626 error_for_asm (insn, "impossible constraint in %<asm%>");
1627 delete_insn (insn);
1628 }
1629 }
1630 else
1631 {
1632 if (recog_memoized (insn) < 0)
1633 fatal_insn_not_found (insn);
1634 }
1635 }
1636
1637 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1638 do any instantiation required. */
1639
1640 void
1641 instantiate_decl_rtl (rtx x)
1642 {
1643 rtx addr;
1644
1645 if (x == 0)
1646 return;
1647
1648 /* If this is a CONCAT, recurse for the pieces. */
1649 if (GET_CODE (x) == CONCAT)
1650 {
1651 instantiate_decl_rtl (XEXP (x, 0));
1652 instantiate_decl_rtl (XEXP (x, 1));
1653 return;
1654 }
1655
1656 /* If this is not a MEM, no need to do anything. Similarly if the
1657 address is a constant or a register that is not a virtual register. */
1658 if (!MEM_P (x))
1659 return;
1660
1661 addr = XEXP (x, 0);
1662 if (CONSTANT_P (addr)
1663 || (REG_P (addr)
1664 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1665 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1666 return;
1667
1668 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1669 }
1670
1671 /* Helper for instantiate_decls called via walk_tree: Process all decls
1672 in the given DECL_VALUE_EXPR. */
1673
1674 static tree
1675 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1676 {
1677 tree t = *tp;
1678 if (! EXPR_P (t))
1679 {
1680 *walk_subtrees = 0;
1681 if (DECL_P (t) && DECL_RTL_SET_P (t))
1682 instantiate_decl_rtl (DECL_RTL (t));
1683 }
1684 return NULL;
1685 }
1686
1687 /* Subroutine of instantiate_decls: Process all decls in the given
1688 BLOCK node and all its subblocks. */
1689
1690 static void
1691 instantiate_decls_1 (tree let)
1692 {
1693 tree t;
1694
1695 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1696 {
1697 if (DECL_RTL_SET_P (t))
1698 instantiate_decl_rtl (DECL_RTL (t));
1699 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1700 {
1701 tree v = DECL_VALUE_EXPR (t);
1702 walk_tree (&v, instantiate_expr, NULL, NULL);
1703 }
1704 }
1705
1706 /* Process all subblocks. */
1707 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1708 instantiate_decls_1 (t);
1709 }
1710
1711 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1712 all virtual registers in their DECL_RTL's. */
1713
1714 static void
1715 instantiate_decls (tree fndecl)
1716 {
1717 tree decl, t, next;
1718
1719 /* Process all parameters of the function. */
1720 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1721 {
1722 instantiate_decl_rtl (DECL_RTL (decl));
1723 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1724 if (DECL_HAS_VALUE_EXPR_P (decl))
1725 {
1726 tree v = DECL_VALUE_EXPR (decl);
1727 walk_tree (&v, instantiate_expr, NULL, NULL);
1728 }
1729 }
1730
1731 /* Now process all variables defined in the function or its subblocks. */
1732 instantiate_decls_1 (DECL_INITIAL (fndecl));
1733
1734 t = cfun->local_decls;
1735 cfun->local_decls = NULL_TREE;
1736 for (; t; t = next)
1737 {
1738 next = TREE_CHAIN (t);
1739 decl = TREE_VALUE (t);
1740 if (DECL_RTL_SET_P (decl))
1741 instantiate_decl_rtl (DECL_RTL (decl));
1742 ggc_free (t);
1743 }
1744 }
1745
1746 /* Pass through the INSNS of function FNDECL and convert virtual register
1747 references to hard register references. */
1748
1749 static unsigned int
1750 instantiate_virtual_regs (void)
1751 {
1752 rtx insn;
1753
1754 /* Compute the offsets to use for this function. */
1755 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1756 var_offset = STARTING_FRAME_OFFSET;
1757 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1758 out_arg_offset = STACK_POINTER_OFFSET;
1759 #ifdef FRAME_POINTER_CFA_OFFSET
1760 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1761 #else
1762 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1763 #endif
1764
1765 /* Initialize recognition, indicating that volatile is OK. */
1766 init_recog ();
1767
1768 /* Scan through all the insns, instantiating every virtual register still
1769 present. */
1770 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1771 if (INSN_P (insn))
1772 {
1773 /* These patterns in the instruction stream can never be recognized.
1774 Fortunately, they shouldn't contain virtual registers either. */
1775 if (GET_CODE (PATTERN (insn)) == USE
1776 || GET_CODE (PATTERN (insn)) == CLOBBER
1777 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1778 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1779 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1780 continue;
1781
1782 instantiate_virtual_regs_in_insn (insn);
1783
1784 if (INSN_DELETED_P (insn))
1785 continue;
1786
1787 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1788
1789 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1790 if (GET_CODE (insn) == CALL_INSN)
1791 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1792 instantiate_virtual_regs_in_rtx, NULL);
1793 }
1794
1795 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1796 instantiate_decls (current_function_decl);
1797
1798 targetm.instantiate_decls ();
1799
1800 /* Indicate that, from now on, assign_stack_local should use
1801 frame_pointer_rtx. */
1802 virtuals_instantiated = 1;
1803 return 0;
1804 }
1805
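/* For example, once the offsets above are computed, an address such as
(plus (reg virtual-stack-vars) (const_int 8))
is rewritten by instantiate_virtual_regs_in_insn into
(plus (reg frame-pointer) (const_int 8 + var_offset)),
where var_offset is STARTING_FRAME_OFFSET; the incoming-args,
outgoing-args and cfa virtual registers are replaced analogously
using in_arg_offset, out_arg_offset and cfa_offset. */
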
1806 struct rtl_opt_pass pass_instantiate_virtual_regs =
1807 {
1808 {
1809 RTL_PASS,
1810 "vregs", /* name */
1811 NULL, /* gate */
1812 instantiate_virtual_regs, /* execute */
1813 NULL, /* sub */
1814 NULL, /* next */
1815 0, /* static_pass_number */
1816 0, /* tv_id */
1817 0, /* properties_required */
1818 0, /* properties_provided */
1819 0, /* properties_destroyed */
1820 0, /* todo_flags_start */
1821 TODO_dump_func /* todo_flags_finish */
1822 }
1823 };
1824
1825
1826 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1827 This means a type for which function calls must pass an address to the
1828 function or get an address back from the function.
1829 EXP may be a type node or an expression (whose type is tested). */
1830
1831 int
1832 aggregate_value_p (const_tree exp, const_tree fntype)
1833 {
1834 int i, regno, nregs;
1835 rtx reg;
1836
1837 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1838
1839 /* DECL node associated with FNTYPE when relevant, which we might need to
1840 check for by-invisible-reference returns, typically for CALL_EXPR input
1841 EXPressions. */
1842 const_tree fndecl = NULL_TREE;
1843
1844 if (fntype)
1845 switch (TREE_CODE (fntype))
1846 {
1847 case CALL_EXPR:
1848 fndecl = get_callee_fndecl (fntype);
1849 fntype = (fndecl
1850 ? TREE_TYPE (fndecl)
1851 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1852 break;
1853 case FUNCTION_DECL:
1854 fndecl = fntype;
1855 fntype = TREE_TYPE (fndecl);
1856 break;
1857 case FUNCTION_TYPE:
1858 case METHOD_TYPE:
1859 break;
1860 case IDENTIFIER_NODE:
1861 fntype = 0;
1862 break;
1863 default:
1864 /* We don't expect other tree codes here. */
1865 gcc_unreachable ();
1866 }
1867
1868 if (TREE_CODE (type) == VOID_TYPE)
1869 return 0;
1870
1871 /* If the front end has decided that this needs to be passed by
1872 reference, do so. */
1873 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1874 && DECL_BY_REFERENCE (exp))
1875 return 1;
1876
1877 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1878 called function RESULT_DECL, meaning the function returns in memory by
1879 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1880 on the function type, which used to be the way to request such a return
1881 mechanism but might now be causing troubles at gimplification time if
1882 temporaries with the function type need to be created. */
1883 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1884 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1885 return 1;
1886
1887 if (targetm.calls.return_in_memory (type, fntype))
1888 return 1;
1889 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1890 and thus can't be returned in registers. */
1891 if (TREE_ADDRESSABLE (type))
1892 return 1;
1893 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1894 return 1;
1895 /* Make sure we have suitable call-clobbered regs to return
1896 the value in; if not, we must return it in memory. */
1897 reg = hard_function_value (type, 0, fntype, 0);
1898
1899 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1900 it is OK. */
1901 if (!REG_P (reg))
1902 return 0;
1903
1904 regno = REGNO (reg);
1905 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1906 for (i = 0; i < nregs; i++)
1907 if (! call_used_regs[regno + i])
1908 return 1;
1909 return 0;
1910 }
1911
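/* An illustration of aggregate_value_p above: on a typical 32-bit
target with a single call-clobbered return register, given
struct small { int i; };
struct big { int a[8]; };
`small' fits the register returned by hard_function_value, so the
function returns 0, while `big' fails either the target's
return_in_memory hook or the call-used-register scan at the end and
returns 1, so callers must pass a return slot address. The exact
cutoff is target-specific. */
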
1912 /* Return true if we should assign DECL a pseudo register; false if it
1913 should live on the local stack. */
1914
1915 bool
1916 use_register_for_decl (const_tree decl)
1917 {
1918 if (!targetm.calls.allocate_stack_slots_for_args ())
1919 return true;
1920
1921 /* Honor volatile. */
1922 if (TREE_SIDE_EFFECTS (decl))
1923 return false;
1924
1925 /* Honor addressability. */
1926 if (TREE_ADDRESSABLE (decl))
1927 return false;
1928
1929 /* Only register-like things go in registers. */
1930 if (DECL_MODE (decl) == BLKmode)
1931 return false;
1932
1933 /* If -ffloat-store specified, don't put explicit float variables
1934 into registers. */
1935 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1936 propagates values across these stores, and it probably shouldn't. */
1937 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1938 return false;
1939
1940 /* If we're not interested in tracking debugging information for
1941 this decl, then we can certainly put it in a register. */
1942 if (DECL_IGNORED_P (decl))
1943 return true;
1944
1945 if (optimize)
1946 return true;
1947
1948 if (!DECL_REGISTER (decl))
1949 return false;
1950
1951 switch (TREE_CODE (TREE_TYPE (decl)))
1952 {
1953 case RECORD_TYPE:
1954 case UNION_TYPE:
1955 case QUAL_UNION_TYPE:
1956 /* When not optimizing, disregard register keyword for variables with
1957 types containing methods, otherwise the methods won't be callable
1958 from the debugger. */
1959 if (TYPE_METHODS (TREE_TYPE (decl)))
1960 return false;
1961 break;
1962 default:
1963 break;
1964 }
1965
1966 return true;
1967 }
1968
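/* As an illustration of use_register_for_decl above: in
void f (void) { volatile int v; int i; ... }
`v' has TREE_SIDE_EFFECTS set and is kept on the stack by the
volatility test, whereas `i' gets a pseudo register whenever it is
DECL_IGNORED_P or we are optimizing. */
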
1969 /* Return true if TYPE should be passed by invisible reference. */
1970
1971 bool
1972 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1973 tree type, bool named_arg)
1974 {
1975 if (type)
1976 {
1977 /* If this type contains non-trivial constructors, then it is
1978 forbidden for the middle-end to create any new copies. */
1979 if (TREE_ADDRESSABLE (type))
1980 return true;
1981
1982 /* GCC post 3.4 passes *all* variable sized types by reference. */
1983 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1984 return true;
1985 }
1986
1987 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1988 }
1989
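/* Two common ways to reach the early `return true' arms of
pass_by_reference above: a C++ class with a non-trivial copy
constructor has TREE_ADDRESSABLE set on its type, and an Ada-style
dynamically sized record has a TYPE_SIZE that is not an INTEGER_CST.
Both are passed by invisible reference regardless of what the target
hook would have said. */
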
1990 /* Return true if TYPE, which is passed by reference, should be callee
1991 copied instead of caller copied. */
1992
1993 bool
1994 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1995 tree type, bool named_arg)
1996 {
1997 if (type && TREE_ADDRESSABLE (type))
1998 return false;
1999 return targetm.calls.callee_copies (ca, mode, type, named_arg);
2000 }
2001
2002 /* Structures to communicate between the subroutines of assign_parms.
2003 The first holds data persistent across all parameters, the second
2004 is cleared out for each parameter. */
2005
2006 struct assign_parm_data_all
2007 {
2008 CUMULATIVE_ARGS args_so_far;
2009 struct args_size stack_args_size;
2010 tree function_result_decl;
2011 tree orig_fnargs;
2012 rtx first_conversion_insn;
2013 rtx last_conversion_insn;
2014 HOST_WIDE_INT pretend_args_size;
2015 HOST_WIDE_INT extra_pretend_bytes;
2016 int reg_parm_stack_space;
2017 };
2018
2019 struct assign_parm_data_one
2020 {
2021 tree nominal_type;
2022 tree passed_type;
2023 rtx entry_parm;
2024 rtx stack_parm;
2025 enum machine_mode nominal_mode;
2026 enum machine_mode passed_mode;
2027 enum machine_mode promoted_mode;
2028 struct locate_and_pad_arg_data locate;
2029 int partial;
2030 BOOL_BITFIELD named_arg : 1;
2031 BOOL_BITFIELD passed_pointer : 1;
2032 BOOL_BITFIELD on_stack : 1;
2033 BOOL_BITFIELD loaded_in_reg : 1;
2034 };
2035
2036 /* A subroutine of assign_parms. Initialize ALL. */
2037
2038 static void
2039 assign_parms_initialize_all (struct assign_parm_data_all *all)
2040 {
2041 tree fntype;
2042
2043 memset (all, 0, sizeof (*all));
2044
2045 fntype = TREE_TYPE (current_function_decl);
2046
2047 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2048 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2049 #else
2050 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2051 current_function_decl, -1);
2052 #endif
2053
2054 #ifdef REG_PARM_STACK_SPACE
2055 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2056 #endif
2057 }
2058
2059 /* If ARGS contains entries with complex types, split each such entry into
2060 two entries of the component type. Return a new list if any substitutions
2061 were needed, else the old list. */
2062
2063 static tree
2064 split_complex_args (tree args)
2065 {
2066 tree p;
2067
2068 /* Before allocating memory, check for the common case of no complex. */
2069 for (p = args; p; p = TREE_CHAIN (p))
2070 {
2071 tree type = TREE_TYPE (p);
2072 if (TREE_CODE (type) == COMPLEX_TYPE
2073 && targetm.calls.split_complex_arg (type))
2074 goto found;
2075 }
2076 return args;
2077
2078 found:
2079 args = copy_list (args);
2080
2081 for (p = args; p; p = TREE_CHAIN (p))
2082 {
2083 tree type = TREE_TYPE (p);
2084 if (TREE_CODE (type) == COMPLEX_TYPE
2085 && targetm.calls.split_complex_arg (type))
2086 {
2087 tree decl;
2088 tree subtype = TREE_TYPE (type);
2089 bool addressable = TREE_ADDRESSABLE (p);
2090
2091 /* Rewrite the PARM_DECL's type with its component. */
2092 TREE_TYPE (p) = subtype;
2093 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2094 DECL_MODE (p) = VOIDmode;
2095 DECL_SIZE (p) = NULL;
2096 DECL_SIZE_UNIT (p) = NULL;
2097 /* If this arg must go in memory, put it in a pseudo here.
2098 We can't allow it to go in memory as per normal parms,
2099 because the usual place might not have the imag part
2100 adjacent to the real part. */
2101 DECL_ARTIFICIAL (p) = addressable;
2102 DECL_IGNORED_P (p) = addressable;
2103 TREE_ADDRESSABLE (p) = 0;
2104 layout_decl (p, 0);
2105
2106 /* Build a second synthetic decl. */
2107 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2108 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2109 DECL_ARTIFICIAL (decl) = addressable;
2110 DECL_IGNORED_P (decl) = addressable;
2111 layout_decl (decl, 0);
2112
2113 /* Splice it in; skip the new decl. */
2114 TREE_CHAIN (decl) = TREE_CHAIN (p);
2115 TREE_CHAIN (p) = decl;
2116 p = decl;
2117 }
2118 }
2119
2120 return args;
2121 }
2122
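/* Sketch of the rewrite split_complex_args performs, assuming the
target's split_complex_arg hook accepts complex double: the
parameter list of
double cabs2 (_Complex double z);
is turned into the equivalent of
double cabs2 (double z, double z_imag);
i.e. the original PARM_DECL is narrowed to the component type and a
second synthetic PARM_DECL (z_imag is an invented name; the decl is
actually unnamed) is spliced into the chain right after it. */
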
2123 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2124 the hidden struct return argument, and (abi willing) complex args.
2125 Return the new parameter list. */
2126
2127 static tree
2128 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2129 {
2130 tree fndecl = current_function_decl;
2131 tree fntype = TREE_TYPE (fndecl);
2132 tree fnargs = DECL_ARGUMENTS (fndecl);
2133
2134 /* If struct value address is treated as the first argument, make it so. */
2135 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2136 && ! cfun->returns_pcc_struct
2137 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2138 {
2139 tree type = build_pointer_type (TREE_TYPE (fntype));
2140 tree decl;
2141
2142 decl = build_decl (PARM_DECL, NULL_TREE, type);
2143 DECL_ARG_TYPE (decl) = type;
2144 DECL_ARTIFICIAL (decl) = 1;
2145 DECL_IGNORED_P (decl) = 1;
2146
2147 TREE_CHAIN (decl) = fnargs;
2148 fnargs = decl;
2149 all->function_result_decl = decl;
2150 }
2151
2152 all->orig_fnargs = fnargs;
2153
2154 /* If the target wants to split complex arguments into scalars, do so. */
2155 if (targetm.calls.split_complex_arg)
2156 fnargs = split_complex_args (fnargs);
2157
2158 return fnargs;
2159 }
2160
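/* For instance, if
struct big f (void);
returns in memory (aggregate_value_p) and the target provides no
struct_value_rtx register, the code above prepends an artificial
PARM_DECL of type `struct big *', so the rest of parameter layout
proceeds as if the caller had passed the return slot's address as a
hidden first argument. */
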
2161 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2162 data for the parameter. Incorporate ABI specifics such as pass-by-
2163 reference and type promotion. */
2164
2165 static void
2166 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2167 struct assign_parm_data_one *data)
2168 {
2169 tree nominal_type, passed_type;
2170 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2171
2172 memset (data, 0, sizeof (*data));
2173
2174 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2175 if (!cfun->stdarg)
2176 data->named_arg = 1; /* No variadic parms. */
2177 else if (TREE_CHAIN (parm))
2178 data->named_arg = 1; /* Not the last non-variadic parm. */
2179 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2180 data->named_arg = 1; /* Only variadic ones are unnamed. */
2181 else
2182 data->named_arg = 0; /* Treat as variadic. */
2183
2184 nominal_type = TREE_TYPE (parm);
2185 passed_type = DECL_ARG_TYPE (parm);
2186
2187 /* Look out for errors propagating this far. Also, if the parameter's
2188 type is void then its value doesn't matter. */
2189 if (TREE_TYPE (parm) == error_mark_node
2190 /* This can happen after weird syntax errors
2191 or if an enum type is defined among the parms. */
2192 || TREE_CODE (parm) != PARM_DECL
2193 || passed_type == NULL
2194 || VOID_TYPE_P (nominal_type))
2195 {
2196 nominal_type = passed_type = void_type_node;
2197 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2198 goto egress;
2199 }
2200
2201 /* Find mode of arg as it is passed, and mode of arg as it should be
2202 during execution of this function. */
2203 passed_mode = TYPE_MODE (passed_type);
2204 nominal_mode = TYPE_MODE (nominal_type);
2205
2206 /* If the parm is to be passed as a transparent union, use the type of
2207 the first field for the tests below. We have already verified that
2208 the modes are the same. */
2209 if (TREE_CODE (passed_type) == UNION_TYPE
2210 && TYPE_TRANSPARENT_UNION (passed_type))
2211 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2212
2213 /* See if this arg was passed by invisible reference. */
2214 if (pass_by_reference (&all->args_so_far, passed_mode,
2215 passed_type, data->named_arg))
2216 {
2217 passed_type = nominal_type = build_pointer_type (passed_type);
2218 data->passed_pointer = true;
2219 passed_mode = nominal_mode = Pmode;
2220 }
2221
2222 /* Find mode as it is passed by the ABI. */
2223 promoted_mode = passed_mode;
2224 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2225 {
2226 int unsignedp = TYPE_UNSIGNED (passed_type);
2227 promoted_mode = promote_mode (passed_type, promoted_mode,
2228 &unsignedp, 1);
2229 }
2230
2231 egress:
2232 data->nominal_type = nominal_type;
2233 data->passed_type = passed_type;
2234 data->nominal_mode = nominal_mode;
2235 data->passed_mode = passed_mode;
2236 data->promoted_mode = promoted_mode;
2237 }
2238
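/* Example of the mode triple computed above, assuming a target whose
PROMOTE_MODE widens sub-word integers and whose
promote_function_args hook returns true: for
void h (short s);
nominal_mode and passed_mode are both HImode while promoted_mode is
SImode, i.e. the value travels between caller and callee as a
widened SImode register even though the body treats it as a short. */
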
2239 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2240
2241 static void
2242 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2243 struct assign_parm_data_one *data, bool no_rtl)
2244 {
2245 int varargs_pretend_bytes = 0;
2246
2247 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2248 data->promoted_mode,
2249 data->passed_type,
2250 &varargs_pretend_bytes, no_rtl);
2251
2252 /* If the back-end has requested extra stack space, record how much is
2253 needed. Do not change pretend_args_size otherwise since it may be
2254 nonzero from an earlier partial argument. */
2255 if (varargs_pretend_bytes > 0)
2256 all->pretend_args_size = varargs_pretend_bytes;
2257 }
2258
2259 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2260 the incoming location of the current parameter. */
2261
2262 static void
2263 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2264 struct assign_parm_data_one *data)
2265 {
2266 HOST_WIDE_INT pretend_bytes = 0;
2267 rtx entry_parm;
2268 bool in_regs;
2269
2270 if (data->promoted_mode == VOIDmode)
2271 {
2272 data->entry_parm = data->stack_parm = const0_rtx;
2273 return;
2274 }
2275
2276 #ifdef FUNCTION_INCOMING_ARG
2277 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2278 data->passed_type, data->named_arg);
2279 #else
2280 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2281 data->passed_type, data->named_arg);
2282 #endif
2283
2284 if (entry_parm == 0)
2285 data->promoted_mode = data->passed_mode;
2286
2287 /* Determine parm's home in the stack, in case it arrives in the stack
2288 or we should pretend it did. Compute the stack position and rtx where
2289 the argument arrives and its size.
2290
2291 There is one complexity here: If this was a parameter that would
2292 have been passed in registers, but wasn't, only because it is
2293 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2294 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2295 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2296 as it was the previous time. */
2297 in_regs = entry_parm != 0;
2298 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2299 in_regs = true;
2300 #endif
2301 if (!in_regs && !data->named_arg)
2302 {
2303 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2304 {
2305 rtx tem;
2306 #ifdef FUNCTION_INCOMING_ARG
2307 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2308 data->passed_type, true);
2309 #else
2310 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2311 data->passed_type, true);
2312 #endif
2313 in_regs = tem != NULL;
2314 }
2315 }
2316
2317 /* If this parameter was passed both in registers and in the stack, use
2318 the copy on the stack. */
2319 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2320 data->passed_type))
2321 entry_parm = 0;
2322
2323 if (entry_parm)
2324 {
2325 int partial;
2326
2327 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2328 data->promoted_mode,
2329 data->passed_type,
2330 data->named_arg);
2331 data->partial = partial;
2332
2333 /* The caller might already have allocated stack space for the
2334 register parameters. */
2335 if (partial != 0 && all->reg_parm_stack_space == 0)
2336 {
2337 /* Part of this argument is passed in registers and part
2338 is passed on the stack. Ask the prologue code to extend
2339 the stack part so that we can recreate the full value.
2340
2341 PRETEND_BYTES is the size of the registers we need to store.
2342 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2343 stack space that the prologue should allocate.
2344
2345 Internally, gcc assumes that the argument pointer is aligned
2346 to STACK_BOUNDARY bits. This is used both for alignment
2347 optimizations (see init_emit) and to locate arguments that are
2348 aligned to more than PARM_BOUNDARY bits. We must preserve this
2349 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2350 a stack boundary. */
2351
2352 /* We assume at most one partial arg, and it must be the first
2353 argument on the stack. */
2354 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2355
2356 pretend_bytes = partial;
2357 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2358
2359 /* We want to align relative to the actual stack pointer, so
2360 don't include this in the stack size until later. */
2361 all->extra_pretend_bytes = all->pretend_args_size;
2362 }
2363 }
2364
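/* A worked example of the pretend-args arithmetic above: with
UNITS_PER_WORD == 4 and STACK_BYTES == 8, an argument whose first
four bytes arrive in a register gives partial == 4, so
pretend_args_size becomes CEIL_ROUND (4, 8) == 8 and the prologue
allocates a full eight-byte slot even though only four bytes of
register contents are stored into it. */
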
2365 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2366 entry_parm ? data->partial : 0, current_function_decl,
2367 &all->stack_args_size, &data->locate);
2368
2369 /* Update parm_stack_boundary if this parameter is passed in the
2370 stack. */
2371 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2372 crtl->parm_stack_boundary = data->locate.boundary;
2373
2374 /* Adjust offsets to include the pretend args. */
2375 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2376 data->locate.slot_offset.constant += pretend_bytes;
2377 data->locate.offset.constant += pretend_bytes;
2378
2379 data->entry_parm = entry_parm;
2380 }
2381
2382 /* A subroutine of assign_parms. If there is actually space on the stack
2383 for this parm, count it in stack_args_size and return true. */
2384
2385 static bool
2386 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2387 struct assign_parm_data_one *data)
2388 {
2389 /* Trivially true if we've no incoming register. */
2390 if (data->entry_parm == NULL)
2391 ;
2392 /* Also true if we're partially in registers and partially not,
2393 since we've arranged to drop the entire argument on the stack. */
2394 else if (data->partial != 0)
2395 ;
2396 /* Also true if the target says that it's passed in both registers
2397 and on the stack. */
2398 else if (GET_CODE (data->entry_parm) == PARALLEL
2399 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2400 ;
2401 /* Also true if the target says that there's stack allocated for
2402 all register parameters. */
2403 else if (all->reg_parm_stack_space > 0)
2404 ;
2405 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2406 else
2407 return false;
2408
2409 all->stack_args_size.constant += data->locate.size.constant;
2410 if (data->locate.size.var)
2411 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2412
2413 return true;
2414 }
2415
2416 /* A subroutine of assign_parms. Given that this parameter is allocated
2417 stack space by the ABI, find it. */
2418
2419 static void
2420 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2421 {
2422 rtx offset_rtx, stack_parm;
2423 unsigned int align, boundary;
2424
2425 /* If we're passing this arg using a reg, make its stack home the
2426 aligned stack slot. */
2427 if (data->entry_parm)
2428 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2429 else
2430 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2431
2432 stack_parm = crtl->args.internal_arg_pointer;
2433 if (offset_rtx != const0_rtx)
2434 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2435 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2436
2437 set_mem_attributes (stack_parm, parm, 1);
2438 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2439 while the promoted mode's size is needed. */
2440 if (data->promoted_mode != BLKmode
2441 && data->promoted_mode != DECL_MODE (parm))
2442 {
2443 set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2444 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2445 {
2446 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2447 data->promoted_mode);
2448 if (offset)
2449 set_mem_offset (stack_parm,
2450 plus_constant (MEM_OFFSET (stack_parm), -offset));
2451 }
2452 }
2453
2454 boundary = data->locate.boundary;
2455 align = BITS_PER_UNIT;
2456
2457 /* If we're padding upward, we know that the alignment of the slot
2458 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2459 intentionally forcing upward padding. Otherwise we have to come
2460 up with a guess at the alignment based on OFFSET_RTX. */
2461 if (data->locate.where_pad != downward || data->entry_parm)
2462 align = boundary;
2463 else if (GET_CODE (offset_rtx) == CONST_INT)
2464 {
2465 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2466 align = align & -align;
2467 }
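/* Example of the guess above: a parm at byte offset 12 with a 64-bit
boundary gives align = (12 * 8) | 64 = 0x60, and 0x60 & -0x60 is
0x20, so only 32-bit alignment can be relied upon for this slot. */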
2468 set_mem_align (stack_parm, align);
2469
2470 if (data->entry_parm)
2471 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2472
2473 data->stack_parm = stack_parm;
2474 }
2475
2476 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2477 always valid and contiguous. */
2478
2479 static void
2480 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2481 {
2482 rtx entry_parm = data->entry_parm;
2483 rtx stack_parm = data->stack_parm;
2484
2485 /* If this parm was passed part in regs and part in memory, pretend it
2486 arrived entirely in memory by pushing the register-part onto the stack.
2487 In the special case of a DImode or DFmode that is split, we could put
2488 it together in a pseudoreg directly, but for now that's not worth
2489 bothering with. */
2490 if (data->partial != 0)
2491 {
2492 /* Handle calls that pass values in multiple non-contiguous
2493 locations. The Irix 6 ABI has examples of this. */
2494 if (GET_CODE (entry_parm) == PARALLEL)
2495 emit_group_store (validize_mem (stack_parm), entry_parm,
2496 data->passed_type,
2497 int_size_in_bytes (data->passed_type));
2498 else
2499 {
2500 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2501 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2502 data->partial / UNITS_PER_WORD);
2503 }
2504
2505 entry_parm = stack_parm;
2506 }
2507
2508 /* If we didn't decide this parm came in a register, by default it came
2509 on the stack. */
2510 else if (entry_parm == NULL)
2511 entry_parm = stack_parm;
2512
2513 /* When an argument is passed in multiple locations, we can't make use
2514 of this information, but we can save some copying if the whole argument
2515 is passed in a single register. */
2516 else if (GET_CODE (entry_parm) == PARALLEL
2517 && data->nominal_mode != BLKmode
2518 && data->passed_mode != BLKmode)
2519 {
2520 size_t i, len = XVECLEN (entry_parm, 0);
2521
2522 for (i = 0; i < len; i++)
2523 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2524 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2525 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2526 == data->passed_mode)
2527 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2528 {
2529 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2530 break;
2531 }
2532 }
2533
2534 data->entry_parm = entry_parm;
2535 }
2536
2537 /* A subroutine of assign_parms. Reconstitute any values which were
2538 passed in multiple registers and would fit in a single register. */
2539
2540 static void
2541 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2542 {
2543 rtx entry_parm = data->entry_parm;
2544
2545 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2546 This can be done with register operations rather than on the
2547 stack, even if we will store the reconstituted parameter on the
2548 stack later. */
2549 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2550 {
2551 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2552 emit_group_store (parmreg, entry_parm, data->passed_type,
2553 GET_MODE_SIZE (GET_MODE (entry_parm)));
2554 entry_parm = parmreg;
2555 }
2556
2557 data->entry_parm = entry_parm;
2558 }
2559
2560 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2561 always valid and properly aligned. */
2562
2563 static void
2564 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2565 {
2566 rtx stack_parm = data->stack_parm;
2567
2568 /* If we can't trust the parm stack slot to be aligned enough for its
2569 ultimate type, don't use that slot after entry. We'll make another
2570 stack slot, if we need one. */
2571 if (stack_parm
2572 && ((STRICT_ALIGNMENT
2573 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2574 || (data->nominal_type
2575 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2576 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2577 stack_parm = NULL;
2578
2579 /* If parm was passed in memory, and we need to convert it on entry,
2580 don't store it back in that same slot. */
2581 else if (data->entry_parm == stack_parm
2582 && data->nominal_mode != BLKmode
2583 && data->nominal_mode != data->passed_mode)
2584 stack_parm = NULL;
2585
2586 /* If stack protection is in effect for this function, don't leave any
2587 pointers in their passed stack slots. */
2588 else if (crtl->stack_protect_guard
2589 && (flag_stack_protect == 2
2590 || data->passed_pointer
2591 || POINTER_TYPE_P (data->nominal_type)))
2592 stack_parm = NULL;
2593
2594 data->stack_parm = stack_parm;
2595 }
2596
2597 /* A subroutine of assign_parms. Return true if the current parameter
2598 should be stored as a BLKmode in the current frame. */
2599
2600 static bool
2601 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2602 {
2603 if (data->nominal_mode == BLKmode)
2604 return true;
2605 if (GET_MODE (data->entry_parm) == BLKmode)
2606 return true;
2607
2608 #ifdef BLOCK_REG_PADDING
2609 /* Only assign_parm_setup_block knows how to deal with register arguments
2610 that are padded at the least significant end. */
2611 if (REG_P (data->entry_parm)
2612 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2613 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2614 == (BYTES_BIG_ENDIAN ? upward : downward)))
2615 return true;
2616 #endif
2617
2618 return false;
2619 }
2620
2621 /* A subroutine of assign_parms. Arrange for the parameter to be
2622 present and valid in DATA->STACK_RTL. */
2623
2624 static void
2625 assign_parm_setup_block (struct assign_parm_data_all *all,
2626 tree parm, struct assign_parm_data_one *data)
2627 {
2628 rtx entry_parm = data->entry_parm;
2629 rtx stack_parm = data->stack_parm;
2630 HOST_WIDE_INT size;
2631 HOST_WIDE_INT size_stored;
2632
2633 if (GET_CODE (entry_parm) == PARALLEL)
2634 entry_parm = emit_group_move_into_temps (entry_parm);
2635
2636 size = int_size_in_bytes (data->passed_type);
2637 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2638 if (stack_parm == 0)
2639 {
2640 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2641 stack_parm = assign_stack_local (BLKmode, size_stored,
2642 DECL_ALIGN (parm));
2643 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2644 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2645 set_mem_attributes (stack_parm, parm, 1);
2646 }
2647
2648 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2649 calls that pass values in multiple non-contiguous locations. */
2650 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2651 {
2652 rtx mem;
2653
2654 /* Note that we will be storing an integral number of words.
2655 So we have to be careful to ensure that we allocate an
2656 integral number of words. We do this above when we call
2657 assign_stack_local if space was not allocated in the argument
2658 list. If it was, this will not work if PARM_BOUNDARY is not
2659 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2660 if it becomes a problem. The exception is when BLKmode arrives
2661 with arguments not conforming to word_mode. */
2662
2663 if (data->stack_parm == 0)
2664 ;
2665 else if (GET_CODE (entry_parm) == PARALLEL)
2666 ;
2667 else
2668 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2669
2670 mem = validize_mem (stack_parm);
2671
2672 /* Handle values in multiple non-contiguous locations. */
2673 if (GET_CODE (entry_parm) == PARALLEL)
2674 {
2675 push_to_sequence2 (all->first_conversion_insn,
2676 all->last_conversion_insn);
2677 emit_group_store (mem, entry_parm, data->passed_type, size);
2678 all->first_conversion_insn = get_insns ();
2679 all->last_conversion_insn = get_last_insn ();
2680 end_sequence ();
2681 }
2682
2683 else if (size == 0)
2684 ;
2685
2686 /* If SIZE is that of a mode no bigger than a word, just use
2687 that mode's store operation. */
2688 else if (size <= UNITS_PER_WORD)
2689 {
2690 enum machine_mode mode
2691 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2692
2693 if (mode != BLKmode
2694 #ifdef BLOCK_REG_PADDING
2695 && (size == UNITS_PER_WORD
2696 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2697 != (BYTES_BIG_ENDIAN ? upward : downward)))
2698 #endif
2699 )
2700 {
2701 rtx reg;
2702
2703 /* We are really truncating a word_mode value containing
2704 SIZE bytes into a value of mode MODE. If such an
2705 operation requires no actual instructions, we can refer
2706 to the value directly in mode MODE, otherwise we must
2707 start with the register in word_mode and explicitly
2708 convert it. */
2709 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2710 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2711 else
2712 {
2713 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2714 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2715 }
2716 emit_move_insn (change_address (mem, mode, 0), reg);
2717 }
2718
2719 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2720 machine must be aligned to the left before storing
2721 to memory. Note that the previous test doesn't
2722 handle all cases (e.g. SIZE == 3). */
2723 else if (size != UNITS_PER_WORD
2724 #ifdef BLOCK_REG_PADDING
2725 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2726 == downward)
2727 #else
2728 && BYTES_BIG_ENDIAN
2729 #endif
2730 )
2731 {
2732 rtx tem, x;
2733 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2734 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2735
2736 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2737 build_int_cst (NULL_TREE, by),
2738 NULL_RTX, 1);
2739 tem = change_address (mem, word_mode, 0);
2740 emit_move_insn (tem, x);
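/* E.g. a 3-byte argument arriving in a 4-byte register on a
big-endian machine: by = (4 - 3) * BITS_PER_UNIT = 8, so the shift
just emitted moves the three significant bytes to the most
significant end of the word ahead of the word_mode store. */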
2741 }
2742 else
2743 move_block_from_reg (REGNO (entry_parm), mem,
2744 size_stored / UNITS_PER_WORD);
2745 }
2746 else
2747 move_block_from_reg (REGNO (entry_parm), mem,
2748 size_stored / UNITS_PER_WORD);
2749 }
2750 else if (data->stack_parm == 0)
2751 {
2752 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2753 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2754 BLOCK_OP_NORMAL);
2755 all->first_conversion_insn = get_insns ();
2756 all->last_conversion_insn = get_last_insn ();
2757 end_sequence ();
2758 }
2759
2760 data->stack_parm = stack_parm;
2761 SET_DECL_RTL (parm, stack_parm);
2762 }
2763
2764 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2765 parameter. Get it there. Perform all ABI specified conversions. */
2766
2767 static void
2768 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2769 struct assign_parm_data_one *data)
2770 {
2771 rtx parmreg;
2772 enum machine_mode promoted_nominal_mode;
2773 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2774 bool did_conversion = false;
2775
2776 /* Store the parm in a pseudoregister during the function, but we may
2777 need to do it in a wider mode. */
2778
2779 /* This is not really promoting for a call. However we need to be
2780 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2781 promoted_nominal_mode
2782 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2783
2784 parmreg = gen_reg_rtx (promoted_nominal_mode);
2785
2786 if (!DECL_ARTIFICIAL (parm))
2787 mark_user_reg (parmreg);
2788
2789 /* If this was an item that we received a pointer to,
2790 set DECL_RTL appropriately. */
2791 if (data->passed_pointer)
2792 {
2793 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2794 set_mem_attributes (x, parm, 1);
2795 SET_DECL_RTL (parm, x);
2796 }
2797 else
2798 SET_DECL_RTL (parm, parmreg);
2799
2800 assign_parm_remove_parallels (data);
2801
2802 /* Copy the value into the register. */
2803 if (data->nominal_mode != data->passed_mode
2804 || promoted_nominal_mode != data->promoted_mode)
2805 {
2806 int save_tree_used;
2807
2808 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2809 mode, by the caller. We now have to convert it to
2810 NOMINAL_MODE, if different. However, PARMREG may be in
2811 a different mode than NOMINAL_MODE if it is being stored
2812 promoted.
2813
2814 If ENTRY_PARM is a hard register, it might be in a register
2815 not valid for operating in its mode (e.g., an odd-numbered
2816 register for a DFmode). In that case, moves are the only
2817 thing valid, so we can't do a convert from there. This
2818 occurs when the calling sequence allow such misaligned
2819 usages.
2820
2821 In addition, the conversion may involve a call, which could
2822 clobber parameters which haven't been copied to pseudo
2823 registers yet. Therefore, we must first copy the parm to
2824 a pseudo reg here, and save the conversion until after all
2825 parameters have been moved. */
2826
2827 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2828
2829 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2830
2831 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2832 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2833
2834 if (GET_CODE (tempreg) == SUBREG
2835 && GET_MODE (tempreg) == data->nominal_mode
2836 && REG_P (SUBREG_REG (tempreg))
2837 && data->nominal_mode == data->passed_mode
2838 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2839 && GET_MODE_SIZE (GET_MODE (tempreg))
2840 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2841 {
2842 /* The argument is already sign/zero extended, so note it
2843 into the subreg. */
2844 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2845 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2846 }
2847
2848 /* TREE_USED gets set erroneously during expand_assignment. */
2849 save_tree_used = TREE_USED (parm);
2850 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2851 TREE_USED (parm) = save_tree_used;
2852 all->first_conversion_insn = get_insns ();
2853 all->last_conversion_insn = get_last_insn ();
2854 end_sequence ();
2855
2856 did_conversion = true;
2857 }
2858 else
2859 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2860
2861 /* If we were passed a pointer but the actual value can safely live
2862 in a register, put it in one. */
2863 if (data->passed_pointer
2864 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2865 /* If a by-reference argument was promoted, demote it. */
2866 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2867 || use_register_for_decl (parm)))
2868 {
2869 /* We can't use nominal_mode, because it will have been set to
2870 Pmode above. We must use the actual mode of the parm. */
2871 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2872 mark_user_reg (parmreg);
2873
2874 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2875 {
2876 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2877 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2878
2879 push_to_sequence2 (all->first_conversion_insn,
2880 all->last_conversion_insn);
2881 emit_move_insn (tempreg, DECL_RTL (parm));
2882 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2883 emit_move_insn (parmreg, tempreg);
2884 all->first_conversion_insn = get_insns ();
2885 all->last_conversion_insn = get_last_insn ();
2886 end_sequence ();
2887
2888 did_conversion = true;
2889 }
2890 else
2891 emit_move_insn (parmreg, DECL_RTL (parm));
2892
2893 SET_DECL_RTL (parm, parmreg);
2894
2895 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2896 now the parm. */
2897 data->stack_parm = NULL;
2898 }
2899
2900 /* Mark the register as eliminable if we did no conversion and it was
2901 copied from memory at a fixed offset, and the arg pointer was not
2902 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2903 offset formed an invalid address, such memory-equivalences as we
2904 make here would screw up life analysis for it. */
2905 if (data->nominal_mode == data->passed_mode
2906 && !did_conversion
2907 && data->stack_parm != 0
2908 && MEM_P (data->stack_parm)
2909 && data->locate.offset.var == 0
2910 && reg_mentioned_p (virtual_incoming_args_rtx,
2911 XEXP (data->stack_parm, 0)))
2912 {
2913 rtx linsn = get_last_insn ();
2914 rtx sinsn, set;
2915
2916 /* Mark complex types separately. */
2917 if (GET_CODE (parmreg) == CONCAT)
2918 {
2919 enum machine_mode submode
2920 = GET_MODE_INNER (GET_MODE (parmreg));
2921 int regnor = REGNO (XEXP (parmreg, 0));
2922 int regnoi = REGNO (XEXP (parmreg, 1));
2923 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2924 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2925 GET_MODE_SIZE (submode));
2926
2927 /* Scan backwards for the set of the real and
2928 imaginary parts. */
2929 for (sinsn = linsn; sinsn != 0;
2930 sinsn = prev_nonnote_insn (sinsn))
2931 {
2932 set = single_set (sinsn);
2933 if (set == 0)
2934 continue;
2935
2936 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2937 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2938 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2939 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2940 }
2941 }
2942 else if ((set = single_set (linsn)) != 0
2943 && SET_DEST (set) == parmreg)
2944 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2945 }
2946
2947 /* For pointer data type, suggest pointer register. */
2948 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2949 mark_reg_pointer (parmreg,
2950 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2951 }
2952
2953 /* A subroutine of assign_parms. Allocate stack space to hold the current
2954 parameter. Get it there. Perform all ABI specified conversions. */
2955
2956 static void
2957 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2958 struct assign_parm_data_one *data)
2959 {
2960 /* Value must be stored in the stack slot STACK_PARM during function
2961 execution. */
2962 bool to_conversion = false;
2963
2964 assign_parm_remove_parallels (data);
2965
2966 if (data->promoted_mode != data->nominal_mode)
2967 {
2968 /* Conversion is required. */
2969 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2970
2971 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2972
2973 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2974 to_conversion = true;
2975
2976 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2977 TYPE_UNSIGNED (TREE_TYPE (parm)));
2978
2979 if (data->stack_parm)
2980 /* ??? This may need a big-endian conversion on sparc64. */
2981 data->stack_parm
2982 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2983 }
2984
2985 if (data->entry_parm != data->stack_parm)
2986 {
2987 rtx src, dest;
2988
2989 if (data->stack_parm == 0)
2990 {
2991 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
2992 GET_MODE (data->entry_parm),
2993 TYPE_ALIGN (data->passed_type));
2994 data->stack_parm
2995 = assign_stack_local (GET_MODE (data->entry_parm),
2996 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2997 align);
2998 set_mem_attributes (data->stack_parm, parm, 1);
2999 }
3000
3001 dest = validize_mem (data->stack_parm);
3002 src = validize_mem (data->entry_parm);
3003
3004 if (MEM_P (src))
3005 {
3006 /* Use a block move to handle potentially misaligned entry_parm. */
3007 if (!to_conversion)
3008 push_to_sequence2 (all->first_conversion_insn,
3009 all->last_conversion_insn);
3010 to_conversion = true;
3011
3012 emit_block_move (dest, src,
3013 GEN_INT (int_size_in_bytes (data->passed_type)),
3014 BLOCK_OP_NORMAL);
3015 }
3016 else
3017 emit_move_insn (dest, src);
3018 }
3019
3020 if (to_conversion)
3021 {
3022 all->first_conversion_insn = get_insns ();
3023 all->last_conversion_insn = get_last_insn ();
3024 end_sequence ();
3025 }
3026
3027 SET_DECL_RTL (parm, data->stack_parm);
3028 }
3029
3030 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3031 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3032
3033 static void
3034 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
3035 {
3036 tree parm;
3037 tree orig_fnargs = all->orig_fnargs;
3038
3039 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
3040 {
3041 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3042 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3043 {
3044 rtx tmp, real, imag;
3045 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3046
3047 real = DECL_RTL (fnargs);
3048 imag = DECL_RTL (TREE_CHAIN (fnargs));
3049 if (inner != GET_MODE (real))
3050 {
3051 real = gen_lowpart_SUBREG (inner, real);
3052 imag = gen_lowpart_SUBREG (inner, imag);
3053 }
3054
3055 if (TREE_ADDRESSABLE (parm))
3056 {
3057 rtx rmem, imem;
3058 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3059 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3060 DECL_MODE (parm),
3061 TYPE_ALIGN (TREE_TYPE (parm)));
3062
3063 /* split_complex_arg put the real and imag parts in
3064 pseudos. Move them to memory. */
3065 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3066 set_mem_attributes (tmp, parm, 1);
3067 rmem = adjust_address_nv (tmp, inner, 0);
3068 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3069 push_to_sequence2 (all->first_conversion_insn,
3070 all->last_conversion_insn);
3071 emit_move_insn (rmem, real);
3072 emit_move_insn (imem, imag);
3073 all->first_conversion_insn = get_insns ();
3074 all->last_conversion_insn = get_last_insn ();
3075 end_sequence ();
3076 }
3077 else
3078 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3079 SET_DECL_RTL (parm, tmp);
3080
3081 real = DECL_INCOMING_RTL (fnargs);
3082 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
3083 if (inner != GET_MODE (real))
3084 {
3085 real = gen_lowpart_SUBREG (inner, real);
3086 imag = gen_lowpart_SUBREG (inner, imag);
3087 }
3088 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3089 set_decl_incoming_rtl (parm, tmp, false);
3090 fnargs = TREE_CHAIN (fnargs);
3091 }
3092 else
3093 {
3094 SET_DECL_RTL (parm, DECL_RTL (fnargs));
3095 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
3096
3097 /* Set MEM_EXPR to the original decl, i.e. to PARM,
3098 instead of the copy of decl, i.e. FNARGS. */
3099 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
3100 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
3101 }
3102
3103 fnargs = TREE_CHAIN (fnargs);
3104 }
3105 }
3106
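/* After the loop above, a split complex parameter is visible as a
single rtx again: assuming SCmode with SFmode components,
DECL_RTL (parm) becomes (concat:SC real imag), rebuilt from the two
scalar halves that split_complex_args created; if the parm was
addressable, both halves were instead spilled into a single stack
slot. */
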
3107 /* Assign RTL expressions to the function's parameters. This may involve
3108 copying them into registers and using those registers as the DECL_RTL. */
3109
3110 static void
3111 assign_parms (tree fndecl)
3112 {
3113 struct assign_parm_data_all all;
3114 tree fnargs, parm;
3115
3116 crtl->args.internal_arg_pointer
3117 = targetm.calls.internal_arg_pointer ();
3118
3119 assign_parms_initialize_all (&all);
3120 fnargs = assign_parms_augmented_arg_list (&all);
3121
3122 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3123 {
3124 struct assign_parm_data_one data;
3125
3126 /* Extract the type of PARM; adjust it according to ABI. */
3127 assign_parm_find_data_types (&all, parm, &data);
3128
3129 /* Early out for errors and void parameters. */
3130 if (data.passed_mode == VOIDmode)
3131 {
3132 SET_DECL_RTL (parm, const0_rtx);
3133 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3134 continue;
3135 }
3136
3137 /* Estimate stack alignment from parameter alignment. */
3138 if (SUPPORTS_STACK_ALIGNMENT)
3139 {
3140 unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3141 data.passed_type);
3142 if (TYPE_ALIGN (data.nominal_type) > align)
3143 align = TYPE_ALIGN (data.nominal_type);
3144 if (crtl->stack_alignment_estimated < align)
3145 {
3146 gcc_assert (!crtl->stack_realign_processed);
3147 crtl->stack_alignment_estimated = align;
3148 }
3149 }
3150
3151 if (cfun->stdarg && !TREE_CHAIN (parm))
3152 assign_parms_setup_varargs (&all, &data, false);
3153
3154 /* Find out where the parameter arrives in this function. */
3155 assign_parm_find_entry_rtl (&all, &data);
3156
3157 /* Find out where stack space for this parameter might be. */
3158 if (assign_parm_is_stack_parm (&all, &data))
3159 {
3160 assign_parm_find_stack_rtl (parm, &data);
3161 assign_parm_adjust_entry_rtl (&data);
3162 }
3163
3164 /* Record permanently how this parm was passed. */
3165 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3166
3167 /* Update info on where next arg arrives in registers. */
3168 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3169 data.passed_type, data.named_arg);
3170
3171 assign_parm_adjust_stack_rtl (&data);
3172
3173 if (assign_parm_setup_block_p (&data))
3174 assign_parm_setup_block (&all, parm, &data);
3175 else if (data.passed_pointer || use_register_for_decl (parm))
3176 assign_parm_setup_reg (&all, parm, &data);
3177 else
3178 assign_parm_setup_stack (&all, parm, &data);
3179 }
3180
3181 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3182 assign_parms_unsplit_complex (&all, fnargs);
3183
3184 /* Output all parameter conversion instructions (possibly including calls)
3185 now that all parameters have been copied out of hard registers. */
3186 emit_insn (all.first_conversion_insn);
3187
3188 /* Estimate reload stack alignment from scalar return mode. */
3189 if (SUPPORTS_STACK_ALIGNMENT)
3190 {
3191 if (DECL_RESULT (fndecl))
3192 {
3193 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3194 enum machine_mode mode = TYPE_MODE (type);
3195
3196 if (mode != BLKmode
3197 && mode != VOIDmode
3198 && !AGGREGATE_TYPE_P (type))
3199 {
3200 unsigned int align = GET_MODE_ALIGNMENT (mode);
3201 if (crtl->stack_alignment_estimated < align)
3202 {
3203 gcc_assert (!crtl->stack_realign_processed);
3204 crtl->stack_alignment_estimated = align;
3205 }
3206 }
3207 }
3208 }
3209
3210 /* If we are receiving a struct value address as the first argument, set up
3211 the RTL for the function result. As this might require code to convert
3212 the transmitted address to Pmode, we do this here to ensure that possible
3213 preliminary conversions of the address have been emitted already. */
3214 if (all.function_result_decl)
3215 {
3216 tree result = DECL_RESULT (current_function_decl);
3217 rtx addr = DECL_RTL (all.function_result_decl);
3218 rtx x;
3219
3220 if (DECL_BY_REFERENCE (result))
3221 x = addr;
3222 else
3223 {
3224 addr = convert_memory_address (Pmode, addr);
3225 x = gen_rtx_MEM (DECL_MODE (result), addr);
3226 set_mem_attributes (x, result, 1);
3227 }
3228 SET_DECL_RTL (result, x);
3229 }
3230
3231 /* We have aligned all the args, so add space for the pretend args. */
3232 crtl->args.pretend_args_size = all.pretend_args_size;
3233 all.stack_args_size.constant += all.extra_pretend_bytes;
3234 crtl->args.size = all.stack_args_size.constant;
3235
3236 /* Adjust function incoming argument size for alignment and
3237 minimum length. */
3238
3239 #ifdef REG_PARM_STACK_SPACE
3240 crtl->args.size = MAX (crtl->args.size,
3241 REG_PARM_STACK_SPACE (fndecl));
3242 #endif
3243
3244 crtl->args.size = CEIL_ROUND (crtl->args.size,
3245 PARM_BOUNDARY / BITS_PER_UNIT);
3246
3247 #ifdef ARGS_GROW_DOWNWARD
3248 crtl->args.arg_offset_rtx
3249 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3250 : expand_expr (size_diffop (all.stack_args_size.var,
3251 size_int (-all.stack_args_size.constant)),
3252 NULL_RTX, VOIDmode, 0));
3253 #else
3254 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3255 #endif
3256
3257 /* See how many bytes, if any, of its args a function should try to pop
3258 on return. */
3259
3260 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3261 crtl->args.size);
3262
3263 /* For a stdarg.h function, save info about
3264 regs and stack space used by the named args. */
3265
3266 crtl->args.info = all.args_so_far;
3267
3268 /* Set the rtx used for the function return value. Put this in its
3269 own variable so any optimizers that need this information don't have
3270 to include tree.h. Do this here so it gets done when an inlined
3271 function gets output. */
3272
3273 crtl->return_rtx
3274 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3275 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3276
3277 /* If scalar return value was computed in a pseudo-reg, or was a named
3278 return value that got dumped to the stack, copy that to the hard
3279 return register. */
3280 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3281 {
3282 tree decl_result = DECL_RESULT (fndecl);
3283 rtx decl_rtl = DECL_RTL (decl_result);
3284
3285 if (REG_P (decl_rtl)
3286 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3287 : DECL_REGISTER (decl_result))
3288 {
3289 rtx real_decl_rtl;
3290
3291 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3292 fndecl, true);
3293 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3294 /* The delay slot scheduler assumes that crtl->return_rtx
3295 holds the hard register containing the return value, not a
3296 temporary pseudo. */
3297 crtl->return_rtx = real_decl_rtl;
3298 }
3299 }
3300 }
3301
3302 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3303 For all seen types, gimplify their sizes. */
3304
3305 static tree
3306 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3307 {
3308 tree t = *tp;
3309
3310 *walk_subtrees = 0;
3311 if (TYPE_P (t))
3312 {
3313 if (POINTER_TYPE_P (t))
3314 *walk_subtrees = 1;
3315 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3316 && !TYPE_SIZES_GIMPLIFIED (t))
3317 {
3318 gimplify_type_sizes (t, (gimple_seq *) data);
3319 *walk_subtrees = 1;
3320 }
3321 }
3322
3323 return NULL;
3324 }
3325
3326 /* Gimplify the parameter list for current_function_decl. This involves
3327 evaluating SAVE_EXPRs of variable sized parameters and generating code
3328 to implement callee-copies reference parameters. Returns a sequence of
3329 statements to add to the beginning of the function. */
3330
3331 gimple_seq
3332 gimplify_parameters (void)
3333 {
3334 struct assign_parm_data_all all;
3335 tree fnargs, parm;
3336 gimple_seq stmts = NULL;
3337
3338 assign_parms_initialize_all (&all);
3339 fnargs = assign_parms_augmented_arg_list (&all);
3340
3341 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3342 {
3343 struct assign_parm_data_one data;
3344
3345 /* Extract the type of PARM; adjust it according to ABI. */
3346 assign_parm_find_data_types (&all, parm, &data);
3347
3348 /* Early out for errors and void parameters. */
3349 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3350 continue;
3351
3352 /* Update info on where next arg arrives in registers. */
3353 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3354 data.passed_type, data.named_arg);
3355
3356 /* ??? Once upon a time variable_size stuffed parameter list
3357 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3358 turned out to be less than manageable in the gimple world.
3359 Now we have to hunt them down ourselves. */
3360 walk_tree_without_duplicates (&data.passed_type,
3361 gimplify_parm_type, &stmts);
3362
3363 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3364 {
3365 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3366 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3367 }
3368
3369 if (data.passed_pointer)
3370 {
3371 tree type = TREE_TYPE (data.passed_type);
3372 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3373 type, data.named_arg))
3374 {
3375 tree local, t;
3376
3377 /* For constant-sized objects, this is trivial; for
3378 variable-sized objects, we have to play games. */
3379 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3380 && !(flag_stack_check == GENERIC_STACK_CHECK
3381 && compare_tree_int (DECL_SIZE_UNIT (parm),
3382 STACK_CHECK_MAX_VAR_SIZE) > 0))
3383 {
3384 local = create_tmp_var (type, get_name (parm));
3385 DECL_IGNORED_P (local) = 0;
3386 /* If PARM was addressable, move that flag over
3387 to the local copy, as its address will be taken,
3388 not the PARMs. */
3389 if (TREE_ADDRESSABLE (parm))
3390 {
3391 TREE_ADDRESSABLE (parm) = 0;
3392 TREE_ADDRESSABLE (local) = 1;
3393 }
3394 }
3395 else
3396 {
3397 tree ptr_type, addr;
3398
3399 ptr_type = build_pointer_type (type);
3400 addr = create_tmp_var (ptr_type, get_name (parm));
3401 DECL_IGNORED_P (addr) = 0;
3402 local = build_fold_indirect_ref (addr);
3403
3404 t = built_in_decls[BUILT_IN_ALLOCA];
3405 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3406 t = fold_convert (ptr_type, t);
3407 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3408 gimplify_and_add (t, &stmts);
3409 }
3410
3411 gimplify_assign (local, parm, &stmts);
3412
3413 SET_DECL_VALUE_EXPR (parm, local);
3414 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3415 }
3416 }
3417 }
3418
3419 return stmts;
3420 }
3421
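/* Rough shape of what the variable-size branch above emits, with
invented GIMPLE names for illustration:
addr.0 = (T *) __builtin_alloca (D.size);
*addr.0 = parm;
after which DECL_VALUE_EXPR (parm) is *addr.0, so uses of PARM in
the function body read the fresh local copy rather than the
caller's object. */
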
3422 /* Compute the size and offset from the start of the stacked arguments for a
3423 parm passed in mode PASSED_MODE and with type TYPE.
3424
3425 INITIAL_OFFSET_PTR points to the current offset into the stacked
3426 arguments.
3427
3428 The starting offset and size for this parm are returned in
3429 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3430 nonzero, the offset is that of the stack slot, which is returned in
3431 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3432 padding required from the initial offset ptr to the stack slot.
3433
3434 IN_REGS is nonzero if the argument will be passed in registers. It will
3435 never be set if REG_PARM_STACK_SPACE is not defined.
3436
3437 FNDECL is the function in which the argument was defined.
3438
3439 There are two types of rounding that are done. The first, controlled by
3440 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3441 list to be aligned to the specific boundary (in bits). This rounding
3442 affects the initial and starting offsets, but not the argument size.
3443
3444 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3445 optionally rounds the size of the parm to PARM_BOUNDARY. The
3446 initial offset is not affected by this rounding, while the size always
3447 is and the starting offset may be. */
3448
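/* A worked example of the two roundings described above, taking
PARM_BOUNDARY == 32 and a FUNCTION_ARG_BOUNDARY of 64 bits for this
parm: an initial offset of 20 bytes is first rounded up to 24, which
moves the parm's starting offset; a 6-byte parm is then padded up to
8 bytes (the next PARM_BOUNDARY multiple), which grows LOCATE->SIZE
but leaves the starting offset alone. */
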
3449 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3450 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3451 callers pass in the total size of args so far as
3452 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3453
3454 void
3455 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3456 int partial, tree fndecl ATTRIBUTE_UNUSED,
3457 struct args_size *initial_offset_ptr,
3458 struct locate_and_pad_arg_data *locate)
3459 {
3460 tree sizetree;
3461 enum direction where_pad;
3462 unsigned int boundary;
3463 int reg_parm_stack_space = 0;
3464 int part_size_in_regs;
3465
3466 #ifdef REG_PARM_STACK_SPACE
3467 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3468
3469 /* If we have found a stack parm before we reach the end of the
3470 area reserved for registers, skip that area. */
3471 if (! in_regs)
3472 {
3473 if (reg_parm_stack_space > 0)
3474 {
3475 if (initial_offset_ptr->var)
3476 {
3477 initial_offset_ptr->var
3478 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3479 ssize_int (reg_parm_stack_space));
3480 initial_offset_ptr->constant = 0;
3481 }
3482 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3483 initial_offset_ptr->constant = reg_parm_stack_space;
3484 }
3485 }
3486 #endif /* REG_PARM_STACK_SPACE */
3487
3488 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3489
3490 sizetree
3491 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3492 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3493 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3494 locate->where_pad = where_pad;
3495
3496 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3497 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3498 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3499
3500 locate->boundary = boundary;
3501
3502 if (SUPPORTS_STACK_ALIGNMENT)
3503 {
3504 /* stack_alignment_estimated can't change after stack has been
3505 realigned. */
3506 if (crtl->stack_alignment_estimated < boundary)
3507 {
3508 if (!crtl->stack_realign_processed)
3509 crtl->stack_alignment_estimated = boundary;
3510 else
3511 {
3512 /* If stack is realigned and stack alignment value
3513 hasn't been finalized, it is OK not to increase
3514 stack_alignment_estimated. The bigger alignment
3515 requirement is recorded in stack_alignment_needed
3516 below. */
3517 gcc_assert (!crtl->stack_realign_finalized
3518 && crtl->stack_realign_needed);
3519 }
3520 }
3521 }
3522
3523 /* Remember if the outgoing parameter requires extra alignment on the
3524 calling function side. */
3525 if (crtl->stack_alignment_needed < boundary)
3526 crtl->stack_alignment_needed = boundary;
3527 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
3528 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
3529 if (crtl->preferred_stack_boundary < boundary)
3530 crtl->preferred_stack_boundary = boundary;
3531
3532 #ifdef ARGS_GROW_DOWNWARD
3533 locate->slot_offset.constant = -initial_offset_ptr->constant;
3534 if (initial_offset_ptr->var)
3535 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3536 initial_offset_ptr->var);
3537
3538 {
3539 tree s2 = sizetree;
3540 if (where_pad != none
3541 && (!host_integerp (sizetree, 1)
3542 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3543 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3544 SUB_PARM_SIZE (locate->slot_offset, s2);
3545 }
3546
3547 locate->slot_offset.constant += part_size_in_regs;
3548
3549 if (!in_regs
3550 #ifdef REG_PARM_STACK_SPACE
3551 || REG_PARM_STACK_SPACE (fndecl) > 0
3552 #endif
3553 )
3554 pad_to_arg_alignment (&locate->slot_offset, boundary,
3555 &locate->alignment_pad);
3556
3557 locate->size.constant = (-initial_offset_ptr->constant
3558 - locate->slot_offset.constant);
3559 if (initial_offset_ptr->var)
3560 locate->size.var = size_binop (MINUS_EXPR,
3561 size_binop (MINUS_EXPR,
3562 ssize_int (0),
3563 initial_offset_ptr->var),
3564 locate->slot_offset.var);
3565
3566 /* Pad_below needs the pre-rounded size to know how much to pad
3567 below. */
3568 locate->offset = locate->slot_offset;
3569 if (where_pad == downward)
3570 pad_below (&locate->offset, passed_mode, sizetree);
3571
3572 #else /* !ARGS_GROW_DOWNWARD */
3573 if (!in_regs
3574 #ifdef REG_PARM_STACK_SPACE
3575 || REG_PARM_STACK_SPACE (fndecl) > 0
3576 #endif
3577 )
3578 pad_to_arg_alignment (initial_offset_ptr, boundary,
3579 &locate->alignment_pad);
3580 locate->slot_offset = *initial_offset_ptr;
3581
3582 #ifdef PUSH_ROUNDING
3583 if (passed_mode != BLKmode)
3584 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3585 #endif
3586
3587 /* Pad_below needs the pre-rounded size to know how much to pad below
3588 so this must be done before rounding up. */
3589 locate->offset = locate->slot_offset;
3590 if (where_pad == downward)
3591 pad_below (&locate->offset, passed_mode, sizetree);
3592
3593 if (where_pad != none
3594 && (!host_integerp (sizetree, 1)
3595 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3596 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3597
3598 ADD_PARM_SIZE (locate->size, sizetree);
3599
3600 locate->size.constant -= part_size_in_regs;
3601 #endif /* ARGS_GROW_DOWNWARD */
3602
3603 #ifdef FUNCTION_ARG_OFFSET
3604 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3605 #endif
3606 }
3607
3608 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3609 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3610
3611 static void
3612 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3613 struct args_size *alignment_pad)
3614 {
3615 tree save_var = NULL_TREE;
3616 HOST_WIDE_INT save_constant = 0;
3617 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3618 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3619
3620 #ifdef SPARC_STACK_BOUNDARY_HACK
3621 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3622 the real alignment of %sp. However, when it does this, the
3623 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3624 if (SPARC_STACK_BOUNDARY_HACK)
3625 sp_offset = 0;
3626 #endif
3627
3628 if (boundary > PARM_BOUNDARY)
3629 {
3630 save_var = offset_ptr->var;
3631 save_constant = offset_ptr->constant;
3632 }
3633
3634 alignment_pad->var = NULL_TREE;
3635 alignment_pad->constant = 0;
3636
3637 if (boundary > BITS_PER_UNIT)
3638 {
3639 if (offset_ptr->var)
3640 {
3641 tree sp_offset_tree = ssize_int (sp_offset);
3642 tree offset = size_binop (PLUS_EXPR,
3643 ARGS_SIZE_TREE (*offset_ptr),
3644 sp_offset_tree);
3645 #ifdef ARGS_GROW_DOWNWARD
3646 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3647 #else
3648 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3649 #endif
3650
3651 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3652 /* ARGS_SIZE_TREE includes constant term. */
3653 offset_ptr->constant = 0;
3654 if (boundary > PARM_BOUNDARY)
3655 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3656 save_var);
3657 }
3658 else
3659 {
3660 offset_ptr->constant = -sp_offset +
3661 #ifdef ARGS_GROW_DOWNWARD
3662 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3663 #else
3664 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3665 #endif
3666 if (boundary > PARM_BOUNDARY)
3667 alignment_pad->constant = offset_ptr->constant - save_constant;
3668 }
3669 }
3670 }
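
/* A minimal usage sketch of the rounding above, under the assumed
   values STACK_POINTER_OFFSET == 0, args growing upward, and a
   target whose PARM_BOUNDARY is below 128 bits:

     struct args_size off = { 36, NULL_TREE };
     struct args_size pad;
     pad_to_arg_alignment (&off, 128, &pad);

   The 128-bit boundary is 16 bytes, so off.constant becomes
   CEIL_ROUND (36, 16) == 48 and pad.constant records the 12 bytes
   of alignment padding.  */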
3671
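/* Add to *OFFSET_PTR the padding placed below an argument that is
   padded downward: the argument's size rounded up to a multiple of
   PARM_BOUNDARY, minus its actual size.  As an illustrative sketch,
   assuming PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8: a HImode
   argument (16 bits, GET_MODE_SIZE == 2) occupies a 4-byte slot, so
   OFFSET_PTR->constant advances by 4 - 2 == 2 bytes.  */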
3672 static void
3673 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3674 {
3675 if (passed_mode != BLKmode)
3676 {
3677 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3678 offset_ptr->constant
3679 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3680 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3681 - GET_MODE_SIZE (passed_mode));
3682 }
3683 else
3684 {
3685 if (TREE_CODE (sizetree) != INTEGER_CST
3686 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3687 {
3688 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3689 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3690 /* Add it in. */
3691 ADD_PARM_SIZE (*offset_ptr, s2);
3692 SUB_PARM_SIZE (*offset_ptr, sizetree);
3693 }
3694 }
3695 }
3696
3697
3698 /* True if register REGNO was alive at a place where `setjmp' was
3699 called and was set more than once or is an argument. Such regs may
3700 be clobbered by `longjmp'. */
3701
3702 static bool
3703 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3704 {
3705 /* There appear to be cases where some local vars never reach the
3706 backend but have bogus regnos. */
3707 if (regno >= max_reg_num ())
3708 return false;
3709
3710 return ((REG_N_SETS (regno) > 1
3711 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3712 && REGNO_REG_SET_P (setjmp_crosses, regno));
3713 }
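
/* For instance, a pseudo that holds a loop counter assigned on every
   iteration and that is live across a call to setjmp meets both tests
   above, so its value after a longjmp is unpredictable.  */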
3714
3715 /* Walk the tree of blocks describing the binding levels within a
3716 function and warn about variables that might be killed by setjmp or
3717 vfork. This is done after flow analysis and before register
3718 allocation, since register allocation will replace the pseudo-regs
3719 with hard regs. */
3720
3721 static void
3722 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3723 {
3724 tree decl, sub;
3725
3726 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3727 {
3728 if (TREE_CODE (decl) == VAR_DECL
3729 && DECL_RTL_SET_P (decl)
3730 && REG_P (DECL_RTL (decl))
3731 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3732 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3733 " %<longjmp%> or %<vfork%>", decl);
3734 }
3735
3736 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3737 setjmp_vars_warning (setjmp_crosses, sub);
3738 }
3739
3740 /* Do the appropriate part of setjmp_vars_warning
3741 but for arguments instead of local variables. */
3742
3743 static void
3744 setjmp_args_warning (bitmap setjmp_crosses)
3745 {
3746 tree decl;
3747 for (decl = DECL_ARGUMENTS (current_function_decl);
3748 decl; decl = TREE_CHAIN (decl))
3749 if (DECL_RTL (decl) != 0
3750 && REG_P (DECL_RTL (decl))
3751 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3752 warning (OPT_Wclobbered,
3753 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3754 decl);
3755 }
3756
3757 /* Generate warning messages for variables live across setjmp. */
3758
3759 void
3760 generate_setjmp_warnings (void)
3761 {
3762 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3763
3764 if (n_basic_blocks == NUM_FIXED_BLOCKS
3765 || bitmap_empty_p (setjmp_crosses))
3766 return;
3767
3768 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3769 setjmp_args_warning (setjmp_crosses);
3770 }
3771
3772
3773 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3774 and create duplicate blocks. */
3775 /* ??? Need an option to either create block fragments or to create
3776 abstract origin duplicates of a source block. It really depends
3777 on what optimization has been performed. */
3778
3779 void
3780 reorder_blocks (void)
3781 {
3782 tree block = DECL_INITIAL (current_function_decl);
3783 VEC(tree,heap) *block_stack;
3784
3785 if (block == NULL_TREE)
3786 return;
3787
3788 block_stack = VEC_alloc (tree, heap, 10);
3789
3790 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3791 clear_block_marks (block);
3792
3793 /* Prune the old trees away, so that they don't get in the way. */
3794 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3795 BLOCK_CHAIN (block) = NULL_TREE;
3796
3797 /* Recreate the block tree from the note nesting. */
3798 reorder_blocks_1 (get_insns (), block, &block_stack);
3799 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3800
3801 VEC_free (tree, heap, block_stack);
3802 }
3803
3804 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3805
3806 void
3807 clear_block_marks (tree block)
3808 {
3809 while (block)
3810 {
3811 TREE_ASM_WRITTEN (block) = 0;
3812 clear_block_marks (BLOCK_SUBBLOCKS (block));
3813 block = BLOCK_CHAIN (block);
3814 }
3815 }
3816
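/* Helper for reorder_blocks: rebuild the subblock lists beneath
   CURRENT_BLOCK from the NOTE_INSN_BLOCK_{BEG,END} notes in INSNS,
   using *P_BLOCK_STACK to pair each END note with its BEG note.

   Illustrative sketch: if an optimization has split a block B so that
   its notes occur as BEG(B) ... END(B) ... BEG(B) ... END(B), the
   second BEG finds TREE_ASM_WRITTEN (B) already set; a copy B' is
   then made with BLOCK_FRAGMENT_ORIGIN (B') == B, linked onto B's
   BLOCK_FRAGMENT_CHAIN, and the note is redirected to B'.  */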
3817 static void
3818 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3819 {
3820 rtx insn;
3821
3822 for (insn = insns; insn; insn = NEXT_INSN (insn))
3823 {
3824 if (NOTE_P (insn))
3825 {
3826 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3827 {
3828 tree block = NOTE_BLOCK (insn);
3829 tree origin;
3830
3831 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3832 ? BLOCK_FRAGMENT_ORIGIN (block)
3833 : block);
3834
3835 /* If we have seen this block before, that means it now
3836 spans multiple address regions. Create a new fragment. */
3837 if (TREE_ASM_WRITTEN (block))
3838 {
3839 tree new_block = copy_node (block);
3840
3841 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3842 BLOCK_FRAGMENT_CHAIN (new_block)
3843 = BLOCK_FRAGMENT_CHAIN (origin);
3844 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3845
3846 NOTE_BLOCK (insn) = new_block;
3847 block = new_block;
3848 }
3849
3850 BLOCK_SUBBLOCKS (block) = 0;
3851 TREE_ASM_WRITTEN (block) = 1;
3852 /* When there's only one block for the entire function,
3853 current_block == block, and we mustn't do this, as it
3854 would cause infinite recursion. */
3855 if (block != current_block)
3856 {
3857 if (block != origin)
3858 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3859
3860 BLOCK_SUPERCONTEXT (block) = current_block;
3861 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3862 BLOCK_SUBBLOCKS (current_block) = block;
3863 current_block = origin;
3864 }
3865 VEC_safe_push (tree, heap, *p_block_stack, block);
3866 }
3867 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3868 {
3869 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3870 BLOCK_SUBBLOCKS (current_block)
3871 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3872 current_block = BLOCK_SUPERCONTEXT (current_block);
3873 }
3874 }
3875 }
3876 }
3877
3878 /* Reverse the order of elements in the chain T of blocks,
3879 and return the new head of the chain (old last element). */
3880
3881 tree
3882 blocks_nreverse (tree t)
3883 {
3884 tree prev = 0, decl, next;
3885 for (decl = t; decl; decl = next)
3886 {
3887 next = BLOCK_CHAIN (decl);
3888 BLOCK_CHAIN (decl) = prev;
3889 prev = decl;
3890 }
3891 return prev;
3892 }
3893
3894 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3895 non-NULL, list them all into VECTOR, in a depth-first preorder
3896 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3897 blocks. */
3898
3899 static int
3900 all_blocks (tree block, tree *vector)
3901 {
3902 int n_blocks = 0;
3903
3904 while (block)
3905 {
3906 TREE_ASM_WRITTEN (block) = 0;
3907
3908 /* Record this block. */
3909 if (vector)
3910 vector[n_blocks] = block;
3911
3912 ++n_blocks;
3913
3914 /* Record the subblocks, and their subblocks... */
3915 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3916 vector ? vector + n_blocks : 0);
3917 block = BLOCK_CHAIN (block);
3918 }
3919
3920 return n_blocks;
3921 }
3922
3923 /* Return a vector containing all the blocks rooted at BLOCK. The
3924 number of elements in the vector is stored in N_BLOCKS_P. The
3925 vector is dynamically allocated; it is the caller's responsibility
3926 to call `free' on the pointer returned. */
3927
3928 static tree *
3929 get_block_vector (tree block, int *n_blocks_p)
3930 {
3931 tree *block_vector;
3932
3933 *n_blocks_p = all_blocks (block, NULL);
3934 block_vector = XNEWVEC (tree, *n_blocks_p);
3935 all_blocks (block, block_vector);
3936
3937 return block_vector;
3938 }
3939
3940 static GTY(()) int next_block_index = 2;
3941
3942 /* Set BLOCK_NUMBER for all the blocks in FN. */
3943
3944 void
3945 number_blocks (tree fn)
3946 {
3947 int i;
3948 int n_blocks;
3949 tree *block_vector;
3950
3951 /* For SDB and XCOFF debugging output, we start numbering the blocks
3952 from 1 within each function, rather than keeping a running
3953 count. */
3954 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3955 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3956 next_block_index = 1;
3957 #endif
3958
3959 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3960
3961 /* The top-level BLOCK isn't numbered at all. */
3962 for (i = 1; i < n_blocks; ++i)
3963 /* We number the blocks from two. */
3964 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3965
3966 free (block_vector);
3967
3968 return;
3969 }
3970
3971 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3972
3973 tree
3974 debug_find_var_in_block_tree (tree var, tree block)
3975 {
3976 tree t;
3977
3978 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3979 if (t == var)
3980 return block;
3981
3982 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3983 {
3984 tree ret = debug_find_var_in_block_tree (var, t);
3985 if (ret)
3986 return ret;
3987 }
3988
3989 return NULL_TREE;
3990 }
3991
3992 /* Keep track of whether we're in a dummy function context. If we are,
3993 we don't want to invoke the set_current_function hook, because we'll
3994 get into trouble if the hook calls target_reinit () recursively, or
3995 if it runs before initialization is complete. */
3996
3997 static bool in_dummy_function;
3998
3999 /* Invoke the target hook when setting cfun. Update the optimization options
4000 if the function uses different options than the default. */
4001
4002 static void
4003 invoke_set_current_function_hook (tree fndecl)
4004 {
4005 if (!in_dummy_function)
4006 {
4007 tree opts = ((fndecl)
4008 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4009 : optimization_default_node);
4010
4011 if (!opts)
4012 opts = optimization_default_node;
4013
4014 /* Change optimization options if needed. */
4015 if (optimization_current_node != opts)
4016 {
4017 optimization_current_node = opts;
4018 cl_optimization_restore (TREE_OPTIMIZATION (opts));
4019 }
4020
4021 targetm.set_current_function (fndecl);
4022 }
4023 }
4024
4025 /* cfun should never be set directly; use this function. */
4026
4027 void
4028 set_cfun (struct function *new_cfun)
4029 {
4030 if (cfun != new_cfun)
4031 {
4032 cfun = new_cfun;
4033 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4034 }
4035 }
4036
4037 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4038
4039 static VEC(function_p,heap) *cfun_stack;
4040
4041 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4042
4043 void
4044 push_cfun (struct function *new_cfun)
4045 {
4046 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4047 set_cfun (new_cfun);
4048 }
4049
4050 /* Pop cfun from the stack. */
4051
4052 void
4053 pop_cfun (void)
4054 {
4055 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4056 set_cfun (new_cfun);
4057 }
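
/* A typical (illustrative) pairing, where FN is some struct function
   other than the current one:

     push_cfun (fn);
     ... code that reads or modifies cfun ...
     pop_cfun ();

   The calls must stay balanced, since pop_cfun simply pops whatever
   context was last saved on cfun_stack.  */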
4058
4059 /* Return the current value of funcdef_no and increment it. */
4060 int
4061 get_next_funcdef_no (void)
4062 {
4063 return funcdef_no++;
4064 }
4065
4066 /* Allocate a function structure for FNDECL and set its contents
4067 to the defaults. Set cfun to the newly-allocated object.
4068 Some of the helper functions invoked during initialization assume
4069 that cfun has already been set. Therefore, assign the new object
4070 directly into cfun and invoke the back end hook explicitly at the
4071 very end, rather than initializing a temporary and calling set_cfun
4072 on it.
4073
4074 ABSTRACT_P is true if this is a function that will never be seen by
4075 the middle-end. Such functions are front-end concepts (like C++
4076 function templates) that do not correspond directly to functions
4077 placed in object files. */
4078
4079 void
4080 allocate_struct_function (tree fndecl, bool abstract_p)
4081 {
4082 tree result;
4083 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4084
4085 cfun = GGC_CNEW (struct function);
4086
4087 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
4088
4089 init_eh_for_function ();
4090
4091 if (init_machine_status)
4092 cfun->machine = (*init_machine_status) ();
4093
4094 #ifdef OVERRIDE_ABI_FORMAT
4095 OVERRIDE_ABI_FORMAT (fndecl);
4096 #endif
4097
4098 invoke_set_current_function_hook (fndecl);
4099
4100 if (fndecl != NULL_TREE)
4101 {
4102 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4103 cfun->decl = fndecl;
4104 current_function_funcdef_no = get_next_funcdef_no ();
4105
4106 result = DECL_RESULT (fndecl);
4107 if (!abstract_p && aggregate_value_p (result, fndecl))
4108 {
4109 #ifdef PCC_STATIC_STRUCT_RETURN
4110 cfun->returns_pcc_struct = 1;
4111 #endif
4112 cfun->returns_struct = 1;
4113 }
4114
4115 cfun->stdarg
4116 = (fntype
4117 && TYPE_ARG_TYPES (fntype) != 0
4118 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4119 != void_type_node));
4120
4121 /* Assume all registers in stdarg functions need to be saved. */
4122 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4123 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4124 }
4125 }
4126
4127 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4128 instead of just setting it. */
4129
4130 void
4131 push_struct_function (tree fndecl)
4132 {
4133 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4134 allocate_struct_function (fndecl, false);
4135 }
4136
4137 /* Reset cfun and other non-struct-function variables to defaults as
4138 appropriate for emitting rtl at the start of a function. */
4139
4140 static void
4141 prepare_function_start (void)
4142 {
4143 gcc_assert (!crtl->emit.x_last_insn);
4144 init_temp_slots ();
4145 init_emit ();
4146 init_varasm_status ();
4147 init_expr ();
4148 default_rtl_profile ();
4149
4150 cse_not_expected = ! optimize;
4151
4152 /* Caller save not needed yet. */
4153 caller_save_needed = 0;
4154
4155 /* We haven't done register allocation yet. */
4156 reg_renumber = 0;
4157
4158 /* Indicate that we have not instantiated virtual registers yet. */
4159 virtuals_instantiated = 0;
4160
4161 /* Indicate that we want CONCATs now. */
4162 generating_concat_p = 1;
4163
4164 /* Indicate we have no need of a frame pointer yet. */
4165 frame_pointer_needed = 0;
4166 }
4167
4168 /* Initialize the rtl expansion mechanism so that we can do simple things
4169 like generate sequences. This is used to provide a context during global
4170 initialization of some passes. You must call expand_dummy_function_end
4171 to exit this context. */
4172
4173 void
4174 init_dummy_function_start (void)
4175 {
4176 gcc_assert (!in_dummy_function);
4177 in_dummy_function = true;
4178 push_struct_function (NULL_TREE);
4179 prepare_function_start ();
4180 }
4181
4182 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4183 and initialize static variables for generating RTL for the statements
4184 of the function. */
4185
4186 void
4187 init_function_start (tree subr)
4188 {
4189 if (subr && DECL_STRUCT_FUNCTION (subr))
4190 set_cfun (DECL_STRUCT_FUNCTION (subr));
4191 else
4192 allocate_struct_function (subr, false);
4193 prepare_function_start ();
4194
4195 /* Warn if this value is an aggregate type,
4196 regardless of which calling convention we are using for it. */
4197 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4198 warning (OPT_Waggregate_return, "function returns an aggregate");
4199 }
4200
4201 /* Make sure all values used by the optimization passes have sane
4202 defaults. */
4203 unsigned int
4204 init_function_for_compilation (void)
4205 {
4206 reg_renumber = 0;
4207
4208 /* No prologue/epilogue insns yet. Make sure that these vectors are
4209 empty. */
4210 gcc_assert (VEC_length (int, prologue) == 0);
4211 gcc_assert (VEC_length (int, epilogue) == 0);
4212 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
4213 return 0;
4214 }
4215
4216 struct rtl_opt_pass pass_init_function =
4217 {
4218 {
4219 RTL_PASS,
4220 NULL, /* name */
4221 NULL, /* gate */
4222 init_function_for_compilation, /* execute */
4223 NULL, /* sub */
4224 NULL, /* next */
4225 0, /* static_pass_number */
4226 0, /* tv_id */
4227 0, /* properties_required */
4228 0, /* properties_provided */
4229 0, /* properties_destroyed */
4230 0, /* todo_flags_start */
4231 0 /* todo_flags_finish */
4232 }
4233 };
4234
4235
4236 void
4237 expand_main_function (void)
4238 {
4239 #if (defined(INVOKE__main) \
4240 || (!defined(HAS_INIT_SECTION) \
4241 && !defined(INIT_SECTION_ASM_OP) \
4242 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4243 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4244 #endif
4245 }
4246
4247 /* Expand code to initialize the stack_protect_guard. This is invoked at
4248 the beginning of a function to be protected. */
4249
4250 #ifndef HAVE_stack_protect_set
4251 # define HAVE_stack_protect_set 0
4252 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4253 #endif
4254
4255 void
4256 stack_protect_prologue (void)
4257 {
4258 tree guard_decl = targetm.stack_protect_guard ();
4259 rtx x, y;
4260
4261 /* Avoid expand_expr here, because we don't want guard_decl pulled
4262 into registers unless absolutely necessary. And we know that
4263 crtl->stack_protect_guard is a local stack slot, so this skips
4264 all the fluff. */
4265 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4266 y = validize_mem (DECL_RTL (guard_decl));
4267
4268 /* Allow the target to copy from Y to X without leaking Y into a
4269 register. */
4270 if (HAVE_stack_protect_set)
4271 {
4272 rtx insn = gen_stack_protect_set (x, y);
4273 if (insn)
4274 {
4275 emit_insn (insn);
4276 return;
4277 }
4278 }
4279
4280 /* Otherwise do a straight move. */
4281 emit_move_insn (x, y);
4282 }
4283
4284 /* Expand code to verify the stack_protect_guard. This is invoked at
4285 the end of a function to be protected. */
4286
4287 #ifndef HAVE_stack_protect_test
4288 # define HAVE_stack_protect_test 0
4289 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4290 #endif
4291
4292 void
4293 stack_protect_epilogue (void)
4294 {
4295 tree guard_decl = targetm.stack_protect_guard ();
4296 rtx label = gen_label_rtx ();
4297 rtx x, y, tmp;
4298
4299 /* Avoid expand_expr here, because we don't want guard_decl pulled
4300 into registers unless absolutely necessary. And we know that
4301 crtl->stack_protect_guard is a local stack slot, so this skips
4302 all the fluff. */
4303 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4304 y = validize_mem (DECL_RTL (guard_decl));
4305
4306 /* Allow the target to compare Y with X without leaking either into
4307 a register. */
4308 switch (HAVE_stack_protect_test != 0)
4309 {
4310 case 1:
4311 tmp = gen_stack_protect_test (x, y, label);
4312 if (tmp)
4313 {
4314 emit_insn (tmp);
4315 break;
4316 }
4317 /* FALLTHRU */
4318
4319 default:
4320 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4321 break;
4322 }
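
/* The switch on HAVE_stack_protect_test above is, in effect, the
   following sketch; the switch form is presumably chosen to sidestep
   constant-condition warnings when the target lacks the
   stack_protect_test pattern and gen_stack_protect_test is stubbed
   out, while keeping the fall-through to the default comparison:

     if (HAVE_stack_protect_test
         && (tmp = gen_stack_protect_test (x, y, label)) != NULL_RTX)
       emit_insn (tmp);
     else
       emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
*/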
4323
4324 /* The noreturn predictor has been moved to the tree level. The rtl-level
4325 predictors estimate the probability of this branch at about 20%, which
4326 isn't enough to get things moved out of line. Since this is the only
4327 extant case of adding a noreturn function at the rtl level, it doesn't
4328 seem worth doing anything except adding the prediction by hand. */
4329 tmp = get_last_insn ();
4330 if (JUMP_P (tmp))
4331 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4332
4333 expand_expr_stmt (targetm.stack_protect_fail ());
4334 emit_label (label);
4335 }
4336
4337 /* Start the RTL for a new function, and set variables used for
4338 emitting RTL.
4339 SUBR is the FUNCTION_DECL node. */
4342
4343 void
4344 expand_function_start (tree subr)
4345 {
4346 /* Make sure volatile mem refs aren't considered
4347 valid operands of arithmetic insns. */
4348 init_recog_no_volatile ();
4349
4350 crtl->profile
4351 = (profile_flag
4352 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4353
4354 crtl->limit_stack
4355 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4356
4357 /* Make the label for return statements to jump to. Do not special
4358 case machines with special return instructions -- they will be
4359 handled later during jump, ifcvt, or epilogue creation. */
4360 return_label = gen_label_rtx ();
4361
4362 /* Initialize rtx used to return the value. */
4363 /* Do this before assign_parms so that we copy the struct value address
4364 before any library calls that assign parms might generate. */
4365
4366 /* Decide whether to return the value in memory or in a register. */
4367 if (aggregate_value_p (DECL_RESULT (subr), subr))
4368 {
4369 /* Returning something that won't go in a register. */
4370 rtx value_address = 0;
4371
4372 #ifdef PCC_STATIC_STRUCT_RETURN
4373 if (cfun->returns_pcc_struct)
4374 {
4375 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4376 value_address = assemble_static_space (size);
4377 }
4378 else
4379 #endif
4380 {
4381 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4382 /* Expect to be passed the address of a place to store the value.
4383 If it is passed as an argument, assign_parms will take care of
4384 it. */
4385 if (sv)
4386 {
4387 value_address = gen_reg_rtx (Pmode);
4388 emit_move_insn (value_address, sv);
4389 }
4390 }
4391 if (value_address)
4392 {
4393 rtx x = value_address;
4394 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4395 {
4396 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4397 set_mem_attributes (x, DECL_RESULT (subr), 1);
4398 }
4399 SET_DECL_RTL (DECL_RESULT (subr), x);
4400 }
4401 }
4402 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4403 /* If return mode is void, this decl rtl should not be used. */
4404 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4405 else
4406 {
4407 /* Compute the return values into a pseudo reg, which we will copy
4408 into the true return register after the cleanups are done. */
4409 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4410 if (TYPE_MODE (return_type) != BLKmode
4411 && targetm.calls.return_in_msb (return_type))
4412 /* expand_function_end will insert the appropriate padding in
4413 this case. Use the return value's natural (unpadded) mode
4414 within the function proper. */
4415 SET_DECL_RTL (DECL_RESULT (subr),
4416 gen_reg_rtx (TYPE_MODE (return_type)));
4417 else
4418 {
4419 /* In order to figure out what mode to use for the pseudo, we
4420 figure out what the mode of the eventual return register will
4421 actually be, and use that. */
4422 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4423
4424 /* Structures that are returned in registers are not
4425 aggregate_value_p, so we may see a PARALLEL or a REG. */
4426 if (REG_P (hard_reg))
4427 SET_DECL_RTL (DECL_RESULT (subr),
4428 gen_reg_rtx (GET_MODE (hard_reg)));
4429 else
4430 {
4431 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4432 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4433 }
4434 }
4435
4436 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4437 result to the real return register(s). */
4438 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4439 }
4440
4441 /* Initialize rtx for parameters and local variables.
4442 In some cases this requires emitting insns. */
4443 assign_parms (subr);
4444
4445 /* If function gets a static chain arg, store it. */
4446 if (cfun->static_chain_decl)
4447 {
4448 tree parm = cfun->static_chain_decl;
4449 rtx local = gen_reg_rtx (Pmode);
4450
4451 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4452 SET_DECL_RTL (parm, local);
4453 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4454
4455 emit_move_insn (local, static_chain_incoming_rtx);
4456 }
4457
4458 /* If the function receives a non-local goto, then store the
4459 bits we need to restore the frame pointer. */
4460 if (cfun->nonlocal_goto_save_area)
4461 {
4462 tree t_save;
4463 rtx r_save;
4464
4465 /* ??? We need to do this save early, but unfortunately this point
4466 is before the frame variable gets declared. Help out... */
4467 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4468 if (!DECL_RTL_SET_P (var))
4469 expand_decl (var);
4470
4471 t_save = build4 (ARRAY_REF, ptr_type_node,
4472 cfun->nonlocal_goto_save_area,
4473 integer_zero_node, NULL_TREE, NULL_TREE);
4474 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4475 r_save = convert_memory_address (Pmode, r_save);
4476
4477 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4478 update_nonlocal_goto_save_area ();
4479 }
4480
4481 /* The following was moved from init_function_start.
4482 The move is supposed to make sdb output more accurate. */
4483 /* Indicate the beginning of the function body,
4484 as opposed to parm setup. */
4485 emit_note (NOTE_INSN_FUNCTION_BEG);
4486
4487 gcc_assert (NOTE_P (get_last_insn ()));
4488
4489 parm_birth_insn = get_last_insn ();
4490
4491 if (crtl->profile)
4492 {
4493 #ifdef PROFILE_HOOK
4494 PROFILE_HOOK (current_function_funcdef_no);
4495 #endif
4496 }
4497
4498 /* After the display initializations is where the stack checking
4499 probe should go. */
4500 if (flag_stack_check)
4501 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4502
4503 /* Make sure there is a line number after the function entry setup code. */
4504 force_next_line_note ();
4505 }
4506
4507 /* Undo the effects of init_dummy_function_start. */
4508 void
4509 expand_dummy_function_end (void)
4510 {
4511 gcc_assert (in_dummy_function);
4512
4513 /* End any sequences that failed to be closed due to syntax errors. */
4514 while (in_sequence_p ())
4515 end_sequence ();
4516
4517 /* Outside function body, can't compute type's actual size
4518 until next function's body starts. */
4519
4520 free_after_parsing (cfun);
4521 free_after_compilation (cfun);
4522 pop_cfun ();
4523 in_dummy_function = false;
4524 }
4525
4526 /* Call DOIT for each hard register used as a return value from
4527 the current function. */
4528
4529 void
4530 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4531 {
4532 rtx outgoing = crtl->return_rtx;
4533
4534 if (! outgoing)
4535 return;
4536
4537 if (REG_P (outgoing))
4538 (*doit) (outgoing, arg);
4539 else if (GET_CODE (outgoing) == PARALLEL)
4540 {
4541 int i;
4542
4543 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4544 {
4545 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4546
4547 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4548 (*doit) (x, arg);
4549 }
4550 }
4551 }
4552
4553 static void
4554 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4555 {
4556 emit_clobber (reg);
4557 }
4558
4559 void
4560 clobber_return_register (void)
4561 {
4562 diddle_return_value (do_clobber_return_reg, NULL);
4563
4564 /* In case we do use pseudo to return value, clobber it too. */
4565 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4566 {
4567 tree decl_result = DECL_RESULT (current_function_decl);
4568 rtx decl_rtl = DECL_RTL (decl_result);
4569 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4570 {
4571 do_clobber_return_reg (decl_rtl, NULL);
4572 }
4573 }
4574 }
4575
4576 static void
4577 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4578 {
4579 emit_use (reg);
4580 }
4581
4582 static void
4583 use_return_register (void)
4584 {
4585 diddle_return_value (do_use_return_reg, NULL);
4586 }
4587
4588 /* Possibly warn about unused parameters. */
4589 void
4590 do_warn_unused_parameter (tree fn)
4591 {
4592 tree decl;
4593
4594 for (decl = DECL_ARGUMENTS (fn);
4595 decl; decl = TREE_CHAIN (decl))
4596 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4597 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4598 && !TREE_NO_WARNING (decl))
4599 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4600 }
4601
4602 static GTY(()) rtx initial_trampoline;
4603
4604 /* Generate RTL for the end of the current function. */
4605
4606 void
4607 expand_function_end (void)
4608 {
4609 rtx clobber_after;
4610
4611 /* If arg_pointer_save_area was referenced only from a nested
4612 function, we will not have initialized it yet. Do that now. */
4613 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4614 get_arg_pointer_save_area ();
4615
4616 /* If we are doing generic stack checking and this function makes calls,
4617 do a stack probe at the start of the function to ensure we have enough
4618 space for another stack frame. */
4619 if (flag_stack_check == GENERIC_STACK_CHECK)
4620 {
4621 rtx insn, seq;
4622
4623 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4624 if (CALL_P (insn))
4625 {
4626 start_sequence ();
4627 probe_stack_range (STACK_OLD_CHECK_PROTECT,
4628 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4629 seq = get_insns ();
4630 end_sequence ();
4631 emit_insn_before (seq, stack_check_probe_note);
4632 break;
4633 }
4634 }
4635
4636 /* End any sequences that failed to be closed due to syntax errors. */
4637 while (in_sequence_p ())
4638 end_sequence ();
4639
4640 clear_pending_stack_adjust ();
4641 do_pending_stack_adjust ();
4642
4643 /* Output a line number for the end of the function.
4644 SDB depends on this. */
4645 force_next_line_note ();
4646 set_curr_insn_source_location (input_location);
4647
4648 /* Before the return label (if any), clobber the return
4649 registers so that they are not propagated live to the rest of
4650 the function. This can only happen with functions that drop
4651 through; if there had been a return statement, there would
4652 have either been a return rtx, or a jump to the return label.
4653
4654 We delay actual code generation until after the current_function_value_rtx
4655 is computed. */
4656 clobber_after = get_last_insn ();
4657
4658 /* Output the label for the actual return from the function. */
4659 emit_label (return_label);
4660
4661 if (USING_SJLJ_EXCEPTIONS)
4662 {
4663 /* Let except.c know where it should emit the call to unregister
4664 the function context for sjlj exceptions. */
4665 if (flag_exceptions)
4666 sjlj_emit_function_exit_after (get_last_insn ());
4667 }
4668 else
4669 {
4670 /* We want to ensure that instructions that may trap are not
4671 moved into the epilogue by scheduling, because we don't
4672 always emit unwind information for the epilogue. */
4673 if (flag_non_call_exceptions)
4674 emit_insn (gen_blockage ());
4675 }
4676
4677 /* If this is an implementation of throw, do what's necessary to
4678 communicate between __builtin_eh_return and the epilogue. */
4679 expand_eh_return ();
4680
4681 /* If scalar return value was computed in a pseudo-reg, or was a named
4682 return value that got dumped to the stack, copy that to the hard
4683 return register. */
4684 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4685 {
4686 tree decl_result = DECL_RESULT (current_function_decl);
4687 rtx decl_rtl = DECL_RTL (decl_result);
4688
4689 if (REG_P (decl_rtl)
4690 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4691 : DECL_REGISTER (decl_result))
4692 {
4693 rtx real_decl_rtl = crtl->return_rtx;
4694
4695 /* This should be set in assign_parms. */
4696 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4697
4698 /* If this is a BLKmode structure being returned in registers,
4699 then use the mode computed in expand_return. Note that if
4700 decl_rtl is memory, then its mode may have been changed,
4701 but that crtl->return_rtx has not. */
4702 if (GET_MODE (real_decl_rtl) == BLKmode)
4703 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4704
4705 /* If a non-BLKmode return value should be padded at the least
4706 significant end of the register, shift it left by the appropriate
4707 amount. BLKmode results are handled using the group load/store
4708 machinery. */
4709 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4710 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4711 {
4712 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4713 REGNO (real_decl_rtl)),
4714 decl_rtl);
4715 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4716 }
4717 /* If a named return value dumped decl_return to memory, then
4718 we may need to re-do the PROMOTE_MODE signed/unsigned
4719 extension. */
4720 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4721 {
4722 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4723
4724 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4725 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4726 &unsignedp, 1);
4727
4728 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4729 }
4730 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4731 {
4732 /* If expand_function_start has created a PARALLEL for decl_rtl,
4733 move the result to the real return registers. Otherwise, do
4734 a group load from decl_rtl for a named return. */
4735 if (GET_CODE (decl_rtl) == PARALLEL)
4736 emit_group_move (real_decl_rtl, decl_rtl);
4737 else
4738 emit_group_load (real_decl_rtl, decl_rtl,
4739 TREE_TYPE (decl_result),
4740 int_size_in_bytes (TREE_TYPE (decl_result)));
4741 }
4742 /* In the case of complex integer modes smaller than a word, we'll
4743 need to generate some non-trivial bitfield insertions. Do that
4744 on a pseudo and not the hard register. */
4745 else if (GET_CODE (decl_rtl) == CONCAT
4746 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4747 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4748 {
4749 int old_generating_concat_p;
4750 rtx tmp;
4751
4752 old_generating_concat_p = generating_concat_p;
4753 generating_concat_p = 0;
4754 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4755 generating_concat_p = old_generating_concat_p;
4756
4757 emit_move_insn (tmp, decl_rtl);
4758 emit_move_insn (real_decl_rtl, tmp);
4759 }
4760 else
4761 emit_move_insn (real_decl_rtl, decl_rtl);
4762 }
4763 }
4764
4765 /* If returning a structure, arrange to return the address of the value
4766 in a place where debuggers expect to find it.
4767
4768 If returning a structure PCC style,
4769 the caller also depends on this value.
4770 And cfun->returns_pcc_struct is not necessarily set. */
4771 if (cfun->returns_struct
4772 || cfun->returns_pcc_struct)
4773 {
4774 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4775 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4776 rtx outgoing;
4777
4778 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4779 type = TREE_TYPE (type);
4780 else
4781 value_address = XEXP (value_address, 0);
4782
4783 outgoing = targetm.calls.function_value (build_pointer_type (type),
4784 current_function_decl, true);
4785
4786 /* Mark this as a function return value so integrate will delete the
4787 assignment and USE below when inlining this function. */
4788 REG_FUNCTION_VALUE_P (outgoing) = 1;
4789
4790 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4791 value_address = convert_memory_address (GET_MODE (outgoing),
4792 value_address);
4793
4794 emit_move_insn (outgoing, value_address);
4795
4796 /* Show the return register used to hold the result (in this case
4797 the address of the result). */
4798 crtl->return_rtx = outgoing;
4799 }
4800
4801 /* Emit the actual code to clobber return register. */
4802 {
4803 rtx seq;
4804
4805 start_sequence ();
4806 clobber_return_register ();
4807 expand_naked_return ();
4808 seq = get_insns ();
4809 end_sequence ();
4810
4811 emit_insn_after (seq, clobber_after);
4812 }
4813
4814 /* Output the label for the naked return from the function. */
4815 emit_label (naked_return_label);
4816
4817 /* @@@ This is a kludge. We want to ensure that instructions that
4818 may trap are not moved into the epilogue by scheduling, because
4819 we don't always emit unwind information for the epilogue. */
4820 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4821 emit_insn (gen_blockage ());
4822
4823 /* If stack protection is enabled for this function, check the guard. */
4824 if (crtl->stack_protect_guard)
4825 stack_protect_epilogue ();
4826
4827 /* If we had calls to alloca, and this machine needs
4828 an accurate stack pointer to exit the function,
4829 insert some code to save and restore the stack pointer. */
4830 if (! EXIT_IGNORE_STACK
4831 && cfun->calls_alloca)
4832 {
4833 rtx tem = 0;
4834
4835 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4836 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4837 }
4838
4839 /* ??? This should no longer be necessary since stupid.c is no longer
4840 with us, but there are some parts of the compiler (e.g. reload_combine
4841 and sh mach_dep_reorg) that still try to compute their own lifetime
4842 info instead of using the general framework. */
4843 use_return_register ();
4844 }
4845
4846 rtx
4847 get_arg_pointer_save_area (void)
4848 {
4849 rtx ret = arg_pointer_save_area;
4850
4851 if (! ret)
4852 {
4853 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4854 arg_pointer_save_area = ret;
4855 }
4856
4857 if (! crtl->arg_pointer_save_area_init)
4858 {
4859 rtx seq;
4860
4861 /* Save the arg pointer at the beginning of the function. The
4862 generated stack slot may not be a valid memory address, so we
4863 have to check it and fix it if necessary. */
4864 start_sequence ();
4865 emit_move_insn (validize_mem (ret),
4866 crtl->args.internal_arg_pointer);
4867 seq = get_insns ();
4868 end_sequence ();
4869
4870 push_topmost_sequence ();
4871 emit_insn_after (seq, entry_of_function ());
4872 pop_topmost_sequence ();
4873 }
4874
4875 return ret;
4876 }
4877
4878 /* Extend a vector that records the INSN_UIDs of INSNS
4879 (a list of one or more insns). */
4880
4881 static void
4882 record_insns (rtx insns, VEC(int,heap) **vecp)
4883 {
4884 rtx tmp;
4885
4886 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4887 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4888 }
4889
4890 /* Set the locator of the insn chain starting at INSN to LOC. */
4891 static void
4892 set_insn_locators (rtx insn, int loc)
4893 {
4894 while (insn != NULL_RTX)
4895 {
4896 if (INSN_P (insn))
4897 INSN_LOCATOR (insn) = loc;
4898 insn = NEXT_INSN (insn);
4899 }
4900 }
4901
4902 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4903 be running after reorg, SEQUENCE rtl is possible. */
4904
4905 static int
4906 contains (const_rtx insn, VEC(int,heap) **vec)
4907 {
4908 int i, j;
4909
4910 if (NONJUMP_INSN_P (insn)
4911 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4912 {
4913 int count = 0;
4914 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4915 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4916 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4917 == VEC_index (int, *vec, j))
4918 count++;
4919 return count;
4920 }
4921 else
4922 {
4923 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4924 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4925 return 1;
4926 }
4927 return 0;
4928 }
4929
4930 int
4931 prologue_epilogue_contains (const_rtx insn)
4932 {
4933 if (contains (insn, &prologue))
4934 return 1;
4935 if (contains (insn, &epilogue))
4936 return 1;
4937 return 0;
4938 }
4939
4940 int
4941 sibcall_epilogue_contains (const_rtx insn)
4942 {
4943 if (sibcall_epilogue)
4944 return contains (insn, &sibcall_epilogue);
4945 return 0;
4946 }
4947
4948 #ifdef HAVE_return
4949 /* Insert gen_return at the end of block BB. This also means updating
4950 block_for_insn appropriately. */
4951
4952 static void
4953 emit_return_into_block (basic_block bb)
4954 {
4955 emit_jump_insn_after (gen_return (), BB_END (bb));
4956 }
4957 #endif /* HAVE_return */
4958
4959 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4960 this into place with notes indicating where the prologue ends and where
4961 the epilogue begins. Update the basic block information when possible. */
4962
4963 static void
4964 thread_prologue_and_epilogue_insns (void)
4965 {
4966 int inserted = 0;
4967 edge e;
4968 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4969 rtx seq;
4970 #endif
4971 #if defined (HAVE_epilogue) || defined(HAVE_return)
4972 rtx epilogue_end = NULL_RTX;
4973 #endif
4974 edge_iterator ei;
4975
4976 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4977 #ifdef HAVE_prologue
4978 if (HAVE_prologue)
4979 {
4980 start_sequence ();
4981 seq = gen_prologue ();
4982 emit_insn (seq);
4983
4984 /* Insert an explicit USE for the frame pointer
4985 if the profiling is on and the frame pointer is required. */
4986 if (crtl->profile && frame_pointer_needed)
4987 emit_use (hard_frame_pointer_rtx);
4988
4989 /* Retain a map of the prologue insns. */
4990 record_insns (seq, &prologue);
4991 emit_note (NOTE_INSN_PROLOGUE_END);
4992
4993 #ifndef PROFILE_BEFORE_PROLOGUE
4994 /* Ensure that instructions are not moved into the prologue when
4995 profiling is on. The call to the profiling routine can be
4996 emitted within the live range of a call-clobbered register. */
4997 if (crtl->profile)
4998 emit_insn (gen_blockage ());
4999 #endif
5000
5001 seq = get_insns ();
5002 end_sequence ();
5003 set_insn_locators (seq, prologue_locator);
5004
5005 /* Can't deal with multiple successors of the entry block
5006 at the moment. Function should always have at least one
5007 entry point. */
5008 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5009
5010 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5011 inserted = 1;
5012 }
5013 #endif
5014
5015 /* If the exit block has no non-fake predecessors, we don't need
5016 an epilogue. */
5017 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5018 if ((e->flags & EDGE_FAKE) == 0)
5019 break;
5020 if (e == NULL)
5021 goto epilogue_done;
5022
5023 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5024 #ifdef HAVE_return
5025 if (optimize && HAVE_return)
5026 {
5027 /* If we're allowed to generate a simple return instruction,
5028 then by definition we don't need a full epilogue. Examine
5029 the block that falls through to EXIT. If it does not
5030 contain any code, examine its predecessors and try to
5031 emit (conditional) return instructions. */
5032
5033 basic_block last;
5034 rtx label;
5035
5036 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5037 if (e->flags & EDGE_FALLTHRU)
5038 break;
5039 if (e == NULL)
5040 goto epilogue_done;
5041 last = e->src;
5042
5043 /* Verify that there are no active instructions in the last block. */
5044 label = BB_END (last);
5045 while (label && !LABEL_P (label))
5046 {
5047 if (active_insn_p (label))
5048 break;
5049 label = PREV_INSN (label);
5050 }
5051
5052 if (BB_HEAD (last) == label && LABEL_P (label))
5053 {
5054 edge_iterator ei2;
5055
5056 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5057 {
5058 basic_block bb = e->src;
5059 rtx jump;
5060
5061 if (bb == ENTRY_BLOCK_PTR)
5062 {
5063 ei_next (&ei2);
5064 continue;
5065 }
5066
5067 jump = BB_END (bb);
5068 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5069 {
5070 ei_next (&ei2);
5071 continue;
5072 }
5073
5074 /* If we have an unconditional jump, we can replace that
5075 with a simple return instruction. */
5076 if (simplejump_p (jump))
5077 {
5078 emit_return_into_block (bb);
5079 delete_insn (jump);
5080 }
5081
5082 /* If we have a conditional jump, we can try to replace
5083 that with a conditional return instruction. */
5084 else if (condjump_p (jump))
5085 {
5086 if (! redirect_jump (jump, 0, 0))
5087 {
5088 ei_next (&ei2);
5089 continue;
5090 }
5091
5092 /* If this block has only one successor, it both jumps
5093 and falls through to the fallthru block, so we can't
5094 delete the edge. */
5095 if (single_succ_p (bb))
5096 {
5097 ei_next (&ei2);
5098 continue;
5099 }
5100 }
5101 else
5102 {
5103 ei_next (&ei2);
5104 continue;
5105 }
5106
5107 /* Fix up the CFG for the successful change we just made. */
5108 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5109 }
5110
5111 /* Emit a return insn for the exit fallthru block. Whether
5112 this is still reachable will be determined later. */
5113
5114 emit_barrier_after (BB_END (last));
5115 emit_return_into_block (last);
5116 epilogue_end = BB_END (last);
5117 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5118 goto epilogue_done;
5119 }
5120 }
5121 #endif
5122 /* Find the edge that falls through to EXIT. Other edges may exist
5123 due to RETURN instructions, but those don't need epilogues.
5124 There really shouldn't be a mixture -- either all should have
5125 been converted or none, however... */
5126
5127 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5128 if (e->flags & EDGE_FALLTHRU)
5129 break;
5130 if (e == NULL)
5131 goto epilogue_done;
5132
5133 #ifdef HAVE_epilogue
5134 if (HAVE_epilogue)
5135 {
5136 start_sequence ();
5137 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5138 seq = gen_epilogue ();
5139 emit_jump_insn (seq);
5140
5141 /* Retain a map of the epilogue insns. */
5142 record_insns (seq, &epilogue);
5143 set_insn_locators (seq, epilogue_locator);
5144
5145 seq = get_insns ();
5146 end_sequence ();
5147
5148 insert_insn_on_edge (seq, e);
5149 inserted = 1;
5150 }
5151 else
5152 #endif
5153 {
5154 basic_block cur_bb;
5155
5156 if (! next_active_insn (BB_END (e->src)))
5157 goto epilogue_done;
5158 /* We have a fall-through edge to the exit block, the source is not
5159 at the end of the function, and there will be an assembler epilogue
5160 at the end of the function.
5161 We can't use force_nonfallthru here, because that would try to
5162 use return. Inserting a jump 'by hand' is extremely messy, so
5163 we take advantage of cfg_layout_finalize using
5164 fixup_fallthru_exit_predecessor. */
5165 cfg_layout_initialize (0);
5166 FOR_EACH_BB (cur_bb)
5167 if (cur_bb->index >= NUM_FIXED_BLOCKS
5168 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5169 cur_bb->aux = cur_bb->next_bb;
5170 cfg_layout_finalize ();
5171 }
5172 epilogue_done:
5173 default_rtl_profile ();
5174
5175 if (inserted)
5176 {
5177 commit_edge_insertions ();
5178
5179 /* The epilogue insns we inserted may cause the exit edge to no longer
5180 be fallthru. */
5181 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5182 {
5183 if (((e->flags & EDGE_FALLTHRU) != 0)
5184 && returnjump_p (BB_END (e->src)))
5185 e->flags &= ~EDGE_FALLTHRU;
5186 }
5187 }
5188
5189 #ifdef HAVE_sibcall_epilogue
5190 /* Emit sibling epilogues before any sibling call sites. */
5191 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5192 {
5193 basic_block bb = e->src;
5194 rtx insn = BB_END (bb);
5195
5196 if (!CALL_P (insn)
5197 || ! SIBLING_CALL_P (insn))
5198 {
5199 ei_next (&ei);
5200 continue;
5201 }
5202
5203 start_sequence ();
5204 emit_insn (gen_sibcall_epilogue ());
5205 seq = get_insns ();
5206 end_sequence ();
5207
5208 /* Retain a map of the epilogue insns. Used in life analysis to
5209 avoid getting rid of sibcall epilogue insns. Do this before we
5210 actually emit the sequence. */
5211 record_insns (seq, &sibcall_epilogue);
5212 set_insn_locators (seq, epilogue_locator);
5213
5214 emit_insn_before (seq, insn);
5215 ei_next (&ei);
5216 }
5217 #endif
5218
5219 #ifdef HAVE_epilogue
5220 if (epilogue_end)
5221 {
5222 rtx insn, next;
5223
5224 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
5225 epilogue back before it, as those can be relevant for debug info
5226 generation. There is no need, however, to be too strict about
5227 the existence of such a note. */
5229 for (insn = epilogue_end; insn; insn = next)
5230 {
5231 next = NEXT_INSN (insn);
5232 if (NOTE_P (insn)
5233 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5234 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5235 }
5236 }
5237 #endif
5238
5239 /* Threading the prologue and epilogue changes the artificial refs
5240 in the entry and exit blocks. */
5241 epilogue_completed = 1;
5242 df_update_entry_exit_and_calls ();
5243 }
5244
5245 /* Reposition the prologue-end and epilogue-begin notes after instruction
5246 scheduling and delayed branch scheduling. */
5247
5248 void
5249 reposition_prologue_and_epilogue_notes (void)
5250 {
5251 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5252 rtx insn, last, note;
5253 int len;
5254
5255 if ((len = VEC_length (int, prologue)) > 0)
5256 {
5257 last = 0, note = 0;
5258
5259 /* Scan from the beginning until we reach the last prologue insn.
5260 We apparently can't depend on basic_block_{head,end} after
5261 reorg has run. */
5262 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5263 {
5264 if (NOTE_P (insn))
5265 {
5266 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5267 note = insn;
5268 }
5269 else if (contains (insn, &prologue))
5270 {
5271 last = insn;
5272 if (--len == 0)
5273 break;
5274 }
5275 }
5276
5277 if (last)
5278 {
5279 /* Find the prologue-end note if we haven't already, and
5280 move it to just after the last prologue insn. */
5281 if (note == 0)
5282 {
5283 for (note = last; (note = NEXT_INSN (note));)
5284 if (NOTE_P (note)
5285 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5286 break;
5287 }
5288
5289 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5290 if (LABEL_P (last))
5291 last = NEXT_INSN (last);
5292 reorder_insns (note, note, last);
5293 }
5294 }
5295
5296 if ((len = VEC_length (int, epilogue)) > 0)
5297 {
5298 last = 0, note = 0;
5299
5300 /* Scan from the end until we reach the first epilogue insn.
5301 We apparently can't depend on basic_block_{head,end} after
5302 reorg has run. */
5303 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5304 {
5305 if (NOTE_P (insn))
5306 {
5307 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5308 note = insn;
5309 }
5310 else if (contains (insn, &epilogue))
5311 {
5312 last = insn;
5313 if (--len == 0)
5314 break;
5315 }
5316 }
5317
5318 if (last)
5319 {
5320 /* Find the epilogue-begin note if we haven't already, and
5321 move it to just before the first epilogue insn. */
5322 if (note == 0)
5323 {
5324 for (note = insn; (note = PREV_INSN (note));)
5325 if (NOTE_P (note)
5326 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5327 break;
5328 }
5329
5330 if (PREV_INSN (last) != note)
5331 reorder_insns (note, note, PREV_INSN (last));
5332 }
5333 }
5334 #endif /* HAVE_prologue or HAVE_epilogue */
5335 }
5336
5337 /* Returns the name of the current function. */
5338 const char *
5339 current_function_name (void)
5340 {
5341 return lang_hooks.decl_printable_name (cfun->decl, 2);
5342 }
5343
5344 /* Returns the raw (mangled) name of the current function. */
5345 const char *
5346 current_function_assembler_name (void)
5347 {
5348 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5349 }
5350
5351
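/* Record whether the current function uses only "leaf" registers.
   This only matters on targets that define LEAF_REGISTERS (typically
   register-window machines such as SPARC): when optimizing, a
   function that makes no calls and touches only leaf registers can
   be compiled without allocating a register window.  */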
5352 static unsigned int
5353 rest_of_handle_check_leaf_regs (void)
5354 {
5355 #ifdef LEAF_REGISTERS
5356 current_function_uses_only_leaf_regs
5357 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5358 #endif
5359 return 0;
5360 }
5361
5362 /* Insert a TYPE into the used types hash table of CFUN. */
5363 static void
5364 used_types_insert_helper (tree type, struct function *func)
5365 {
5366 if (type != NULL && func != NULL)
5367 {
5368 void **slot;
5369
5370 if (func->used_types_hash == NULL)
5371 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5372 htab_eq_pointer, NULL);
5373 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5374 if (*slot == NULL)
5375 *slot = type;
5376 }
5377 }
5378
5379 /* Given a type, insert it into the used hash table in cfun. */
5380 void
5381 used_types_insert (tree t)
5382 {
5383 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5384 t = TREE_TYPE (t);
5385 t = TYPE_MAIN_VARIANT (t);
5386 if (debug_info_level > DINFO_LEVEL_NONE)
5387 used_types_insert_helper (t, cfun);
5388 }
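/* A worked example (illustrative): for a declaration such as
   "const char *argv[]", the loop above strips the ARRAY_TYPE and
   then the POINTER_TYPE to reach "const char", and
   TYPE_MAIN_VARIANT drops the qualifiers, so the type recorded in
   the hash table is plain "char".  */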
5389
5390 struct rtl_opt_pass pass_leaf_regs =
5391 {
5392 {
5393 RTL_PASS,
5394 NULL, /* name */
5395 NULL, /* gate */
5396 rest_of_handle_check_leaf_regs, /* execute */
5397 NULL, /* sub */
5398 NULL, /* next */
5399 0, /* static_pass_number */
5400 0, /* tv_id */
5401 0, /* properties_required */
5402 0, /* properties_provided */
5403 0, /* properties_destroyed */
5404 0, /* todo_flags_start */
5405 0 /* todo_flags_finish */
5406 }
5407 };
5408
5409 static unsigned int
5410 rest_of_handle_thread_prologue_and_epilogue (void)
5411 {
5412 if (optimize)
5413 cleanup_cfg (CLEANUP_EXPENSIVE);
5414 /* On some machines, the prologue and epilogue code, or parts thereof,
5415 can be represented as RTL. Doing so lets us schedule insns between
5416 it and the rest of the code and also allows delayed branch
5417 scheduling to operate in the epilogue. */
5418
5419 thread_prologue_and_epilogue_insns ();
5420 return 0;
5421 }
5422
5423 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5424 {
5425 {
5426 RTL_PASS,
5427 "pro_and_epilogue", /* name */
5428 NULL, /* gate */
5429 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5430 NULL, /* sub */
5431 NULL, /* next */
5432 0, /* static_pass_number */
5433 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5434 0, /* properties_required */
5435 0, /* properties_provided */
5436 0, /* properties_destroyed */
5437 TODO_verify_flow, /* todo_flags_start */
5438 TODO_dump_func |
5439 TODO_df_verify |
5440 TODO_df_finish | TODO_verify_rtl_sharing |
5441 TODO_ggc_collect /* todo_flags_finish */
5442 }
5443 };
5444
5445
5446 /* This mini-pass fixes fallout from SSA in asm statements that have
5447 in-out constraints. Say you start with
5448
5449 orig = inout;
5450 asm ("": "+mr" (inout));
5451 use (orig);
5452
5453 which is transformed very early to use explicit output and match operands:
5454
5455 orig = inout;
5456 asm ("": "=mr" (inout) : "0" (inout));
5457 use (orig);
5458
5459 Or, after SSA and copyprop,
5460
5461 asm ("": "=mr" (inout_2) : "0" (inout_1));
5462 use (inout_1);
5463
5464 Clearly inout_2 and inout_1 can no longer be coalesced easily, as
5465 they represent two separate values, so they will get different pseudo
5466 registers during expansion. Then, since the two operands must match
5467 per the constraints but use different pseudo registers, reload has no
5468 choice but to generate a reload for them. But reloads can only be
5469 satisfied by hard registers, not by memory, so we need a register for
5470 this reload purely because we are presented with non-matching
5471 operands. So, even though the constraint allows memory for this
5472 operand, no memory can actually be used for it, and this can cause
5473 reload failures on register-starved targets.
5474
5475 So it is a symptom of reload not being able to use memory for reloads,
5476 or, alternatively, of the two operands not coming into reload as
5477 matching (in which case the pseudo could go to memory just fine, as
5478 the alternative allows it, and no reload would be necessary at all).
5479 We fix the latter problem here by transforming
5480
5481 asm ("": "=mr" (inout_2) : "0" (inout_1));
5482
5483 back to
5484
5485 inout_2 = inout_1;
5486 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
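/* A minimal compilable trigger for this situation (an illustrative
   sketch, not part of GCC itself):

     extern int use (int);

     int
     f (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return use (orig) + inout;
     }

   Keeping "orig" live across the asm lets SSA copy propagation split
   the in-out operand into two names, which is exactly the
   non-matching shape that match_asm_constraints_1 below undoes.  */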
5487
5488 static void
5489 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5490 {
5491 int i;
5492 bool changed = false;
5493 rtx op = SET_SRC (p_sets[0]);
5494 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5495 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5496 bool *output_matched = XALLOCAVEC (bool, noutputs);
5497
5498 memset (output_matched, 0, noutputs * sizeof (bool));
5499 for (i = 0; i < ninputs; i++)
5500 {
5501 rtx input, output, insns;
5502 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5503 char *end;
5504 int match, j;
5505
5506 if (*constraint == '%')
5507 constraint++;
5508
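/* A matching constraint is just a decimal operand number such as
   "0" or "1" (the '%' commutativity marker, if any, was skipped
   above).  For any other constraint strtoul consumes no characters
   and we simply move on to the next input.  */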
5509 match = strtoul (constraint, &end, 10);
5510 if (end == constraint)
5511 continue;
5512
5513 gcc_assert (match < noutputs);
5514 output = SET_DEST (p_sets[match]);
5515 input = RTVEC_ELT (inputs, i);
5516 /* Only do the transformation for pseudos. */
5517 if (! REG_P (output)
5518 || rtx_equal_p (output, input)
5519 || (GET_MODE (input) != VOIDmode
5520 && GET_MODE (input) != GET_MODE (output)))
5521 continue;
5522
5523 /* We can't do anything if the output is also used as input,
5524 as we're going to overwrite it. */
5525 for (j = 0; j < ninputs; j++)
5526 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5527 break;
5528 if (j != ninputs)
5529 continue;
5530
5531 /* Avoid changing the same input several times. For
5532 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5533 only change in once (to out1), rather than changing it
5534 first to out1 and afterwards to out2. */
5535 if (i > 0)
5536 {
5537 for (j = 0; j < noutputs; j++)
5538 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5539 break;
5540 if (j != noutputs)
5541 continue;
5542 }
5543 output_matched[match] = true;
5544
5545 start_sequence ();
5546 emit_move_insn (output, input);
5547 insns = get_insns ();
5548 end_sequence ();
5549 emit_insn_before (insns, insn);
5550
5551 /* Now replace all mentions of the input with output. We can't
5552 just replace the occurrence in inputs[i], as the register might
5553 also be used in some other input (or even in an address of an
5554 output), which would mean possibly increasing the number of
5555 inputs by one (namely 'output' in addition), which might pose
5556 too complicated a problem for reload to solve. E.g. this situation:
5557
5558 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5559
5560 Here 'input' is used in two occurrences as input (once for the
5561 input operand, once for the address in the second output operand).
5562 If we replaced only the occurrence of the input operand (to
5563 make the match) we would be left with this:
5564
5565 output = input
5566 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5567
5568 Now we suddenly have two different input values (containing the same
5569 value, but different pseudos) where we formerly had only one.
5570 With more complicated asms this might lead to reload failures
5571 which wouldn't have happened without this pass. So, iterate over
5572 all operands and replace all occurrences of the register used. */
5573 for (j = 0; j < noutputs; j++)
5574 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5575 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5576 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5577 input, output);
5578 for (j = 0; j < ninputs; j++)
5579 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5580 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5581 input, output);
5582
5583 changed = true;
5584 }
5585
5586 if (changed)
5587 df_insn_rescan (insn);
5588 }
5589
5590 static unsigned
5591 rest_of_match_asm_constraints (void)
5592 {
5593 basic_block bb;
5594 rtx insn, pat, *p_sets;
5595 int noutputs;
5596
5597 if (!crtl->has_asm_statement)
5598 return 0;
5599
5600 df_set_flags (DF_DEFER_INSN_RESCAN);
5601 FOR_EACH_BB (bb)
5602 {
5603 FOR_BB_INSNS (bb, insn)
5604 {
5605 if (!INSN_P (insn))
5606 continue;
5607
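/* An asm with multiple outputs (or clobbers) is represented as a
   PARALLEL whose leading elements are SETs, one per output; a
   single-output asm without clobbers is a lone SET whose source is
   the ASM_OPERANDS itself.  Anything else cannot be an asm with
   outputs, so it is skipped.  */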
5608 pat = PATTERN (insn);
5609 if (GET_CODE (pat) == PARALLEL)
5610 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5611 else if (GET_CODE (pat) == SET)
5612 p_sets = &PATTERN (insn), noutputs = 1;
5613 else
5614 continue;
5615
5616 if (GET_CODE (*p_sets) == SET
5617 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5618 match_asm_constraints_1 (insn, p_sets, noutputs);
5619 }
5620 }
5621
5622 return TODO_df_finish;
5623 }
5624
5625 struct rtl_opt_pass pass_match_asm_constraints =
5626 {
5627 {
5628 RTL_PASS,
5629 "asmcons", /* name */
5630 NULL, /* gate */
5631 rest_of_match_asm_constraints, /* execute */
5632 NULL, /* sub */
5633 NULL, /* next */
5634 0, /* static_pass_number */
5635 0, /* tv_id */
5636 0, /* properties_required */
5637 0, /* properties_provided */
5638 0, /* properties_destroyed */
5639 0, /* todo_flags_start */
5640 TODO_dump_func /* todo_flags_finish */
5641 }
5642 };
5643
5644
5645 #include "gt-function.h"