comparison: gcc/function.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318

author    nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date      Tue, 22 Mar 2011 17:18:12 +0900
parents   b7f97abdc517
children  1b10fe6932e1 04ced10e8804
compared  65:65488c3d617d -> 67:f6334be47118
@@ -1,9 +1,9 @@
 /* Expands front end tree to back end RTL for GCC.
    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
-   2010 Free Software Foundation, Inc.
+   2010, 2011 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -35,11 +35,11 @@
 
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
 #include "tm.h"
-#include "rtl.h"
+#include "rtl-error.h"
 #include "tree.h"
 #include "flags.h"
 #include "except.h"
 #include "function.h"
 #include "expr.h"
@@ -49,11 +49,10 @@
 #include "hard-reg-set.h"
 #include "insn-config.h"
 #include "recog.h"
 #include "output.h"
 #include "basic-block.h"
-#include "toplev.h"
 #include "hashtab.h"
 #include "ggc.h"
 #include "tm_p.h"
 #include "integrate.h"
 #include "langhooks.h"
@@ -130,11 +129,11 @@
 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
   htab_t epilogue_insn_hash;
 
 
 htab_t types_used_by_vars_hash = NULL;
-tree types_used_by_cur_var_decl = NULL;
+VEC(tree,gc) *types_used_by_cur_var_decl;
 
 /* Forward declarations.  */
 
 static struct temp_slot *find_temp_slot_from_address (rtx);
 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
@@ -338,11 +337,11 @@
    function's frame_space_list.  */
 
 static void
 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
 {
-  struct frame_space *space = GGC_NEW (struct frame_space);
+  struct frame_space *space = ggc_alloc_frame_space ();
   space->next = crtl->frame_space_list;
   crtl->frame_space_list = space;
   space->start = start;
   space->length = end - start;
 }
@@ -354,22 +353,25 @@
    0 means according to MODE,
    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
    -2 means use BITS_PER_UNIT,
    positive specifies alignment boundary in bits.
 
-   If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.
+   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
+   alignment and ASLK_RECORD_PAD bit set if we should remember
+   extra space we allocated for alignment purposes.  When we are
+   called from assign_stack_temp_for_type, it is not set so we don't
+   track the same stack slot in two independent lists.
 
    We do not round to stack_boundary here.  */
 
 rtx
 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
-                      int align,
-                      bool reduce_alignment_ok ATTRIBUTE_UNUSED)
+                      int align, int kind)
 {
   rtx x, addr;
   int bigend_correction = 0;
-  HOST_WIDE_INT slot_offset, old_frame_offset;
+  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
   unsigned int alignment, alignment_in_bits;
 
   if (align == 0)
     {
       alignment = get_stack_local_alignment (NULL, mode);
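This hunk replaces a single boolean parameter with an int bitmask so that callers can independently request alignment reduction and padding bookkeeping. A standalone sketch of that bool-to-flag-bits migration (plain C; the numeric ASLK_* values here are assumptions, the real definitions live in gcc/function.h):

    /* Illustration only, not GCC code.  */
    #include <stdio.h>

    #define ASLK_REDUCE_ALIGN 1   /* OK to reduce alignment.  */
    #define ASLK_RECORD_PAD   2   /* Remember padding in a free list.  */

    static void
    alloc_slot (int size, int kind)
    {
      if (kind & ASLK_REDUCE_ALIGN)
        printf ("size %d: may reduce alignment\n", size);
      if (kind & ASLK_RECORD_PAD)
        printf ("size %d: recording alignment padding\n", size);
    }

    int
    main (void)
    {
      alloc_slot (16, ASLK_RECORD_PAD);   /* like assign_stack_local */
      alloc_slot (16, 0);                 /* like assign_stack_temp_for_type */
      alloc_slot (16, ASLK_RECORD_PAD | ASLK_REDUCE_ALIGN);
      return 0;
    }

Separate bits let a later caller combine behaviors without another signature change, which a bool could not do.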
@@ -411,11 +413,11 @@
           if (!crtl->stack_realign_needed)
             {
               /* It is OK to reduce the alignment as long as the
                  requested size is 0 or the estimated stack
                  alignment >= mode alignment.  */
-              gcc_assert (reduce_alignment_ok
+              gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                           || size == 0
                           || (crtl->stack_alignment_estimated
                               >= GET_MODE_ALIGNMENT (mode)));
               alignment_in_bits = crtl->stack_alignment_estimated;
               alignment = alignment_in_bits / BITS_PER_UNIT;
@@ -429,25 +431,28 @@
   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
     crtl->max_used_stack_slot_alignment = alignment_in_bits;
 
   if (mode != BLKmode || size != 0)
     {
-      struct frame_space **psp;
-
-      for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
-        {
-          struct frame_space *space = *psp;
-          if (!try_fit_stack_local (space->start, space->length, size,
-                                    alignment, &slot_offset))
-            continue;
-          *psp = space->next;
-          if (slot_offset > space->start)
-            add_frame_space (space->start, slot_offset);
-          if (slot_offset + size < space->start + space->length)
-            add_frame_space (slot_offset + size,
-                             space->start + space->length);
-          goto found_space;
+      if (kind & ASLK_RECORD_PAD)
+        {
+          struct frame_space **psp;
+
+          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
+            {
+              struct frame_space *space = *psp;
+              if (!try_fit_stack_local (space->start, space->length, size,
+                                        alignment, &slot_offset))
+                continue;
+              *psp = space->next;
+              if (slot_offset > space->start)
+                add_frame_space (space->start, slot_offset);
+              if (slot_offset + size < space->start + space->length)
+                add_frame_space (slot_offset + size,
+                                 space->start + space->length);
+              goto found_space;
+            }
         }
     }
   else if (!STACK_ALIGNMENT_NEEDED)
     {
       slot_offset = frame_offset;
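The loop above is a first-fit scan over the frame-space free list: carve the slot out of an existing range and give any leading or trailing padding back to the list. A self-contained model of that carve-and-return logic (plain C with illustrative types, not GCC code):

    #include <stdio.h>
    #include <stdlib.h>

    struct range { long start, length; struct range *next; };
    static struct range *free_list;

    static void
    add_range (long start, long end)
    {
      struct range *r = malloc (sizeof *r);
      r->start = start;
      r->length = end - start;
      r->next = free_list;
      free_list = r;
    }

    /* Try to place SIZE bytes at ALIGN alignment inside a free range.  */
    static int
    alloc_from_free_list (long size, long align, long *pos)
    {
      struct range **pr;
      for (pr = &free_list; *pr; pr = &(*pr)->next)
        {
          struct range *r = *pr;
          long slot = (r->start + align - 1) / align * align;  /* round up */
          if (slot + size > r->start + r->length)
            continue;                                   /* doesn't fit */
          *pr = r->next;                                /* unlink */
          if (slot > r->start)
            add_range (r->start, slot);                 /* leading pad */
          if (slot + size < r->start + r->length)
            add_range (slot + size, r->start + r->length); /* trailing pad */
          *pos = slot;
          free (r);
          return 1;
        }
      return 0;
    }

    int
    main (void)
    {
      long pos;
      add_range (0, 64);
      if (alloc_from_free_list (20, 16, &pos))
        printf ("slot at %ld\n", pos);   /* prints: slot at 0 */
      return 0;
    }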
@@ -459,24 +464,30 @@
   if (FRAME_GROWS_DOWNWARD)
     {
       frame_offset -= size;
       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
 
-      if (slot_offset > frame_offset)
-        add_frame_space (frame_offset, slot_offset);
-      if (slot_offset + size < old_frame_offset)
-        add_frame_space (slot_offset + size, old_frame_offset);
+      if (kind & ASLK_RECORD_PAD)
+        {
+          if (slot_offset > frame_offset)
+            add_frame_space (frame_offset, slot_offset);
+          if (slot_offset + size < old_frame_offset)
+            add_frame_space (slot_offset + size, old_frame_offset);
+        }
     }
   else
     {
       frame_offset += size;
       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
 
-      if (slot_offset > old_frame_offset)
-        add_frame_space (old_frame_offset, slot_offset);
-      if (slot_offset + size < frame_offset)
-        add_frame_space (slot_offset + size, frame_offset);
+      if (kind & ASLK_RECORD_PAD)
+        {
+          if (slot_offset > old_frame_offset)
+            add_frame_space (old_frame_offset, slot_offset);
+          if (slot_offset + size < frame_offset)
+            add_frame_space (slot_offset + size, frame_offset);
+        }
     }
 
  found_space:
   /* On a big-endian machine, if we are allocating more space than we will use,
      use the least significant bytes of those that are allocated.  */
@@ -512,11 +523,11 @@
 /* Wrap up assign_stack_local_1 with last parameter as false.  */
 
 rtx
 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
 {
-  return assign_stack_local_1 (mode, size, align, false);
+  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
 }
 
 
 /* In order to evaluate some expressions, such as function calls returning
    structures in memory, we need to temporarily allocate stack locations.
@@ -681,11 +692,11 @@
 /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping.  */
 static void
 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
 {
   void **slot;
-  struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry);
+  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
   t->address = address;
   t->temp_slot = temp_slot;
   t->hash = temp_slot_address_compute_hash (t);
   slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
   *slot = t;
@@ -833,11 +844,11 @@
       int alignment = best_p->align / BITS_PER_UNIT;
       HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
 
       if (best_p->size - rounded_size >= alignment)
         {
-          p = GGC_NEW (struct temp_slot);
+          p = ggc_alloc_temp_slot ();
           p->in_use = p->addr_taken = 0;
           p->size = best_p->size - rounded_size;
           p->base_offset = best_p->base_offset + rounded_size;
           p->full_size = best_p->full_size - rounded_size;
           p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
@@ -857,25 +868,27 @@
   /* If we still didn't find one, make a new temporary.  */
   if (selected == 0)
     {
       HOST_WIDE_INT frame_offset_old = frame_offset;
 
-      p = GGC_NEW (struct temp_slot);
+      p = ggc_alloc_temp_slot ();
 
       /* We are passing an explicit alignment request to assign_stack_local.
          One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.
 
         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
-      p->slot = assign_stack_local (mode,
-                                    (mode == BLKmode
-                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
-                                     : size),
-                                    align);
+      p->slot = assign_stack_local_1 (mode,
+                                      (mode == BLKmode
+                                       ? CEIL_ROUND (size,
+                                                     (int) align
+                                                     / BITS_PER_UNIT)
+                                       : size),
+                                      align, 0);
 
       p->align = align;
 
       /* The following slot size computation is necessary because we don't
          know the actual size of the temporary slot until assign_stack_local
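The comment above leans on CEIL_ROUND to round BLKmode sizes up to the alignment, and the new call passes kind 0 so temp-slot padding is not double-tracked in the frame-space list. A quick standalone check of the rounding arithmetic (plain C macro; GCC's own CEIL_ROUND definition may differ in detail):

    #include <stdio.h>

    #define CEIL_ROUND(x, a) ((((x) + (a) - 1) / (a)) * (a))

    int
    main (void)
    {
      /* A 20-byte BLKmode request at 16-byte alignment occupies 32 bytes.  */
      printf ("%d\n", CEIL_ROUND (20, 16));   /* prints 32 */
      return 0;
    }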
@@ -927,12 +940,15 @@
 
   /* If a type is specified, set the relevant flags.  */
   if (type != 0)
     {
       MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
-      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
-                                  || TREE_CODE (type) == COMPLEX_TYPE));
+      gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
+      if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
+        MEM_IN_STRUCT_P (slot) = 1;
+      else
+        MEM_SCALAR_P (slot) = 1;
     }
   MEM_NOTRAP_P (slot) = 1;
 
   return slot;
 }
@@ -1403,10 +1419,15 @@
       new_rtx = frame_pointer_rtx;
 #else
       new_rtx = arg_pointer_rtx;
 #endif
       offset = cfa_offset;
     }
+  else if (x == virtual_preferred_stack_boundary_rtx)
+    {
+      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
+      offset = 0;
+    }
   else
     return NULL_RTX;
 
   *poffset = offset;
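The new case folds virtual_preferred_stack_boundary_rtx to a plain constant rather than a base register plus offset. A standalone model of this dispatch (plain C; the names and numbers are illustrative, not GCC's):

    #include <stdio.h>

    enum vbase { VIRT_INCOMING_ARGS, VIRT_STACK_VARS, VIRT_PREF_BOUNDARY };

    /* Illustrative offsets only.  */
    static const long in_args_offset = 16, var_offset = -48;
    static const long preferred_stack_boundary = 128;  /* bits */

    static int
    instantiate (enum vbase x, const char **base, long *offset)
    {
      switch (x)
        {
        case VIRT_INCOMING_ARGS:
          *base = "arg_pointer";   *offset = in_args_offset;  return 1;
        case VIRT_STACK_VARS:
          *base = "frame_pointer"; *offset = var_offset;      return 1;
        case VIRT_PREF_BOUNDARY:
          /* Not a pointer at all: becomes the boundary in bytes.  */
          *base = "const_int";     *offset = preferred_stack_boundary / 8;
          return 1;
        default:
          return 0;
        }
    }

    int
    main (void)
    {
      const char *base; long off;
      if (instantiate (VIRT_PREF_BOUNDARY, &base, &off))
        printf ("%s %ld\n", base, off);   /* const_int 16 */
      return 0;
    }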
@@ -1778,12 +1799,25 @@
 {
   tree t = *tp;
   if (! EXPR_P (t))
     {
       *walk_subtrees = 0;
-      if (DECL_P (t) && DECL_RTL_SET_P (t))
-        instantiate_decl_rtl (DECL_RTL (t));
+      if (DECL_P (t))
+        {
+          if (DECL_RTL_SET_P (t))
+            instantiate_decl_rtl (DECL_RTL (t));
+          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
+              && DECL_INCOMING_RTL (t))
+            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
+          if ((TREE_CODE (t) == VAR_DECL
+               || TREE_CODE (t) == RESULT_DECL)
+              && DECL_HAS_VALUE_EXPR_P (t))
+            {
+              tree v = DECL_VALUE_EXPR (t);
+              walk_tree (&v, instantiate_expr, NULL, NULL);
+            }
+        }
     }
   return NULL;
 }
 
 /* Subroutine of instantiate_decls: Process all decls in the given
@@ -1792,11 +1826,11 @@
 static void
 instantiate_decls_1 (tree let)
 {
   tree t;
 
-  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
+  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
     {
       if (DECL_RTL_SET_P (t))
         instantiate_decl_rtl (DECL_RTL (t));
       if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
         {
@@ -1814,37 +1848,44 @@
    all virtual registers in their DECL_RTL's.  */
 
 static void
 instantiate_decls (tree fndecl)
 {
-  tree decl, t, next;
+  tree decl;
+  unsigned ix;
 
   /* Process all parameters of the function.  */
-  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
+  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
     {
       instantiate_decl_rtl (DECL_RTL (decl));
       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
       if (DECL_HAS_VALUE_EXPR_P (decl))
         {
           tree v = DECL_VALUE_EXPR (decl);
           walk_tree (&v, instantiate_expr, NULL, NULL);
         }
     }
 
+  if ((decl = DECL_RESULT (fndecl))
+      && TREE_CODE (decl) == RESULT_DECL)
+    {
+      if (DECL_RTL_SET_P (decl))
+        instantiate_decl_rtl (DECL_RTL (decl));
+      if (DECL_HAS_VALUE_EXPR_P (decl))
+        {
+          tree v = DECL_VALUE_EXPR (decl);
+          walk_tree (&v, instantiate_expr, NULL, NULL);
+        }
+    }
+
   /* Now process all variables defined in the function or its subblocks.  */
   instantiate_decls_1 (DECL_INITIAL (fndecl));
 
-  t = cfun->local_decls;
-  cfun->local_decls = NULL_TREE;
-  for (; t; t = next)
-    {
-      next = TREE_CHAIN (t);
-      decl = TREE_VALUE (t);
-      if (DECL_RTL_SET_P (decl))
-        instantiate_decl_rtl (DECL_RTL (decl));
-      ggc_free (t);
-    }
+  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
+    if (DECL_RTL_SET_P (decl))
+      instantiate_decl_rtl (DECL_RTL (decl));
+  VEC_free (tree, gc, cfun->local_decls);
 }
 
 /* Pass through the INSNS of function FNDECL and convert virtual register
    references to hard register references.  */
 
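cfun->local_decls is now a vector scanned with FOR_EACH_LOCAL_DECL instead of a TREE_LIST walked and freed cell by cell. A standalone illustration of an index-based iteration macro of this shape (plain C, not GCC's VEC API):

    #include <stdio.h>

    /* Visit each element X at index I of an array VEC of length N.  */
    #define FOR_EACH_ELT(vec, n, i, x) \
      for ((i) = 0; (i) < (n) && ((x) = (vec)[(i)], 1); (i)++)

    int
    main (void)
    {
      const char *decls[] = { "a", "b", "c" };
      unsigned ix;
      const char *d;

      FOR_EACH_ELT (decls, 3, ix, d)
        printf ("local decl %u: %s\n", ix, d);
      return 0;
    }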
@@ -1903,10 +1944,22 @@
   targetm.instantiate_decls ();
 
   /* Indicate that, from now on, assign_stack_local should use
      frame_pointer_rtx.  */
   virtuals_instantiated = 1;
+
+  /* See allocate_dynamic_stack_space for the rationale.  */
+#ifdef SETJMP_VIA_SAVE_AREA
+  if (flag_stack_usage && cfun->calls_setjmp)
+    {
+      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+      dynamic_offset = (dynamic_offset + align - 1) / align * align;
+      current_function_dynamic_stack_size
+        += current_function_dynamic_alloc_count * dynamic_offset;
+    }
+#endif
+
   return 0;
 }
 
 struct rtl_opt_pass pass_instantiate_virtual_regs =
 {
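The new stack-usage code rounds dynamic_offset up to the preferred boundary with (x + align - 1) / align * align. A quick standalone check of that identity:

    #include <stdio.h>

    int
    main (void)
    {
      int align = 16;   /* e.g. PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT */
      int x;
      for (x = 15; x <= 17; x++)
        printf ("%d -> %d\n", x, (x + align - 1) / align * align);
      /* 15 -> 16, 16 -> 16, 17 -> 32 */
      return 0;
    }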
@@ -2175,11 +2228,11 @@
 split_complex_args (VEC(tree, heap) **args)
 {
   unsigned i;
   tree p;
 
-  for (i = 0; VEC_iterate (tree, *args, i, p); ++i)
+  FOR_EACH_VEC_ELT (tree, *args, i, p)
     {
       tree type = TREE_TYPE (p);
       if (TREE_CODE (type) == COMPLEX_TYPE
           && targetm.calls.split_complex_arg (type))
         {
@@ -2226,10 +2279,10 @@
   tree fndecl = current_function_decl;
   tree fntype = TREE_TYPE (fndecl);
   VEC(tree, heap) *fnargs = NULL;
   tree arg;
 
-  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = TREE_CHAIN (arg))
+  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
     VEC_safe_push (tree, heap, fnargs, arg);
 
   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
 
   /* If struct value address is treated as the first argument, make it so.  */
@@ -2240,16 +2293,17 @@
     {
       tree type = build_pointer_type (TREE_TYPE (fntype));
       tree decl;
 
       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
-                         PARM_DECL, NULL_TREE, type);
+                         PARM_DECL, get_identifier (".result_ptr"), type);
       DECL_ARG_TYPE (decl) = type;
       DECL_ARTIFICIAL (decl) = 1;
-      DECL_IGNORED_P (decl) = 1;
-
-      TREE_CHAIN (decl) = all->orig_fnargs;
+      DECL_NAMELESS (decl) = 1;
+      TREE_CONSTANT (decl) = 1;
+
+      DECL_CHAIN (decl) = all->orig_fnargs;
       all->orig_fnargs = decl;
       VEC_safe_insert (tree, heap, fnargs, 0, decl);
 
       all->function_result_decl = decl;
     }
@@ -2276,11 +2330,11 @@
   memset (data, 0, sizeof (*data));
 
   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
   if (!cfun->stdarg)
     data->named_arg = 1;  /* No variadic parms.  */
-  else if (TREE_CHAIN (parm))
+  else if (DECL_CHAIN (parm))
     data->named_arg = 1;  /* Not the last non-variadic parm.  */
   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
   else
     data->named_arg = 0;  /* Treat as variadic.  */
@@ -2372,17 +2426,14 @@
     {
       data->entry_parm = data->stack_parm = const0_rtx;
       return;
     }
 
-#ifdef FUNCTION_INCOMING_ARG
-  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
-                                      data->passed_type, data->named_arg);
-#else
-  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
-                             data->passed_type, data->named_arg);
-#endif
+  entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
+                                                    data->promoted_mode,
+                                                    data->passed_type,
+                                                    data->named_arg);
 
   if (entry_parm == 0)
     data->promoted_mode = data->passed_mode;
 
   /* Determine parm's home in the stack, in case it arrives in the stack
@@ -2402,17 +2453,13 @@
   if (!in_regs && !data->named_arg)
     {
       if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
         {
           rtx tem;
-#ifdef FUNCTION_INCOMING_ARG
-          tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
-                                       data->passed_type, true);
-#else
-          tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
-                              data->passed_type, true);
-#endif
+          tem = targetm.calls.function_incoming_arg (&all->args_so_far,
+                                                     data->promoted_mode,
+                                                     data->passed_type, true);
           in_regs = tem != NULL;
         }
     }
 
   /* If this parameter was passed both in registers and in the stack, use
@@ -2559,11 +2606,11 @@
 
   boundary = data->locate.boundary;
   align = BITS_PER_UNIT;
 
   /* If we're padding upward, we know that the alignment of the slot
-     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
+     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
      intentionally forcing upward padding.  Otherwise we have to come
      up with a guess at the alignment based on OFFSET_RTX.  */
   if (data->locate.where_pad != downward || data->entry_parm)
     align = boundary;
   else if (CONST_INT_P (offset_rtx))
@@ -2865,21 +2912,38 @@
 
   data->stack_parm = stack_parm;
   SET_DECL_RTL (parm, stack_parm);
 }
 
+/* A subroutine of assign_parm_setup_reg, called through note_stores.
+   This collects sets and clobbers of hard registers in a HARD_REG_SET,
+   which is pointed to by DATA.  */
+static void
+record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
+{
+  HARD_REG_SET *pset = (HARD_REG_SET *)data;
+  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
+    {
+      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
+      while (nregs-- > 0)
+        SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
+    }
+}
+
 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
    parameter.  Get it there.  Perform all ABI specified conversions.  */
 
 static void
 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                        struct assign_parm_data_one *data)
 {
-  rtx parmreg;
+  rtx parmreg, validated_mem;
+  rtx equiv_stack_parm;
   enum machine_mode promoted_nominal_mode;
   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
   bool did_conversion = false;
+  bool need_conversion, moved;
 
   /* Store the parm in a pseudoregister during the function, but we may
      need to do it in a wider mode.  Using 2 here makes the result
      consistent with promote_decl_mode and thus expand_expr_real_1.  */
   promoted_nominal_mode
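record_hard_reg_sets is a note_stores callback that accumulates every clobbered hard register into a set. A standalone model of the idea, using a plain bitmask in place of HARD_REG_SET (illustration only, not GCC code):

    #include <stdio.h>

    #define FIRST_PSEUDO_REGISTER 32
    typedef unsigned long hard_reg_set;   /* one bit per hard register */

    /* Callback: record a register number an insn stores to.  */
    static void
    record_hard_reg (int regno, void *data)
    {
      hard_reg_set *pset = data;
      if (regno < FIRST_PSEUDO_REGISTER)
        *pset |= 1ul << regno;
    }

    int
    main (void)
    {
      hard_reg_set clobbered = 0;
      record_hard_reg (3, &clobbered);    /* pretend an insn sets reg 3 */
      record_hard_reg (40, &clobbered);   /* a pseudo: ignored */
      printf ("safe to move: %s\n", clobbered == 0 ? "yes" : "no");
      return 0;
    }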
@@ -2904,15 +2968,23 @@
 
   assign_parm_remove_parallels (data);
 
   /* Copy the value into the register, thus bridging between
      assign_parm_find_data_types and expand_expr_real_1.  */
-  if (data->nominal_mode != data->passed_mode
-      || promoted_nominal_mode != data->promoted_mode)
-    {
-      int save_tree_used;
-
+
+  equiv_stack_parm = data->stack_parm;
+  validated_mem = validize_mem (data->entry_parm);
+
+  need_conversion = (data->nominal_mode != data->passed_mode
+                     || promoted_nominal_mode != data->promoted_mode);
+  moved = false;
+
+  if (need_conversion
+      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
+      && data->nominal_mode == data->passed_mode
+      && data->nominal_mode == GET_MODE (data->entry_parm))
+    {
       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
          mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.
@@ -2924,17 +2996,75 @@
          occurs when the calling sequence allow such misaligned
         usages.
 
         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
-        registers yet.  Therefore, we must first copy the parm to
-        a pseudo reg here, and save the conversion until after all
-        parameters have been moved.  */
-
-      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
-
-      emit_move_insn (tempreg, validize_mem (data->entry_parm));
+        registers yet.
+
+        First, we try to emit an insn which performs the necessary
+        conversion.  We verify that this insn does not clobber any
+        hard registers.  */
+
+      enum insn_code icode;
+      rtx op0, op1;
+
+      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
+                            unsignedp);
+
+      op0 = parmreg;
+      op1 = validated_mem;
+      if (icode != CODE_FOR_nothing
+          && insn_data[icode].operand[0].predicate (op0, promoted_nominal_mode)
+          && insn_data[icode].operand[1].predicate (op1, data->passed_mode))
+        {
+          enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
+          rtx insn, insns;
+          HARD_REG_SET hardregs;
+
+          start_sequence ();
+          insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
+                                  data->passed_mode, unsignedp);
+          emit_insn (insn);
+          insns = get_insns ();
+
+          moved = true;
+          CLEAR_HARD_REG_SET (hardregs);
+          for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
+            {
+              if (INSN_P (insn))
+                note_stores (PATTERN (insn), record_hard_reg_sets,
+                             &hardregs);
+              if (!hard_reg_set_empty_p (hardregs))
+                moved = false;
+            }
+
+          end_sequence ();
+
+          if (moved)
+            {
+              emit_insn (insns);
+              if (equiv_stack_parm != NULL_RTX)
+                equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
+                                                  equiv_stack_parm);
+            }
+        }
+    }
+
+  if (moved)
+    /* Nothing to do.  */
+    ;
+  else if (need_conversion)
+    {
+      /* We did not have an insn to convert directly, or the sequence
+         generated appeared unsafe.  We must first copy the parm to a
+        pseudo reg, and save the conversion until after all
+        parameters have been moved.  */
+
+      int save_tree_used;
+      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
+
+      emit_move_insn (tempreg, validated_mem);
 
       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
 
       if (GET_CODE (tempreg) == SUBREG
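The new path builds the extension insn in a detached sequence, scans it for hard-register clobbers, and only commits it when clean, otherwise falling back to the old copy-then-convert route. A standalone sketch of that emit-inspect-commit-or-fall-back shape (plain C with illustrative types, not GCC code):

    #include <stdio.h>

    struct seq { const char *text; int clobbers_hard_reg; };

    /* Pretend code generator: the wide form needs a scratch register.  */
    static struct seq
    gen_candidate (int wide)
    {
      struct seq s;
      s.text = wide ? "extend32" : "move8";
      s.clobbers_hard_reg = wide;
      return s;
    }

    int
    main (void)
    {
      struct seq s = gen_candidate (1);
      if (!s.clobbers_hard_reg)
        printf ("commit: %s\n", s.text);          /* like emit_insn (insns) */
      else
        printf ("fall back to two-step copy\n");  /* like the tempreg path */
      return 0;
    }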
@@ -2960,11 +3090,11 @@
       end_sequence ();
 
       did_conversion = true;
     }
   else
-    emit_move_insn (parmreg, validize_mem (data->entry_parm));
+    emit_move_insn (parmreg, validated_mem);
 
   /* If we were passed a pointer but the actual value can safely live
      in a register, put it in one.  */
   if (data->passed_pointer
       && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
@@ -3045,11 +3175,11 @@
               set_unique_reg_note (sinsn, REG_EQUIV, stackr);
           }
       }
     else if ((set = single_set (linsn)) != 0
              && SET_DEST (set) == parmreg)
-      set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
+      set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
   }
 
   /* For pointer data type, suggest pointer register.  */
   if (POINTER_TYPE_P (TREE_TYPE (parm)))
     mark_reg_pointer (parmreg,
@@ -3223,11 +3353,11 @@
     = targetm.calls.internal_arg_pointer ();
 
   assign_parms_initialize_all (&all);
   fnargs = assign_parms_augmented_arg_list (&all);
 
-  for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i)
+  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
     {
       struct assign_parm_data_one data;
 
       /* Extract the type of PARM; adjust it according to ABI.  */
       assign_parm_find_data_types (&all, parm, &data);
@@ -3241,12 +3371,13 @@
         }
 
       /* Estimate stack alignment from parameter alignment.  */
       if (SUPPORTS_STACK_ALIGNMENT)
         {
-          unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
-                                                      data.passed_type);
+          unsigned int align
+            = targetm.calls.function_arg_boundary (data.promoted_mode,
+                                                   data.passed_type);
           align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
                                      align);
           if (TYPE_ALIGN (data.nominal_type) > align)
             align = MINIMUM_ALIGNMENT (data.nominal_type,
                                        TYPE_MODE (data.nominal_type),
@@ -3256,11 +3387,11 @@
               gcc_assert (!crtl->stack_realign_processed);
               crtl->stack_alignment_estimated = align;
             }
         }
 
-      if (cfun->stdarg && !TREE_CHAIN (parm))
+      if (cfun->stdarg && !DECL_CHAIN (parm))
         assign_parms_setup_varargs (&all, &data, false);
 
       /* Find out where the parameter arrives in this function.  */
       assign_parm_find_entry_rtl (&all, &data);
 
@@ -3273,12 +3404,12 @@
 
       /* Record permanently how this parm was passed.  */
       set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
 
      /* Update info on where next arg arrives in registers.  */
-      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
-                            data.passed_type, data.named_arg);
+      targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+                                          data.passed_type, data.named_arg);
 
       assign_parm_adjust_stack_rtl (&data);
 
       if (assign_parm_setup_block_p (&data))
         assign_parm_setup_block (&all, parm, &data);
@@ -3328,17 +3459,26 @@
       tree result = DECL_RESULT (current_function_decl);
       rtx addr = DECL_RTL (all.function_result_decl);
       rtx x;
 
       if (DECL_BY_REFERENCE (result))
-        x = addr;
+        {
+          SET_DECL_VALUE_EXPR (result, all.function_result_decl);
+          x = addr;
+        }
       else
         {
+          SET_DECL_VALUE_EXPR (result,
+                               build1 (INDIRECT_REF, TREE_TYPE (result),
+                                       all.function_result_decl));
           addr = convert_memory_address (Pmode, addr);
           x = gen_rtx_MEM (DECL_MODE (result), addr);
           set_mem_attributes (x, result, 1);
         }
+
+      DECL_HAS_VALUE_EXPR_P (result) = 1;
+
       SET_DECL_RTL (result, x);
     }
 
   /* We have aligned all the args, so add space for the pretend args.  */
   crtl->args.pretend_args_size = all.pretend_args_size;
@@ -3367,12 +3507,13 @@
 #endif
 
   /* See how many bytes, if any, of its args a function should try to pop
      on return.  */
 
-  crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
-                                           crtl->args.size);
+  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
+                                                          TREE_TYPE (fndecl),
+                                                          crtl->args.size);
 
   /* For stdarg.h function, save info about
      regs and stack space used by the named args.  */
 
   crtl->args.info = all.args_so_far;
@@ -3450,24 +3591,24 @@
   unsigned i;
 
   assign_parms_initialize_all (&all);
   fnargs = assign_parms_augmented_arg_list (&all);
 
-  for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i)
+  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
     {
       struct assign_parm_data_one data;
 
       /* Extract the type of PARM; adjust it according to ABI.  */
       assign_parm_find_data_types (&all, parm, &data);
 
       /* Early out for errors and void parameters.  */
       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
         continue;
 
       /* Update info on where next arg arrives in registers.  */
-      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
-                            data.passed_type, data.named_arg);
+      targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+                                          data.passed_type, data.named_arg);
 
       /* ??? Once upon a time variable_size stuffed parameter list
          SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
@@ -3493,32 +3634,32 @@
           if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
               && !(flag_stack_check == GENERIC_STACK_CHECK
                    && compare_tree_int (DECL_SIZE_UNIT (parm),
                                         STACK_CHECK_MAX_VAR_SIZE) > 0))
             {
-              local = create_tmp_var (type, get_name (parm));
+              local = create_tmp_reg (type, get_name (parm));
               DECL_IGNORED_P (local) = 0;
               /* If PARM was addressable, move that flag over
                  to the local copy, as its address will be taken,
-                 not the PARMs.  */
+                 not the PARMs.  Keep the parms address taken
+                 as we'll query that flag during gimplification.  */
               if (TREE_ADDRESSABLE (parm))
-                {
-                  TREE_ADDRESSABLE (parm) = 0;
-                  TREE_ADDRESSABLE (local) = 1;
-                }
+                TREE_ADDRESSABLE (local) = 1;
             }
           else
             {
               tree ptr_type, addr;
 
               ptr_type = build_pointer_type (type);
-              addr = create_tmp_var (ptr_type, get_name (parm));
+              addr = create_tmp_reg (ptr_type, get_name (parm));
               DECL_IGNORED_P (addr) = 0;
               local = build_fold_indirect_ref (addr);
 
               t = built_in_decls[BUILT_IN_ALLOCA];
               t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
+              /* The call has been built for a variable-sized object.  */
+              ALLOCA_FOR_VAR_P (t) = 1;
               t = fold_convert (ptr_type, t);
               t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
               gimplify_and_add (t, &stmts);
             }
 
@@ -3551,13 +3692,14 @@
    never be set if REG_PARM_STACK_SPACE is not defined.
 
    FNDECL is the function in which the argument was defined.
 
    There are two types of rounding that are done.  The first, controlled by
-   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
-   list to be aligned to the specific boundary (in bits).  This rounding
-   affects the initial and starting offsets, but not the argument size.
+   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
+   argument list to be aligned to the specific boundary (in bits).  This
+   rounding affects the initial and starting offsets, but not the argument
+   size.
 
    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
    optionally rounds the size of the parm to PARM_BOUNDARY.  The
    initial offset is not affected by this rounding, while the size always
    is and the starting offset may be.  */
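The first rounding described above aligns a byte offset to a boundary expressed in bits. A standalone check of the byte arithmetic (plain C; the boundary and offset values are illustrative):

    #include <stdio.h>

    static long
    align_offset (long offset_bytes, int boundary_bits)
    {
      int b = boundary_bits / 8;                 /* boundary in bytes */
      return (offset_bytes + b - 1) / b * b;
    }

    int
    main (void)
    {
      /* A 64-bit (8-byte) boundary pushes offset 4 up to 8; the argument
         size itself is unaffected by this first kind of rounding.  */
      printf ("%ld\n", align_offset (4, 64));    /* prints 8 */
      return 0;
    }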
@@ -3604,11 +3746,11 @@
   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
 
   sizetree
     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
   where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
-  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
+  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
   locate->where_pad = where_pad;
 
   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
@@ -3835,11 +3977,11 @@
 static void
 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
 {
   tree decl, sub;
 
-  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
+  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
     {
       if (TREE_CODE (decl) == VAR_DECL
           && DECL_RTL_SET_P (decl)
           && REG_P (DECL_RTL (decl))
           && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
@@ -3857,11 +3999,11 @@
 static void
 setjmp_args_warning (bitmap setjmp_crosses)
 {
   tree decl;
   for (decl = DECL_ARGUMENTS (current_function_decl);
-       decl; decl = TREE_CHAIN (decl))
+       decl; decl = DECL_CHAIN (decl))
     if (DECL_RTL (decl) != 0
         && REG_P (DECL_RTL (decl))
         && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
       warning (OPT_Wclobbered,
                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
@@ -3882,10 +4024,50 @@
   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
   setjmp_args_warning (setjmp_crosses);
 }
 
 
+/* Reverse the order of elements in the fragment chain T of blocks,
+   and return the new head of the chain (old last element).  */
+
+static tree
+block_fragments_nreverse (tree t)
+{
+  tree prev = 0, block, next;
+  for (block = t; block; block = next)
+    {
+      next = BLOCK_FRAGMENT_CHAIN (block);
+      BLOCK_FRAGMENT_CHAIN (block) = prev;
+      prev = block;
+    }
+  return prev;
+}
+
+/* Reverse the order of elements in the chain T of blocks,
+   and return the new head of the chain (old last element).
+   Also do the same on subblocks and reverse the order of elements
+   in BLOCK_FRAGMENT_CHAIN as well.  */
+
+static tree
+blocks_nreverse_all (tree t)
+{
+  tree prev = 0, block, next;
+  for (block = t; block; block = next)
+    {
+      next = BLOCK_CHAIN (block);
+      BLOCK_CHAIN (block) = prev;
+      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
+      if (BLOCK_FRAGMENT_CHAIN (block)
+          && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
+        BLOCK_FRAGMENT_CHAIN (block)
+          = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
+      prev = block;
+    }
+  return prev;
+}
+
+
 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
    and create duplicate blocks.  */
 /* ??? Need an option to either create block fragments or to create
    abstract origin duplicates of a source block.  It really depends
    on what optimization has been performed.  */
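blocks_nreverse_all reverses a chain in place by rotating three pointers, recursing into each node's sublist. A standalone demo of the same pattern on a plain C struct (not GCC trees):

    #include <stdio.h>

    struct block { int id; struct block *chain; struct block *subblocks; };

    static struct block *
    nreverse_all (struct block *t)
    {
      struct block *prev = NULL, *next;
      for (struct block *b = t; b; b = next)
        {
          next = b->chain;
          b->chain = prev;                            /* reverse this level */
          b->subblocks = nreverse_all (b->subblocks); /* and each sublist */
          prev = b;
        }
      return prev;
    }

    int
    main (void)
    {
      struct block c = { 3, NULL, NULL }, b = { 2, &c, NULL }, a = { 1, &b, NULL };
      for (struct block *p = nreverse_all (&a); p; p = p->chain)
        printf ("%d ", p->id);                        /* prints 3 2 1 */
      printf ("\n");
      return 0;
    }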
@@ -3908,11 +4090,11 @@
   BLOCK_SUBBLOCKS (block) = NULL_TREE;
   BLOCK_CHAIN (block) = NULL_TREE;
 
   /* Recreate the block tree from the note nesting.  */
   reorder_blocks_1 (get_insns (), block, &block_stack);
-  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
+  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
 
   VEC_free (tree, heap, block_stack);
 }
 
 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
@@ -3940,13 +4122,12 @@
           if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
             {
               tree block = NOTE_BLOCK (insn);
               tree origin;
 
-              origin = (BLOCK_FRAGMENT_ORIGIN (block)
-                        ? BLOCK_FRAGMENT_ORIGIN (block)
-                        : block);
+              gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
+              origin = block;
 
               /* If we have seen this block before, that means it now
                  spans multiple address regions.  Create a new fragment.  */
               if (TREE_ASM_WRITTEN (block))
                 {
3980 } 4161 }
3981 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END) 4162 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3982 { 4163 {
3983 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack); 4164 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3984 BLOCK_SUBBLOCKS (current_block)
3985 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3986 current_block = BLOCK_SUPERCONTEXT (current_block); 4165 current_block = BLOCK_SUPERCONTEXT (current_block);
3987 } 4166 }
3988 } 4167 }
3989 } 4168 }
3990 } 4169 }
@@ -3993,16 +4172,16 @@
    and return the new head of the chain (old last element).  */
 
 tree
 blocks_nreverse (tree t)
 {
-  tree prev = 0, decl, next;
-  for (decl = t; decl; decl = next)
+  tree prev = 0, block, next;
+  for (block = t; block; block = next)
     {
-      next = BLOCK_CHAIN (decl);
-      BLOCK_CHAIN (decl) = prev;
-      prev = decl;
+      next = BLOCK_CHAIN (block);
+      BLOCK_CHAIN (block) = prev;
+      prev = block;
     }
   return prev;
 }
 
 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
@@ -4082,11 +4261,11 @@
       return;
     }
 
 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
 
-tree
+DEBUG_FUNCTION tree
 debug_find_var_in_block_tree (tree var, tree block)
 {
   tree t;
 
   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
@@ -4127,11 +4306,11 @@
 
       /* Change optimization options if needed.  */
       if (optimization_current_node != opts)
         {
           optimization_current_node = opts;
-          cl_optimization_restore (TREE_OPTIMIZATION (opts));
+          cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
         }
 
       targetm.set_current_function (fndecl);
     }
 }
@@ -4194,11 +4373,11 @@
 allocate_struct_function (tree fndecl, bool abstract_p)
 {
   tree result;
   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
 
-  cfun = GGC_CNEW (struct function);
+  cfun = ggc_alloc_cleared_function ();
 
   init_eh_for_function ();
 
   if (init_machine_status)
     cfun->machine = (*init_machine_status) ();
@@ -4222,19 +4401,19 @@
           cfun->returns_pcc_struct = 1;
 #endif
           cfun->returns_struct = 1;
         }
 
-      cfun->stdarg
-        = (fntype
-           && TYPE_ARG_TYPES (fntype) != 0
-           && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
-               != void_type_node));
+      cfun->stdarg = stdarg_p (fntype);
 
       /* Assume all registers in stdarg functions need to be saved.  */
       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
+
+      /* ??? This could be set on a per-function basis by the front-end
+         but is this worth the hassle?  */
+      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
     }
 }
 
 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
    instead of just setting it.  */
@@ -4244,11 +4423,11 @@
 {
   VEC_safe_push (function_p, heap, cfun_stack, cfun);
   allocate_struct_function (fndecl, false);
 }
 
-/* Reset cfun, and other non-struct-function variables to defaults as
+/* Reset crtl and other non-struct-function variables to defaults as
    appropriate for emitting rtl at the start of a function.  */
 
 static void
 prepare_function_start (void)
 {
@@ -4256,10 +4435,16 @@
   init_temp_slots ();
   init_emit ();
   init_varasm_status ();
   init_expr ();
   default_rtl_profile ();
+
+  if (flag_stack_usage)
+    {
+      cfun->su = ggc_alloc_cleared_stack_usage ();
+      cfun->su->static_stack_size = -1;
+    }
 
   cse_not_expected = ! optimize;
 
   /* Caller save not needed yet.  */
   caller_save_needed = 0;
4695 do_warn_unused_parameter (tree fn) 4880 do_warn_unused_parameter (tree fn)
4696 { 4881 {
4697 tree decl; 4882 tree decl;
4698 4883
4699 for (decl = DECL_ARGUMENTS (fn); 4884 for (decl = DECL_ARGUMENTS (fn);
4700 decl; decl = TREE_CHAIN (decl)) 4885 decl; decl = DECL_CHAIN (decl))
4701 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL 4886 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4702 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl) 4887 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4703 && !TREE_NO_WARNING (decl)) 4888 && !TREE_NO_WARNING (decl))
4704 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl); 4889 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4705 } 4890 }
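The TREE_CHAIN-to-DECL_CHAIN change above is mechanical (declarations now chain through a decl-specific accessor in this release); the warning itself is unchanged. An illustrative input that still triggers it:

    /* Compiled with -Wunused-parameter: 'b' is a named, non-artificial
       PARM_DECL that is never read, so the loop above warns on it.  */
    int
    f (int a, int b)
    {
      return a;
    }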
4734 anti_adjust_stack_and_probe (max_frame_size, true); 4919 anti_adjust_stack_and_probe (max_frame_size, true);
4735 else 4920 else
4736 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size); 4921 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4737 seq = get_insns (); 4922 seq = get_insns ();
4738 end_sequence (); 4923 end_sequence ();
4924 set_insn_locators (seq, prologue_locator);
4739 emit_insn_before (seq, stack_check_probe_note); 4925 emit_insn_before (seq, stack_check_probe_note);
4740 break; 4926 break;
4741 } 4927 }
4742 } 4928 }
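The only functional change in this hunk is the new set_insn_locators call: the probe sequence is built out of line, so it must be tagged explicitly or its insns carry no source location. The resulting idiom, condensed (a fragment; probe_stack_range and stack_check_probe_note are as in the surrounding code):

    start_sequence ();
    probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
    seq = get_insns ();
    end_sequence ();
    set_insn_locators (seq, prologue_locator); /* attribute to the prologue */
    emit_insn_before (seq, stack_check_probe_note);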
4743 4929
4764 clobber_after = get_last_insn (); 4950 clobber_after = get_last_insn ();
4765 4951
4766 /* Output the label for the actual return from the function. */ 4952 /* Output the label for the actual return from the function. */
4767 emit_label (return_label); 4953 emit_label (return_label);
4768 4954
4769 if (USING_SJLJ_EXCEPTIONS) 4955 if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
4770 { 4956 {
4771 /* Let except.c know where it should emit the call to unregister 4957 /* Let except.c know where it should emit the call to unregister
4772 the function context for sjlj exceptions. */ 4958 the function context for sjlj exceptions. */
4773 if (flag_exceptions) 4959 if (flag_exceptions)
4774 sjlj_emit_function_exit_after (get_last_insn ()); 4960 sjlj_emit_function_exit_after (get_last_insn ());
4776 else 4962 else
4777 { 4963 {
4778 /* We want to ensure that instructions that may trap are not 4964 /* We want to ensure that instructions that may trap are not
4779 moved into the epilogue by scheduling, because we don't 4965 moved into the epilogue by scheduling, because we don't
4780 always emit unwind information for the epilogue. */ 4966 always emit unwind information for the epilogue. */
4781 if (flag_non_call_exceptions) 4967 if (cfun->can_throw_non_call_exceptions)
4782 emit_insn (gen_blockage ()); 4968 emit_insn (gen_blockage ());
4783 } 4969 }
4784 4970
4785 /* If this is an implementation of throw, do what's necessary to 4971 /* If this is an implementation of throw, do what's necessary to
4786 communicate between __builtin_eh_return and the epilogue. */ 4972 communicate between __builtin_eh_return and the epilogue. */
4922 emit_label (naked_return_label); 5108 emit_label (naked_return_label);
4923 5109
4924 /* @@@ This is a kludge. We want to ensure that instructions that 5110 /* @@@ This is a kludge. We want to ensure that instructions that
4925 may trap are not moved into the epilogue by scheduling, because 5111 may trap are not moved into the epilogue by scheduling, because
4926 we don't always emit unwind information for the epilogue. */ 5112 we don't always emit unwind information for the epilogue. */
4927 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions) 5113 if (cfun->can_throw_non_call_exceptions
5114 && targetm.except_unwind_info (&global_options) != UI_SJLJ)
4928 emit_insn (gen_blockage ()); 5115 emit_insn (gen_blockage ());
4929 5116
4930 /* If stack protection is enabled for this function, check the guard. */ 5117 /* If stack protection is enabled for this function, check the guard. */
4931 if (crtl->stack_protect_guard) 5118 if (crtl->stack_protect_guard)
4932 stack_protect_epilogue (); 5119 stack_protect_epilogue ();
4935 an accurate stack pointer to exit the function, 5122 an accurate stack pointer to exit the function,
4936 insert some code to save and restore the stack pointer. */ 5123 insert some code to save and restore the stack pointer. */
4937 if (! EXIT_IGNORE_STACK 5124 if (! EXIT_IGNORE_STACK
4938 && cfun->calls_alloca) 5125 && cfun->calls_alloca)
4939 { 5126 {
4940 rtx tem = 0; 5127 rtx tem = 0, seq;
4941 5128
4942 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn); 5129 start_sequence ();
4943 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX); 5130 emit_stack_save (SAVE_FUNCTION, &tem);
5131 seq = get_insns ();
5132 end_sequence ();
5133 emit_insn_before (seq, parm_birth_insn);
5134
5135 emit_stack_restore (SAVE_FUNCTION, tem);
4944 } 5136 }
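emit_stack_save and emit_stack_restore lose their trailing "where to emit" argument in this update and now emit at the current position; a caller that needs a specific placement, as here, wraps the call in a sequence, which is what the right-hand column does. Before/after, condensed (fragment, identifiers as above):

    /* Before: the helper placed the save itself.  */
    emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);

    /* After: build a sequence, then place it explicitly.  */
    start_sequence ();
    emit_stack_save (SAVE_FUNCTION, &tem);
    seq = get_insns ();
    end_sequence ();
    emit_insn_before (seq, parm_birth_insn);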
4945 5137
4946 /* ??? This should no longer be necessary since stupid is no longer with 5138 /* ??? This should no longer be necessary since stupid is no longer with
4947 us, but there are some parts of the compiler (eg reload_combine, and 5139 us, but there are some parts of the compiler (eg reload_combine, and
4948 sh mach_dep_reorg) that still try and compute their own lifetime info 5140 sh mach_dep_reorg) that still try and compute their own lifetime info
4975 end_sequence (); 5167 end_sequence ();
4976 5168
4977 push_topmost_sequence (); 5169 push_topmost_sequence ();
4978 emit_insn_after (seq, entry_of_function ()); 5170 emit_insn_after (seq, entry_of_function ());
4979 pop_topmost_sequence (); 5171 pop_topmost_sequence ();
5172
5173 crtl->arg_pointer_save_area_init = true;
4980 } 5174 }
4981 5175
4982 return ret; 5176 return ret;
4983 } 5177 }
4984 5178
5001 gcc_assert (*slot == NULL); 5195 gcc_assert (*slot == NULL);
5002 *slot = tmp; 5196 *slot = tmp;
5003 } 5197 }
5004 } 5198 }
5005 5199
5006 /* INSN has been duplicated as COPY, as part of duping a basic block. 5200 /* INSN has been duplicated as, or replaced by, COPY, perhaps while
5007 If INSN is an epilogue insn, then record COPY as epilogue as well. */ 5201 duplicating a basic block, or by splitting or peephole optimization.
5202 If INSN is a prologue or epilogue insn, then record COPY as well. */
5008 5203
5009 void 5204 void
5010 maybe_copy_epilogue_insn (rtx insn, rtx copy) 5205 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5011 { 5206 {
5207 htab_t hash;
5012 void **slot; 5208 void **slot;
5013 5209
5014 if (epilogue_insn_hash == NULL 5210 hash = epilogue_insn_hash;
5015 || htab_find (epilogue_insn_hash, insn) == NULL) 5211 if (!hash || !htab_find (hash, insn))
5016 return; 5212 {
5017 5213 hash = prologue_insn_hash;
5018 slot = htab_find_slot (epilogue_insn_hash, copy, INSERT); 5214 if (!hash || !htab_find (hash, insn))
5215 return;
5216 }
5217
5218 slot = htab_find_slot (hash, copy, INSERT);
5019 gcc_assert (*slot == NULL); 5219 gcc_assert (*slot == NULL);
5020 *slot = copy; 5220 *slot = copy;
5021 } 5221 }
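A sketch of the intended caller: any transformation that duplicates or replaces an insn invokes this afterwards so the prologue/epilogue hashes keep tracking the copy. emit_copy_of_insn_after is the real emit-rtl.c helper; the surrounding fragment is illustrative:

    rtx copy = emit_copy_of_insn_after (insn, after); /* duplicate insn */
    maybe_copy_prologue_epilogue_insn (insn, copy);   /* keep hashes in sync */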
5022 5222
5023 /* Set the locator of the insn chain starting at INSN to LOC. */ 5223 /* Set the locator of the insn chain starting at INSN to LOC. */
5079 the epilogue begins. Update the basic block information when possible. */ 5279 the epilogue begins. Update the basic block information when possible. */
5080 5280
5081 static void 5281 static void
5082 thread_prologue_and_epilogue_insns (void) 5282 thread_prologue_and_epilogue_insns (void)
5083 { 5283 {
5084 int inserted = 0; 5284 bool inserted;
5285 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5286 edge entry_edge ATTRIBUTE_UNUSED;
5085 edge e; 5287 edge e;
5086 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue) 5288 edge_iterator ei;
5087 rtx seq; 5289
5290 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5291
5292 inserted = false;
5293 seq = NULL_RTX;
5294 epilogue_end = NULL_RTX;
5295
5296 /* Can't deal with multiple successors of the entry block at the
5297 moment. Function should always have at least one entry
5298 point. */
5299 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5300 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5301
5302 if (flag_split_stack
5303 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5304 == NULL))
5305 {
5306 #ifndef HAVE_split_stack_prologue
5307 gcc_unreachable ();
5308 #else
5309 gcc_assert (HAVE_split_stack_prologue);
5310
5311 start_sequence ();
5312 emit_insn (gen_split_stack_prologue ());
5313 seq = get_insns ();
5314 end_sequence ();
5315
5316 record_insns (seq, NULL, &prologue_insn_hash);
5317 set_insn_locators (seq, prologue_locator);
5318
5319 /* This relies on the fact that committing the edge insertion
5320 will look for basic blocks within the inserted instructions,
5321 which in turn relies on the fact that we are not in CFG
5322 layout mode here. */
5323 insert_insn_on_edge (seq, entry_edge);
5324 inserted = true;
5088 #endif 5325 #endif
5089 #if defined (HAVE_epilogue) || defined(HAVE_return) 5326 }
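The block just closed implements -fsplit-stack support: every function normally gets a split_stack_prologue on the single entry edge, unless the user opts out per function via the attribute checked above. An illustrative user-level opt-out (the function name is hypothetical):

    /* With -fsplit-stack, this function is compiled without the
       stack-growth check emitted by gen_split_stack_prologue ().  */
    __attribute__ ((no_split_stack))
    void
    signal_trampoline (void)
    {
      /* ... must not grow the stack ... */
    }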
5090 rtx epilogue_end = NULL_RTX; 5327
5091 #endif
5092 edge_iterator ei;
5093
5094 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5095 #ifdef HAVE_prologue 5328 #ifdef HAVE_prologue
5096 if (HAVE_prologue) 5329 if (HAVE_prologue)
5097 { 5330 {
5098 start_sequence (); 5331 start_sequence ();
5099 seq = gen_prologue (); 5332 seq = gen_prologue ();
5106 5339
5107 /* Retain a map of the prologue insns. */ 5340 /* Retain a map of the prologue insns. */
5108 record_insns (seq, NULL, &prologue_insn_hash); 5341 record_insns (seq, NULL, &prologue_insn_hash);
5109 emit_note (NOTE_INSN_PROLOGUE_END); 5342 emit_note (NOTE_INSN_PROLOGUE_END);
5110 5343
5111 #ifndef PROFILE_BEFORE_PROLOGUE
5112 /* Ensure that instructions are not moved into the prologue when 5344 /* Ensure that instructions are not moved into the prologue when
5113 profiling is on. The call to the profiling routine can be 5345 profiling is on. The call to the profiling routine can be
5114 emitted within the live range of a call-clobbered register. */ 5346 emitted within the live range of a call-clobbered register. */
5115 if (crtl->profile) 5347 if (!targetm.profile_before_prologue () && crtl->profile)
5116 emit_insn (gen_blockage ()); 5348 emit_insn (gen_blockage ());
5117 #endif
5118 5349
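The PROFILE_BEFORE_PROLOGUE target macro is replaced by a hook call above. Presumably the default hook implementation just wraps the old macro, along these lines (a sketch; the real definition lives in the target-hook machinery, not this file, and the name here is illustrative):

    static bool
    profile_before_prologue_sketch (void)
    {
    #ifdef PROFILE_BEFORE_PROLOGUE
      return true;
    #else
      return false;
    #endif
    }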
5119 seq = get_insns (); 5350 seq = get_insns ();
5120 end_sequence (); 5351 end_sequence ();
5121 set_insn_locators (seq, prologue_locator); 5352 set_insn_locators (seq, prologue_locator);
5122 5353
5123 /* Can't deal with multiple successors of the entry block 5354 insert_insn_on_edge (seq, entry_edge);
5124 at the moment. Function should always have at least one 5355 inserted = true;
5125 entry point. */
5126 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5127
5128 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5129 inserted = 1;
5130 } 5356 }
5131 #endif 5357 #endif
5132 5358
5133 /* If the exit block has no non-fake predecessors, we don't need 5359 /* If the exit block has no non-fake predecessors, we don't need
5134 an epilogue. */ 5360 an epilogue. */
5149 emit (conditional) return instructions. */ 5375 emit (conditional) return instructions. */
5150 5376
5151 basic_block last; 5377 basic_block last;
5152 rtx label; 5378 rtx label;
5153 5379
5154 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5380 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5155 if (e->flags & EDGE_FALLTHRU)
5156 break;
5157 if (e == NULL) 5381 if (e == NULL)
5158 goto epilogue_done; 5382 goto epilogue_done;
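find_fallthru_edge replaces the open-coded loop deleted on the left (and its second occurrence further down). A sketch of the equivalent search over an edge vector; the real helper is declared in basic-block.h in this release, so this shape is assumed:

    static edge
    find_fallthru_edge_sketch (VEC(edge,gc) *edges)
    {
      edge e;
      edge_iterator ei;

      /* Return the falling-through edge, or NULL if the loop runs off
         the end (FOR_EACH_EDGE leaves E as NULL in that case).  */
      FOR_EACH_EDGE (e, ei, edges)
        if (e->flags & EDGE_FALLTHRU)
          break;
      return e;
    }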
5159 last = e->src; 5383 last = e->src;
5160 5384
5161 /* Verify that there are no active instructions in the last block. */ 5385 /* Verify that there are no active instructions in the last block. */
5272 /* Find the edge that falls through to EXIT. Other edges may exist 5496 /* Find the edge that falls through to EXIT. Other edges may exist
5273 due to RETURN instructions, but those don't need epilogues. 5497 due to RETURN instructions, but those don't need epilogues.
5274 There really shouldn't be a mixture -- either all should have 5498 There really shouldn't be a mixture -- either all should have
5275 been converted or none, however... */ 5499 been converted or none, however... */
5276 5500
5277 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5501 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5278 if (e->flags & EDGE_FALLTHRU)
5279 break;
5280 if (e == NULL) 5502 if (e == NULL)
5281 goto epilogue_done; 5503 goto epilogue_done;
5282 5504
5283 #ifdef HAVE_epilogue 5505 #ifdef HAVE_epilogue
5284 if (HAVE_epilogue) 5506 if (HAVE_epilogue)
5285 { 5507 {
5286 start_sequence (); 5508 start_sequence ();
5287 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG); 5509 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5288 seq = gen_epilogue (); 5510 seq = gen_epilogue ();
5289 emit_jump_insn (seq); 5511 if (seq)
5512 emit_jump_insn (seq);
5290 5513
5291 /* Retain a map of the epilogue insns. */ 5514 /* Retain a map of the epilogue insns. */
5292 record_insns (seq, NULL, &epilogue_insn_hash); 5515 record_insns (seq, NULL, &epilogue_insn_hash);
5293 set_insn_locators (seq, epilogue_locator); 5516 set_insn_locators (seq, epilogue_locator);
5294 5517
5295 seq = get_insns (); 5518 seq = get_insns ();
5296 end_sequence (); 5519 end_sequence ();
5297 5520
5298 insert_insn_on_edge (seq, e); 5521 insert_insn_on_edge (seq, e);
5299 inserted = 1; 5522 inserted = true;
5300 } 5523 }
5301 else 5524 else
5302 #endif 5525 #endif
5303 { 5526 {
5304 basic_block cur_bb; 5527 basic_block cur_bb;
5545 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) 5768 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5546 if (TYPE_NAME (t)) 5769 if (TYPE_NAME (t))
5547 break; 5770 break;
5548 else 5771 else
5549 t = TREE_TYPE (t); 5772 t = TREE_TYPE (t);
5773 if (TREE_CODE (t) == ERROR_MARK)
5774 return;
5550 if (TYPE_NAME (t) == NULL_TREE 5775 if (TYPE_NAME (t) == NULL_TREE
5551 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t))) 5776 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
5552 t = TYPE_MAIN_VARIANT (t); 5777 t = TYPE_MAIN_VARIANT (t);
5553 if (debug_info_level > DINFO_LEVEL_NONE) 5778 if (debug_info_level > DINFO_LEVEL_NONE)
5554 { 5779 {
5556 used_types_insert_helper (t, cfun); 5781 used_types_insert_helper (t, cfun);
5557 else 5782 else
5558 /* So this might be a type referenced by a global variable. 5783 /* So this might be a type referenced by a global variable.
5559 Record that type so that we can later decide to emit its debug 5784 Record that type so that we can later decide to emit its debug
5560 information. */ 5785 information. */
5561 types_used_by_cur_var_decl = 5786 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
5562 tree_cons (t, NULL, types_used_by_cur_var_decl);
5563
5564 } 5787 }
5565 } 5788 }
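The per-variable type list switches here from a TREE_LIST built with tree_cons to a GC'd VEC, matching the declaration change at the top of the file. A minimal usage sketch of the new representation (v and t are placeholders):

    VEC(tree,gc) *v = NULL;          /* empty vector; allocated on demand */
    VEC_safe_push (tree, gc, v, t);  /* grow (GC-allocating) and append */
    tree last = VEC_last (tree, v);  /* most recently recorded type */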
5566 5789
5567 /* Helper to hash a struct types_used_by_vars_entry. */ 5790 /* Helper to hash a struct types_used_by_vars_entry. */
5568 5791
5617 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e, 5840 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
5618 hash_types_used_by_vars_entry (&e), INSERT); 5841 hash_types_used_by_vars_entry (&e), INSERT);
5619 if (*slot == NULL) 5842 if (*slot == NULL)
5620 { 5843 {
5621 struct types_used_by_vars_entry *entry; 5844 struct types_used_by_vars_entry *entry;
5622 entry = (struct types_used_by_vars_entry*) ggc_alloc 5845 entry = ggc_alloc_types_used_by_vars_entry ();
5623 (sizeof (struct types_used_by_vars_entry));
5624 entry->type = type; 5846 entry->type = type;
5625 entry->var_decl = var_decl; 5847 entry->var_decl = var_decl;
5626 *slot = entry; 5848 *slot = entry;
5627 } 5849 }
5628 } 5850 }
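ggc_alloc_types_used_by_vars_entry is one of the gengtype-generated typed allocators this update adopts throughout (ggc_alloc_cleared_function and ggc_alloc_frame_space are others). Their assumed shape is a type-safe wrapper over the raw GC allocation, mirroring the open-coded call removed on the left; sketch only, with a hypothetical name:

    static inline struct types_used_by_vars_entry *
    ggc_alloc_types_used_by_vars_entry_sketch (void)
    {
      /* Equivalent to the old cast-and-ggc_alloc call shown on the left.  */
      return (struct types_used_by_vars_entry *)
        ggc_alloc (sizeof (struct types_used_by_vars_entry));
    }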
5650 static unsigned int 5872 static unsigned int
5651 rest_of_handle_thread_prologue_and_epilogue (void) 5873 rest_of_handle_thread_prologue_and_epilogue (void)
5652 { 5874 {
5653 if (optimize) 5875 if (optimize)
5654 cleanup_cfg (CLEANUP_EXPENSIVE); 5876 cleanup_cfg (CLEANUP_EXPENSIVE);
5877
5655 /* On some machines, the prologue and epilogue code, or parts thereof, 5878 /* On some machines, the prologue and epilogue code, or parts thereof,
5656 can be represented as RTL. Doing so lets us schedule insns between 5879 can be represented as RTL. Doing so lets us schedule insns between
5657 it and the rest of the code and also allows delayed branch 5880 it and the rest of the code and also allows delayed branch
5658 scheduling to operate in the epilogue. */ 5881 scheduling to operate in the epilogue. */
5659
5660 thread_prologue_and_epilogue_insns (); 5882 thread_prologue_and_epilogue_insns ();
5883
5884 /* The stack usage info is finalized during prologue expansion. */
5885 if (flag_stack_usage)
5886 output_stack_usage ();
5887
5661 return 0; 5888 return 0;
5662 } 5889 }
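output_stack_usage runs once per function after prologue threading because only then is the frame fully laid out. With -fstack-usage the compiler emits one record per function into a .su file; an illustrative (not verbatim) example for a function with a fixed frame:

    /* foo.c */
    int
    foo (void)
    {
      char buf[112];  /* plus saved registers and padding, say 128 total */
      return buf[0];
    }

    /* Illustrative foo.su record (location, bytes, kind -- format assumed):
       foo.c:2:5:foo        128     static                                  */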
5663 5890
5664 struct rtl_opt_pass pass_thread_prologue_and_epilogue = 5891 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5665 { 5892 {