gcc/function.c @ 57:326d9e06c2e3

changeset message: modify c-parser.c
author:            ryoma <e075725@ie.u-ryukyu.ac.jp>
date:              Mon, 15 Feb 2010 00:54:17 +0900
parents:           9907f3135723 77e2b8dfacca
children:          1b10fe6932e1
comparison of 54:f62c169bbc24 with 57:326d9e06c2e3
@@ -122,17 +122,15 @@
 struct machine_function * (*init_machine_status) (void);
 
 /* The currently compiled function.  */
 struct function *cfun = 0;
 
-/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
-static VEC(int,heap) *prologue;
-static VEC(int,heap) *epilogue;
-
-/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
-   in this function.  */
-static VEC(int,heap) *sibcall_epilogue;
+/* These hashes record the prologue and epilogue insns.  */
+static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
+  htab_t prologue_insn_hash;
+static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
+  htab_t epilogue_insn_hash;
 
 
 htab_t types_used_by_vars_hash = NULL;
 tree types_used_by_cur_var_decl = NULL;
 
@@ -145,12 +143,12 @@
 static int all_blocks (tree, tree *);
 static tree *get_block_vector (tree, int *);
 extern tree debug_find_var_in_block_tree (tree, tree);
 /* We always define `record_insns' even if it's not used so that we
    can always export `prologue_epilogue_contains'.  */
-static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
-static int contains (const_rtx, VEC(int,heap) **);
+static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
+static bool contains (const_rtx, htab_t);
 #ifdef HAVE_return
 static void emit_return_into_block (basic_block);
 #endif
 static void prepare_function_start (void);
 static void do_clobber_return_reg (rtx, void *);
@@ -209,13 +207,13 @@
    reclaim the memory.  */
 
 void
 free_after_compilation (struct function *f)
 {
-  VEC_free (int, heap, prologue);
-  VEC_free (int, heap, epilogue);
-  VEC_free (int, heap, sibcall_epilogue);
+  prologue_insn_hash = NULL;
+  epilogue_insn_hash = NULL;
+
   if (crtl->emit.regno_pointer_align)
     free (crtl->emit.regno_pointer_align);
 
   memset (crtl, 0, sizeof (struct rtl_data));
   f->eh = NULL;
@@ -243,18 +241,19 @@
    the signed target pointer arithmetics for function FUNC.  Otherwise
    return FALSE.  */
 
 bool
 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
 {
   unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
 
   if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
              /* Leave room for the fixed part of the frame.  */
              - 64 * UNITS_PER_WORD)
     {
-      error ("%Jtotal size of local objects too large", func);
+      error_at (DECL_SOURCE_LOCATION (func),
+                "total size of local objects too large");
       return TRUE;
     }
 
   return FALSE;
 }
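The guard itself is unchanged by this hunk: a frame is rejected once it can no longer be addressed by a signed Pmode offset, with 64 words held in reserve for the fixed part of the frame. A standalone sketch of that arithmetic, using illustrative 32-bit values in place of GCC's target macros:

    /* Standalone sketch of the frame_offset_overflow guard, assuming a
       32-bit Pmode and 4-byte words (illustrative values, not GCC's).  */
    #include <stdio.h>
    #include <stdint.h>

    #define PMODE_BITSIZE  32   /* stand-in for GET_MODE_BITSIZE (Pmode) */
    #define UNITS_PER_WORD 4    /* stand-in for the target macro */

    static int
    frame_offset_overflows (int64_t offset, int frame_grows_downward)
    {
      uint64_t size = frame_grows_downward ? (uint64_t) -offset
                                           : (uint64_t) offset;

      /* 2^(bits-1) is the most a signed Pmode offset can address; keep
         64 words in reserve for the fixed part of the frame.  */
      uint64_t limit = ((uint64_t) 1 << (PMODE_BITSIZE - 1))
                       - 64 * UNITS_PER_WORD;

      return size > limit;
    }

    int
    main (void)
    {
      printf ("1 MiB frame: %s\n",
              frame_offset_overflows (-(1 << 20), 1) ? "overflow" : "ok");
      printf ("2 GiB frame: %s\n",
              frame_offset_overflows (-((int64_t) 1 << 31), 1)
              ? "overflow" : "ok");
      return 0;
    }

The 2 GiB case trips the guard because 2^31 exceeds 2^31 minus the 256-byte reserve, which is exactly the boundary the GCC test draws.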
@@ -359,12 +358,12 @@
         }
     }
 
   if (crtl->stack_alignment_needed < alignment_in_bits)
     crtl->stack_alignment_needed = alignment_in_bits;
-  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
-    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
+  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
+    crtl->max_used_stack_slot_alignment = alignment_in_bits;
 
   /* Calculate how many bytes the start of local variables is off from
      stack alignment.  */
   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
@@ -452,12 +451,11 @@
 
    Automatic variables are also assigned temporary slots, at the nesting
    level where they are defined.  They are marked a "kept" so that
    free_temp_slots will not free them.  */
 
-struct temp_slot GTY(())
-{
+struct GTY(()) temp_slot {
   /* Points to next temporary slot.  */
   struct temp_slot *next;
   /* Points to previous temporary slot.  */
   struct temp_slot *prev;
   /* The rtx to used to reference the slot.  */
@@ -490,12 +488,11 @@
 /* A table of addresses that represent a stack slot.  The table is a mapping
    from address RTXen to a temp slot.  */
 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
 
 /* Entry for the above hash table.  */
-struct temp_slot_address_entry GTY(())
-{
+struct GTY(()) temp_slot_address_entry {
   hashval_t hash;
   rtx address;
   struct temp_slot *temp_slot;
 };
 
@@ -660,11 +657,11 @@
     return p;
 
   /* Last resort: Address is a virtual stack var address.  */
   if (GET_CODE (x) == PLUS
       && XEXP (x, 0) == virtual_stack_vars_rtx
-      && GET_CODE (XEXP (x, 1)) == CONST_INT)
+      && CONST_INT_P (XEXP (x, 1)))
     {
       int i;
       for (i = max_slot_level (); i >= 0; i--)
         for (p = *temp_slots_at_level (i); p; p = p->next)
           {
@@ -711,11 +708,11 @@
   align = get_stack_local_alignment (type, mode);
 
   /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.
 
     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
   if (!virtuals_instantiated)
     {
@@ -927,11 +924,11 @@
       return tmp;
     }
 
 #ifdef PROMOTE_MODE
   if (! dont_promote)
-    mode = promote_mode (type, mode, &unsignedp, 0);
+    mode = promote_mode (type, mode, &unsignedp);
 #endif
 
   return gen_reg_rtx (mode);
 }
 
@@ -1165,21 +1162,28 @@
 
 void
 free_temp_slots (void)
 {
   struct temp_slot *p, *next;
+  bool some_available = false;
 
   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
     {
       next = p->next;
 
       if (!p->keep)
-        make_slot_available (p);
-    }
-
-  remove_unused_temp_slot_addresses ();
-  combine_temp_slots ();
+        {
+          make_slot_available (p);
+          some_available = true;
+        }
+    }
+
+  if (some_available)
+    {
+      remove_unused_temp_slot_addresses ();
+      combine_temp_slots ();
+    }
 }
 
 /* Push deeper into the nesting level for stack temporaries.  */
 
 void
@@ -1193,19 +1197,24 @@
 
 void
 pop_temp_slots (void)
 {
   struct temp_slot *p, *next;
+  bool some_available = false;
 
   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
     {
       next = p->next;
       make_slot_available (p);
+      some_available = true;
     }
 
-  remove_unused_temp_slot_addresses ();
-  combine_temp_slots ();
+  if (some_available)
+    {
+      remove_unused_temp_slot_addresses ();
+      combine_temp_slots ();
+    }
 
   temp_slot_level--;
 }
 
 /* Initialize temporary slots.  */
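Both free_temp_slots and pop_temp_slots now remember whether any slot was actually released, and skip remove_unused_temp_slot_addresses and combine_temp_slots otherwise. A minimal standalone sketch of the same guard, with made-up slot types rather than GCC's:

    /* Standalone sketch of the "some_available" guard added above: track
       whether the loop freed anything, and only then pay for the global
       cleanup passes.  Names and types are illustrative, not GCC's.  */
    #include <stdbool.h>
    #include <stdio.h>

    struct slot { struct slot *next; bool keep; bool in_use; };

    static void cleanup_addresses (void) { puts ("expensive address scan"); }
    static void combine_slots (void)     { puts ("expensive combining"); }

    static void
    free_slots (struct slot *list)
    {
      bool some_available = false;

      for (struct slot *p = list; p; p = p->next)
        if (!p->keep && p->in_use)
          {
            p->in_use = false;       /* make_slot_available analogue */
            some_available = true;
          }

      /* Skip both passes entirely when nothing changed.  */
      if (some_available)
        {
          cleanup_addresses ();
          combine_slots ();
        }
    }

    int
    main (void)
    {
      struct slot b = { 0, true, true }, a = { &b, false, true };
      free_slots (&a);   /* frees A, so the cleanup passes run */
      free_slots (&a);   /* nothing freed this time; passes skipped */
      return 0;
    }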
@@ -1463,11 +1472,11 @@
          operands remain valid if they're modified in place.  */
       if (GET_CODE (SET_SRC (set)) == PLUS
           && recog_data.n_operands >= 3
           && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
           && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
-          && GET_CODE (recog_data.operand[2]) == CONST_INT
+          && CONST_INT_P (recog_data.operand[2])
           && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
         {
           offset += INTVAL (recog_data.operand[2]);
 
           /* If the sum is zero, then replace with a plain move.  */
@@ -1599,11 +1608,17 @@
              Validate the new value vs the insn predicate.  Note that
              asm insns will have insn_code -1 here.  */
           if (!safe_insn_predicate (insn_code, i, x))
             {
               start_sequence ();
-              x = force_reg (insn_data[insn_code].operand[i].mode, x);
+              if (REG_P (x))
+                {
+                  gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
+                  x = copy_to_reg (x);
+                }
+              else
+                x = force_reg (insn_data[insn_code].operand[i].mode, x);
               seq = get_insns ();
               end_sequence ();
               if (seq)
                 emit_insn_before (seq, insn);
             }
@@ -1780,20 +1795,23 @@
           || GET_CODE (PATTERN (insn)) == CLOBBER
           || GET_CODE (PATTERN (insn)) == ADDR_VEC
           || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
           || GET_CODE (PATTERN (insn)) == ASM_INPUT)
         continue;
-
-      instantiate_virtual_regs_in_insn (insn);
+      else if (DEBUG_INSN_P (insn))
+        for_each_rtx (&INSN_VAR_LOCATION (insn),
+                      instantiate_virtual_regs_in_rtx, NULL);
+      else
+        instantiate_virtual_regs_in_insn (insn);
 
       if (INSN_DELETED_P (insn))
         continue;
 
       for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
 
       /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
-      if (GET_CODE (insn) == CALL_INSN)
+      if (CALL_P (insn))
         for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                       instantiate_virtual_regs_in_rtx, NULL);
     }
 
 /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
@@ -1815,11 +1833,11 @@
   NULL,                                 /* gate */
   instantiate_virtual_regs,             /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   TODO_dump_func                        /* todo_flags_finish */
@@ -1842,11 +1860,11 @@
 
   /* DECL node associated with FNTYPE when relevant, which we might need to
      check for by-invisible-reference returns, typically for CALL_EXPR input
      EXPressions.  */
   const_tree fndecl = NULL_TREE;
 
   if (fntype)
     switch (TREE_CODE (fntype))
       {
       case CALL_EXPR:
         fndecl = get_callee_fndecl (fntype);
@@ -1885,11 +1903,11 @@
      mechanism but might now be causing troubles at gimplification time if
      temporaries with the function type need to be created.  */
   if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
       && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
     return 1;
 
   if (targetm.calls.return_in_memory (type, fntype))
     return 1;
   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
      and thus can't be returned in registers.  */
   if (TREE_ADDRESSABLE (type))
@@ -1919,11 +1937,11 @@
 bool
 use_register_for_decl (const_tree decl)
 {
   if (!targetm.calls.allocate_stack_slots_for_args())
     return true;
 
   /* Honor volatile.  */
   if (TREE_SIDE_EFFECTS (decl))
     return false;
 
   /* Honor addressability.  */
@@ -2106,11 +2124,12 @@
       DECL_IGNORED_P (p) = addressable;
       TREE_ADDRESSABLE (p) = 0;
       layout_decl (p, 0);
 
       /* Build a second synthetic decl.  */
-      decl = build_decl (PARM_DECL, NULL_TREE, subtype);
+      decl = build_decl (EXPR_LOCATION (p),
+                         PARM_DECL, NULL_TREE, subtype);
       DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
       DECL_ARTIFICIAL (decl) = addressable;
       DECL_IGNORED_P (decl) = addressable;
       layout_decl (decl, 0);
 
@@ -2141,11 +2160,12 @@
       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
     {
       tree type = build_pointer_type (TREE_TYPE (fntype));
       tree decl;
 
-      decl = build_decl (PARM_DECL, NULL_TREE, type);
+      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
+                         PARM_DECL, NULL_TREE, type);
       DECL_ARG_TYPE (decl) = type;
       DECL_ARTIFICIAL (decl) = 1;
       DECL_IGNORED_P (decl) = 1;
 
       TREE_CHAIN (decl) = fnargs;
@@ -2170,10 +2190,11 @@
 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                              struct assign_parm_data_one *data)
 {
   tree nominal_type, passed_type;
   enum machine_mode nominal_mode, passed_mode, promoted_mode;
+  int unsignedp;
 
   memset (data, 0, sizeof (*data));
 
   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
   if (!cfun->stdarg)
@@ -2222,17 +2243,13 @@
       data->passed_pointer = true;
       passed_mode = nominal_mode = Pmode;
     }
 
   /* Find mode as it is passed by the ABI.  */
-  promoted_mode = passed_mode;
-  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
-    {
-      int unsignedp = TYPE_UNSIGNED (passed_type);
-      promoted_mode = promote_mode (passed_type, promoted_mode,
-                                    &unsignedp, 1);
-    }
+  unsignedp = TYPE_UNSIGNED (passed_type);
+  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
+                                         TREE_TYPE (current_function_decl), 0);
 
  egress:
   data->nominal_type = nominal_type;
   data->passed_type = passed_type;
   data->nominal_mode = nominal_mode;
@@ -2436,24 +2453,29 @@
   stack_parm = crtl->args.internal_arg_pointer;
   if (offset_rtx != const0_rtx)
     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
 
-  set_mem_attributes (stack_parm, parm, 1);
-  /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
-     while promoted mode's size is needed.  */
-  if (data->promoted_mode != BLKmode
-      && data->promoted_mode != DECL_MODE (parm))
-    {
-      set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
-      if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
-        {
-          int offset = subreg_lowpart_offset (DECL_MODE (parm),
-                                              data->promoted_mode);
-          if (offset)
-            set_mem_offset (stack_parm,
-                            plus_constant (MEM_OFFSET (stack_parm), -offset));
-        }
-    }
+  if (!data->passed_pointer)
+    {
+      set_mem_attributes (stack_parm, parm, 1);
+      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
+         while promoted mode's size is needed.  */
+      if (data->promoted_mode != BLKmode
+          && data->promoted_mode != DECL_MODE (parm))
+        {
+          set_mem_size (stack_parm,
+                        GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
+          if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
+            {
+              int offset = subreg_lowpart_offset (DECL_MODE (parm),
+                                                  data->promoted_mode);
+              if (offset)
+                set_mem_offset (stack_parm,
+                                plus_constant (MEM_OFFSET (stack_parm),
+                                               -offset));
+            }
+        }
+    }
 
   boundary = data->locate.boundary;
   align = BITS_PER_UNIT;
@@ -2462,11 +2484,11 @@
      is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
      intentionally forcing upward padding.  Otherwise we have to come
      up with a guess at the alignment based on OFFSET_RTX.  */
   if (data->locate.where_pad != downward || data->entry_parm)
     align = boundary;
-  else if (GET_CODE (offset_rtx) == CONST_INT)
+  else if (CONST_INT_P (offset_rtx))
     {
       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
       align = align & -align;
     }
   set_mem_align (stack_parm, align);
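The alignment guess in this hunk rests on a bit trick: OR the constant byte offset (scaled to bits) with the known boundary, then isolate the lowest set bit with align & -align, which is the largest power of two dividing both and hence the alignment the address is still guaranteed to have. A small standalone program showing the trick (the boundary and offsets are illustrative):

    /* Standalone illustration of ALIGN & -ALIGN as used above.  */
    #include <stdio.h>

    int
    main (void)
    {
      unsigned boundary = 64;            /* slot boundary in bits */
      int offsets[] = { 0, 4, 6, 8, 12 };

      for (int i = 0; i < 5; i++)
        {
          unsigned align = offsets[i] * 8 | boundary;  /* BITS_PER_UNIT == 8 */
          align = align & -align;                      /* lowest set bit */
          printf ("offset %2d bytes -> %2u-bit alignment\n",
                  offsets[i], align);
        }
      return 0;
    }

For example, a 6-byte offset on a 64-bit boundary gives 48 | 64 = 112, whose lowest set bit is 16: the address is only guaranteed 16-bit aligned.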
@@ -2495,11 +2517,11 @@
     {
       /* Handle calls that pass values in multiple non-contiguous
          locations.  The Irix 6 ABI has examples of this.  */
       if (GET_CODE (entry_parm) == PARALLEL)
         emit_group_store (validize_mem (stack_parm), entry_parm,
                           data->passed_type,
                           int_size_in_bytes (data->passed_type));
       else
         {
           gcc_assert (data->partial % UNITS_PER_WORD == 0);
           move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
@@ -2620,11 +2642,11 @@
 #endif
 
   return false;
 }
 
 /* A subroutine of assign_parms.  Arrange for the parameter to be
    present and valid in DATA->STACK_RTL.  */
 
 static void
 assign_parm_setup_block (struct assign_parm_data_all *all,
                          tree parm, struct assign_parm_data_one *data)
@@ -2776,16 +2798,15 @@
   enum machine_mode promoted_nominal_mode;
   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
   bool did_conversion = false;
 
   /* Store the parm in a pseudoregister during the function, but we may
-     need to do it in a wider mode.  */
-
-  /* This is not really promoting for a call.  However we need to be
-     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
+     need to do it in a wider mode.  Using 2 here makes the result
+     consistent with promote_decl_mode and thus expand_expr_real_1.  */
   promoted_nominal_mode
-    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
+    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
+                             TREE_TYPE (current_function_decl), 2);
 
   parmreg = gen_reg_rtx (promoted_nominal_mode);
 
   if (!DECL_ARTIFICIAL (parm))
     mark_user_reg (parmreg);
@@ -2801,11 +2822,12 @@
   else
     SET_DECL_RTL (parm, parmreg);
 
   assign_parm_remove_parallels (data);
 
-  /* Copy the value into the register.  */
+  /* Copy the value into the register, thus bridging between
+     assign_parm_find_data_types and expand_expr_real_1.  */
   if (data->nominal_mode != data->passed_mode
       || promoted_nominal_mode != data->promoted_mode)
     {
       int save_tree_used;
 
@@ -3161,11 +3183,11 @@
         {
           gcc_assert (!crtl->stack_realign_processed);
           crtl->stack_alignment_estimated = align;
         }
     }
 
   if (cfun->stdarg && !TREE_CHAIN (parm))
     assign_parms_setup_varargs (&all, &data, false);
 
   /* Find out where the parameter arrives in this function.  */
   assign_parm_find_entry_rtl (&all, &data);
@@ -3218,11 +3240,11 @@
             {
               gcc_assert (!crtl->stack_realign_processed);
               crtl->stack_alignment_estimated = align;
             }
         }
     }
 }
 
 /* If we are receiving a struct value address as the first argument, set up
    the RTL for the function result.  As this might require code to convert
    the transmitted address to Pmode, we do this here to ensure that possible
@@ -3268,11 +3290,11 @@
 #ifdef ARGS_GROW_DOWNWARD
   crtl->args.arg_offset_rtx
     = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
        : expand_expr (size_diffop (all.stack_args_size.var,
                                    size_int (-all.stack_args_size.constant)),
-                      NULL_RTX, VOIDmode, 0));
+                      NULL_RTX, VOIDmode, EXPAND_NORMAL));
 #else
   crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
 #endif
 
   /* See how many bytes, if any, of its args a function should try to pop
@@ -3543,12 +3565,10 @@
 
   /* Remember if the outgoing parameter requires extra alignment on the
      calling function side.  */
   if (crtl->stack_alignment_needed < boundary)
     crtl->stack_alignment_needed = boundary;
-  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
-    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
   if (crtl->preferred_stack_boundary < boundary)
     crtl->preferred_stack_boundary = boundary;
 
 #ifdef ARGS_GROW_DOWNWARD
   locate->slot_offset.constant = -initial_offset_ptr->constant;
@@ -3748,11 +3768,11 @@
     {
       if (TREE_CODE (decl) == VAR_DECL
           && DECL_RTL_SET_P (decl)
           && REG_P (DECL_RTL (decl))
           && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
         warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
                  " %<longjmp%> or %<vfork%>", decl);
     }
 
   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
     setjmp_vars_warning (setjmp_crosses, sub);
@@ -3768,18 +3788,18 @@
   for (decl = DECL_ARGUMENTS (current_function_decl);
        decl; decl = TREE_CHAIN (decl))
     if (DECL_RTL (decl) != 0
         && REG_P (DECL_RTL (decl))
         && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
       warning (OPT_Wclobbered,
                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
                decl);
 }
 
 /* Generate warning messages for variables live across setjmp.  */
 
 void
 generate_setjmp_warnings (void)
 {
   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
 
   if (n_basic_blocks == NUM_FIXED_BLOCKS
@@ -4077,11 +4097,11 @@
   set_cfun (new_cfun);
 }
 
 /* Return value of funcdef and increase it.  */
 int
 get_next_funcdef_no (void)
 {
   return funcdef_no++;
 }
 
 /* Allocate a function structure for FNDECL and set its contents
@@ -4136,11 +4156,11 @@
       cfun->stdarg
         = (fntype
            && TYPE_ARG_TYPES (fntype) != 0
            && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                != void_type_node));
 
       /* Assume all registers in stdarg functions need to be saved.  */
       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
     }
 }
@@ -4217,36 +4237,29 @@
      regardless of which calling convention we are using for it.  */
   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
     warning (OPT_Waggregate_return, "function returns an aggregate");
 }
 
-/* Make sure all values used by the optimization passes have sane
-   defaults.  */
+/* Make sure all values used by the optimization passes have sane defaults.  */
 unsigned int
 init_function_for_compilation (void)
 {
   reg_renumber = 0;
-
-  /* No prologue/epilogue insns yet.  Make sure that these vectors are
-     empty.  */
-  gcc_assert (VEC_length (int, prologue) == 0);
-  gcc_assert (VEC_length (int, epilogue) == 0);
-  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
   return 0;
 }
 
 struct rtl_opt_pass pass_init_function =
 {
  {
   RTL_PASS,
-  NULL,                                 /* name */
+  "*init_function",                     /* name */
   NULL,                                 /* gate */
   init_function_for_compilation,        /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   0                                     /* todo_flags_finish */
@@ -4457,17 +4470,25 @@
 
   /* If function gets a static chain arg, store it.  */
   if (cfun->static_chain_decl)
     {
       tree parm = cfun->static_chain_decl;
-      rtx local = gen_reg_rtx (Pmode);
+      rtx local, chain, insn;
 
-      set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
+      local = gen_reg_rtx (Pmode);
+      chain = targetm.calls.static_chain (current_function_decl, true);
+
+      set_decl_incoming_rtl (parm, chain, false);
       SET_DECL_RTL (parm, local);
       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
 
-      emit_move_insn (local, static_chain_incoming_rtx);
+      insn = emit_move_insn (local, chain);
+
+      /* Mark the register as eliminable, similar to parameters.  */
+      if (MEM_P (chain)
+          && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
+        set_unique_reg_note (insn, REG_EQUIV, chain);
     }
 
   /* If the function receives a non-local goto, then store the
      bits we need to restore the frame pointer.  */
   if (cfun->nonlocal_goto_save_area)
@@ -4634,13 +4655,16 @@
   rtx insn, seq;
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     if (CALL_P (insn))
       {
+        rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
         start_sequence ();
-        probe_stack_range (STACK_OLD_CHECK_PROTECT,
-                           GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
+        if (STACK_CHECK_MOVING_SP)
+          anti_adjust_stack_and_probe (max_frame_size, true);
+        else
+          probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
         seq = get_insns ();
         end_sequence ();
         emit_insn_before (seq, stack_check_probe_note);
         break;
       }
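STACK_CHECK_MOVING_SP selects between probing a range ahead of the stack pointer and moving the stack pointer while probing. Either way, the underlying idea is to touch every page between the current stack extent and the farthest byte the frame may reach, so the OS guard page is faulted in order. A standalone sketch of that idea only, with an assumed 4 KiB probe interval and a fake arena standing in for the stack; it is an analogy, not the RTL these routines actually emit:

    /* Standalone sketch of stack probing: touch one byte per page over
       the range a frame may occupy, walking downward like a stack.  */
    #include <stdio.h>

    #define PROBE_INTERVAL 4096   /* stand-in for the target's interval */

    static void
    probe_range (char *base, long size)
    {
      for (long off = PROBE_INTERVAL; off < size; off += PROBE_INTERVAL)
        *(volatile char *) (base - off) = 0;     /* the probe itself */
      *(volatile char *) (base - size) = 0;      /* final partial page */
    }

    int
    main (void)
    {
      static char arena[1 << 16];                /* pretend stack segment */
      probe_range (arena + sizeof arena, 5 * PROBE_INTERVAL + 123);
      puts ("probed");
      return 0;
    }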
@@ -4731,14 +4755,13 @@
          we may need to re-do the PROMOTE_MODE signed/unsigned
          extension.  */
       else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
         {
           int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
-
-          if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
-            promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
-                          &unsignedp, 1);
+          promote_function_mode (TREE_TYPE (decl_result),
+                                 GET_MODE (decl_rtl), &unsignedp,
+                                 TREE_TYPE (current_function_decl), 1);
 
           convert_move (real_decl_rtl, decl_rtl, unsignedp);
         }
       else if (GET_CODE (real_decl_rtl) == PARALLEL)
         {
@@ -4815,19 +4838,19 @@
     {
       rtx seq;
 
       start_sequence ();
       clobber_return_register ();
-      expand_naked_return ();
       seq = get_insns ();
       end_sequence ();
 
       emit_insn_after (seq, clobber_after);
     }
 
   /* Output the label for the naked return from the function.  */
-  emit_label (naked_return_label);
+  if (naked_return_label)
+    emit_label (naked_return_label);
 
   /* @@@ This is a kludge.  We want to ensure that instructions that
      may trap are not moved into the epilogue by scheduling, because
      we don't always emit unwind information for the epilogue.  */
   if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
@@ -4886,20 +4909,46 @@
     }
 
   return ret;
 }
 
-/* Extend a vector that records the INSN_UIDs of INSNS
-   (a list of one or more insns).  */
+/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
+   for the first time.  */
 
 static void
-record_insns (rtx insns, VEC(int,heap) **vecp)
+record_insns (rtx insns, rtx end, htab_t *hashp)
 {
   rtx tmp;
+  htab_t hash = *hashp;
 
-  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
-    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
+  if (hash == NULL)
+    *hashp = hash
+      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
+
+  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
+    {
+      void **slot = htab_find_slot (hash, tmp, INSERT);
+      gcc_assert (*slot == NULL);
+      *slot = tmp;
+    }
+}
+
+/* INSN has been duplicated as COPY, as part of duping a basic block.
+   If INSN is an epilogue insn, then record COPY as epilogue as well.  */
+
+void
+maybe_copy_epilogue_insn (rtx insn, rtx copy)
+{
+  void **slot;
+
+  if (epilogue_insn_hash == NULL
+      || htab_find (epilogue_insn_hash, insn) == NULL)
+    return;
+
+  slot = htab_find_slot (epilogue_insn_hash, copy, INSERT);
+  gcc_assert (*slot == NULL);
+  *slot = copy;
 }
 
 /* Set the locator of the insn chain starting at INSN to LOC.  */
 static void
 set_insn_locators (rtx insn, int loc)
@@ -4910,53 +4959,38 @@
       INSN_LOCATOR (insn) = loc;
       insn = NEXT_INSN (insn);
     }
 }
 
-/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
-   be running after reorg, SEQUENCE rtl is possible.  */
+/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
+   we can be running after reorg, SEQUENCE rtl is possible.  */
 
-static int
-contains (const_rtx insn, VEC(int,heap) **vec)
+static bool
+contains (const_rtx insn, htab_t hash)
 {
-  int i, j;
-
-  if (NONJUMP_INSN_P (insn)
-      && GET_CODE (PATTERN (insn)) == SEQUENCE)
+  if (hash == NULL)
+    return false;
+
+  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
     {
-      int count = 0;
+      int i;
       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
-        for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
-          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
-              == VEC_index (int, *vec, j))
-            count++;
-      return count;
-    }
-  else
-    {
-      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
-        if (INSN_UID (insn) == VEC_index (int, *vec, j))
-          return 1;
-    }
-  return 0;
+        if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
+          return true;
+      return false;
+    }
+
+  return htab_find (hash, insn) != NULL;
 }
 
 int
 prologue_epilogue_contains (const_rtx insn)
 {
-  if (contains (insn, &prologue))
+  if (contains (insn, prologue_insn_hash))
     return 1;
-  if (contains (insn, &epilogue))
+  if (contains (insn, epilogue_insn_hash))
     return 1;
-  return 0;
-}
-
-int
-sibcall_epilogue_contains (const_rtx insn)
-{
-  if (sibcall_epilogue)
-    return contains (insn, &sibcall_epilogue);
   return 0;
 }
 
 #ifdef HAVE_return
 /* Insert gen_return at the end of block BB.  This also means updating
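record_insns and contains now key a GC-managed hash table by the insn's RTX pointer, so a membership query is one lookup instead of a scan over a vector of INSN_UIDs, and sibcall epilogues simply share epilogue_insn_hash instead of needing their own vector; this is also what lets maybe_copy_epilogue_insn mark duplicated epilogue insns cheaply. A standalone sketch of the same record/contains pattern with a tiny fixed-size open-addressing pointer set (an analogy, not libiberty's htab):

    /* Standalone sketch of pointer-set membership as used above.
       TABLE_SIZE is an illustrative power of two; no resizing is done,
       so this toy only holds a handful of entries.  */
    #include <assert.h>
    #include <stdio.h>
    #include <stdint.h>
    #include <string.h>

    #define TABLE_SIZE 64

    struct ptrset { const void *slots[TABLE_SIZE]; };

    static size_t
    hash_pointer (const void *p)
    {
      return ((uintptr_t) p >> 3) & (TABLE_SIZE - 1);
    }

    /* record_insns analogue: insert a pointer, asserting no duplicate,
       mirroring gcc_assert (*slot == NULL).  */
    static void
    ptrset_insert (struct ptrset *s, const void *p)
    {
      size_t i = hash_pointer (p);
      while (s->slots[i] != NULL)
        {
          assert (s->slots[i] != p);
          i = (i + 1) & (TABLE_SIZE - 1);
        }
      s->slots[i] = p;
    }

    /* contains analogue: one probe sequence, no full scan.  */
    static int
    ptrset_contains (const struct ptrset *s, const void *p)
    {
      size_t i = hash_pointer (p);
      while (s->slots[i] != NULL)
        {
          if (s->slots[i] == p)
            return 1;
          i = (i + 1) & (TABLE_SIZE - 1);
        }
      return 0;
    }

    int
    main (void)
    {
      struct ptrset s;
      int a, b;
      memset (&s, 0, sizeof s);
      ptrset_insert (&s, &a);
      printf ("a: %d, b: %d\n",
              ptrset_contains (&s, &a), ptrset_contains (&s, &b));
      return 0;
    }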
@@ -4992,19 +5026,19 @@
     {
       start_sequence ();
       seq = gen_prologue ();
       emit_insn (seq);
 
       /* Insert an explicit USE for the frame pointer
          if the profiling is on and the frame pointer is required.  */
       if (crtl->profile && frame_pointer_needed)
         emit_use (hard_frame_pointer_rtx);
 
       /* Retain a map of the prologue insns.  */
-      record_insns (seq, &prologue);
+      record_insns (seq, NULL, &prologue_insn_hash);
       emit_note (NOTE_INSN_PROLOGUE_END);
 
 #ifndef PROFILE_BEFORE_PROLOGUE
       /* Ensure that instructions are not moved into the prologue when
          profiling is on.  The call to the profiling routine can be
          emitted within the live range of a call-clobbered register.  */
       if (crtl->profile)
@@ -5130,10 +5164,42 @@
           single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
           goto epilogue_done;
         }
     }
 #endif
+
+  /* A small fib -- epilogue is not yet completed, but we wish to re-use
+     this marker for the splits of EH_RETURN patterns, and nothing else
+     uses the flag in the meantime.  */
+  epilogue_completed = 1;
+
+#ifdef HAVE_eh_return
+  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
+     some targets, these get split to a special version of the epilogue
+     code.  In order to be able to properly annotate these with unwind
+     info, try to split them now.  If we get a valid split, drop an
+     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+    {
+      rtx prev, last, trial;
+
+      if (e->flags & EDGE_FALLTHRU)
+        continue;
+      last = BB_END (e->src);
+      if (!eh_returnjump_p (last))
+        continue;
+
+      prev = PREV_INSN (last);
+      trial = try_split (PATTERN (last), last, 1);
+      if (trial == last)
+        continue;
+
+      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
+      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
+    }
+#endif
+
   /* Find the edge that falls through to EXIT.  Other edges may exist
      due to RETURN instructions, but those don't need epilogues.
      There really shouldn't be a mixture -- either all should have
      been converted or none, however...  */
 
@@ -5150,11 +5216,11 @@
       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
       seq = gen_epilogue ();
       emit_jump_insn (seq);
 
       /* Retain a map of the epilogue insns.  */
-      record_insns (seq, &epilogue);
+      record_insns (seq, NULL, &epilogue_insn_hash);
       set_insn_locators (seq, epilogue_locator);
 
       seq = get_insns ();
       end_sequence ();
 
@@ -5212,18 +5278,19 @@
           ei_next (&ei);
           continue;
         }
 
       start_sequence ();
+      emit_note (NOTE_INSN_EPILOGUE_BEG);
       emit_insn (gen_sibcall_epilogue ());
       seq = get_insns ();
       end_sequence ();
 
       /* Retain a map of the epilogue insns.  Used in life analysis to
          avoid getting rid of sibcall epilogue insns.  Do this before we
          actually emit the sequence.  */
-      record_insns (seq, &sibcall_epilogue);
+      record_insns (seq, NULL, &epilogue_insn_hash);
       set_insn_locators (seq, epilogue_locator);
 
       emit_insn_before (seq, insn);
       ei_next (&ei);
     }
@@ -5240,11 +5307,11 @@
      NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
      info generation.  */
   for (insn = epilogue_end; insn; insn = next)
     {
       next = NEXT_INSN (insn);
       if (NOTE_P (insn)
           && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
         reorder_insns (insn, insn, PREV_INSN (epilogue_end));
     }
 }
 #endif
@@ -5253,114 +5320,124 @@
      in the entry and exit blocks.  */
   epilogue_completed = 1;
   df_update_entry_exit_and_calls ();
 }
 
-/* Reposition the prologue-end and epilogue-begin notes after instruction
-   scheduling and delayed branch scheduling.  */
+/* Reposition the prologue-end and epilogue-begin notes after
+   instruction scheduling.  */
 
 void
 reposition_prologue_and_epilogue_notes (void)
 {
-#if defined (HAVE_prologue) || defined (HAVE_epilogue)
-  rtx insn, last, note;
-  int len;
-
-  if ((len = VEC_length (int, prologue)) > 0)
+#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
+    || defined (HAVE_sibcall_epilogue)
+  /* Since the hash table is created on demand, the fact that it is
+     non-null is a signal that it is non-empty.  */
+  if (prologue_insn_hash != NULL)
     {
-      last = 0, note = 0;
-
-      /* Scan from the beginning until we reach the last prologue insn.
-         We apparently can't depend on basic_block_{head,end} after
-         reorg has run.  */
+      size_t len = htab_elements (prologue_insn_hash);
+      rtx insn, last = NULL, note = NULL;
+
+      /* Scan from the beginning until we reach the last prologue insn.  */
+      /* ??? While we do have the CFG intact, there are two problems:
+         (1) The prologue can contain loops (typically probing the stack),
+             which means that the end of the prologue isn't in the first bb.
+         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
-         else if (contains (insn, &prologue))
+         else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }
 
      if (last)
        {
-         /* Find the prologue-end note if we haven't already, and
-            move it to just after the last prologue insn.  */
-         if (note == 0)
+         if (note == NULL)
            {
-             for (note = last; (note = NEXT_INSN (note));)
-               if (NOTE_P (note)
-                   && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
-                 break;
+             /* Scan forward looking for the PROLOGUE_END note.  It should
+                be right at the beginning of the block, possibly with other
+                insn notes that got moved there.  */
+             for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
+               {
+                 if (NOTE_P (note)
+                     && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
+                   break;
+               }
            }
 
          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }
 
-  if ((len = VEC_length (int, epilogue)) > 0)
+  if (epilogue_insn_hash != NULL)
    {
-      last = 0, note = 0;
-
-      /* Scan from the end until we reach the first epilogue insn.
-         We apparently can't depend on basic_block_{head,end} after
-         reorg has run.  */
-      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
-        {
-          if (NOTE_P (insn))
-            {
-              if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
-                note = insn;
-            }
-          else if (contains (insn, &epilogue))
-            {
-              last = insn;
-              if (--len == 0)
-                break;
-            }
-        }
-
-      if (last)
-        {
-          /* Find the epilogue-begin note if we haven't already, and
-             move it to just before the first epilogue insn.  */
-          if (note == 0)
-            {
-              for (note = insn; (note = PREV_INSN (note));)
-                if (NOTE_P (note)
-                    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
-                  break;
-            }
-
-          if (PREV_INSN (last) != note)
-            reorder_insns (note, note, PREV_INSN (last));
+      edge_iterator ei;
+      edge e;
+
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+        {
+          rtx insn, first = NULL, note = NULL;
+          basic_block bb = e->src;
+
+          /* Scan from the beginning until we reach the first epilogue insn.  */
+          FOR_BB_INSNS (bb, insn)
+            {
+              if (NOTE_P (insn))
+                {
+                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
+                    {
+                      note = insn;
+                      if (first != NULL)
+                        break;
+                    }
+                }
+              else if (first == NULL && contains (insn, epilogue_insn_hash))
+                {
+                  first = insn;
+                  if (note != NULL)
+                    break;
+                }
+            }
+
+          if (note)
+            {
+              /* If the function has a single basic block, and no real
+                 epilogue insns (e.g. sibcall with no cleanup), the
+                 epilogue note can get scheduled before the prologue
+                 note.  If we have frame related prologue insns, having
+                 them scanned during the epilogue will result in a crash.
+                 In this case re-order the epilogue note to just before
+                 the last insn in the block.  */
+              if (first == NULL)
+                first = BB_END (bb);
+
+              if (PREV_INSN (first) != note)
+                reorder_insns (note, note, PREV_INSN (first));
+            }
        }
    }
 #endif /* HAVE_prologue or HAVE_epilogue */
 }
 
 /* Returns the name of the current function.  */
 const char *
 current_function_name (void)
 {
+  if (cfun == NULL)
+    return "<none>";
   return lang_hooks.decl_printable_name (cfun->decl, 2);
-}
-
-/* Returns the raw (mangled) name of the current function.  */
-const char *
-current_function_assembler_name (void)
-{
-  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
 }
 
 
 static unsigned int
 rest_of_handle_check_leaf_regs (void)
@@ -5477,17 +5554,17 @@
 
 struct rtl_opt_pass pass_leaf_regs =
 {
  {
   RTL_PASS,
-  NULL,                                 /* name */
+  "*leaf_regs",                         /* name */
   NULL,                                 /* gate */
   rest_of_handle_check_leaf_regs,       /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   0                                     /* todo_flags_finish */
@@ -5530,11 +5607,11 @@
  }
 };
 
 
 /* This mini-pass fixes fall-out from SSA in asm statements that have
    in-out constraints.  Say you start with
 
      orig = inout;
      asm ("": "+mr" (inout));
      use (orig);
 
@@ -5718,11 +5795,11 @@
   NULL,                                 /* gate */
   rest_of_match_asm_constraints,        /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   TODO_dump_func                        /* todo_flags_finish */