comparison gcc/function.c @ 55:77e2b8dfacca gcc-4.4.5

update it from GCC 4.4.3 to GCC 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents 3bfb6c00c1e0
children 326d9e06c2e3 b7f97abdc517
--- gcc/function.c	(52:c156f1bd5cd9)
+++ gcc/function.c	(55:77e2b8dfacca)
@@ -122,17 +122,15 @@
 struct machine_function * (*init_machine_status) (void);
 
 /* The currently compiled function.  */
 struct function *cfun = 0;
 
-/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
-static VEC(int,heap) *prologue;
-static VEC(int,heap) *epilogue;
-
-/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
-   in this function.  */
-static VEC(int,heap) *sibcall_epilogue;
+/* These hashes record the prologue and epilogue insns.  */
+static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
+  htab_t prologue_insn_hash;
+static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
+  htab_t epilogue_insn_hash;
 
 
 htab_t types_used_by_vars_hash = NULL;
 tree types_used_by_cur_var_decl = NULL;
 
@@ -145,12 +143,12 @@
 static int all_blocks (tree, tree *);
 static tree *get_block_vector (tree, int *);
 extern tree debug_find_var_in_block_tree (tree, tree);
 /* We always define `record_insns' even if it's not used so that we
    can always export `prologue_epilogue_contains'.  */
-static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
-static int contains (const_rtx, VEC(int,heap) **);
+static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
+static bool contains (const_rtx, htab_t);
 #ifdef HAVE_return
 static void emit_return_into_block (basic_block);
 #endif
 static void prepare_function_start (void);
 static void do_clobber_return_reg (rtx, void *);
@@ -209,13 +207,13 @@
    reclaim the memory.  */
 
 void
 free_after_compilation (struct function *f)
 {
-  VEC_free (int, heap, prologue);
-  VEC_free (int, heap, epilogue);
-  VEC_free (int, heap, sibcall_epilogue);
+  prologue_insn_hash = NULL;
+  epilogue_insn_hash = NULL;
+
   if (crtl->emit.regno_pointer_align)
     free (crtl->emit.regno_pointer_align);
 
   memset (crtl, 0, sizeof (struct rtl_data));
   f->eh = NULL;
@@ -243,18 +241,19 @@
    the signed target pointer arithmetics for function FUNC.  Otherwise
    return FALSE.  */
 
 bool
 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
 {
   unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
 
   if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
 	      /* Leave room for the fixed part of the frame.  */
 	      - 64 * UNITS_PER_WORD)
     {
-      error ("%Jtotal size of local objects too large", func);
+      error_at (DECL_SOURCE_LOCATION (func),
+                "total size of local objects too large");
       return TRUE;
     }
 
   return FALSE;
 }
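Note: this hunk reflects a GCC 4.5 diagnostics cleanup: the old %J format modifier, which smuggled a decl's location through the format string, is replaced by error_at with an explicit location taken from DECL_SOURCE_LOCATION. A minimal standalone analogue of the pattern -- the location_t struct and error_at below are toy stand-ins, not GCC's real implementations:

    #include <stdarg.h>
    #include <stdio.h>

    /* Toy stand-in for GCC's location machinery (illustrative only).  */
    typedef struct { const char *file; int line; } location_t;

    /* Report an error at an explicit source location, as error_at does.  */
    static void error_at (location_t loc, const char *fmt, ...)
    {
      va_list ap;
      fprintf (stderr, "%s:%d: error: ", loc.file, loc.line);
      va_start (ap, fmt);
      vfprintf (stderr, fmt, ap);
      va_end (ap);
      fputc ('\n', stderr);
    }

    int main (void)
    {
      /* The location would come from DECL_SOURCE_LOCATION (func) in GCC.  */
      location_t func_loc = { "t.c", 5 };
      error_at (func_loc, "total size of local objects too large");
      return 0;
    }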
@@ -359,12 +358,12 @@
 	}
     }
 
   if (crtl->stack_alignment_needed < alignment_in_bits)
     crtl->stack_alignment_needed = alignment_in_bits;
-  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
-    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
+  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
+    crtl->max_used_stack_slot_alignment = alignment_in_bits;
 
   /* Calculate how many bytes the start of local variables is off from
      stack alignment.  */
   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
@@ -452,12 +451,11 @@
 
    Automatic variables are also assigned temporary slots, at the nesting
    level where they are defined.  They are marked a "kept" so that
    free_temp_slots will not free them.  */
 
-struct temp_slot GTY(())
-{
+struct GTY(()) temp_slot {
   /* Points to next temporary slot.  */
   struct temp_slot *next;
   /* Points to previous temporary slot.  */
   struct temp_slot *prev;
   /* The rtx to used to reference the slot.  */
@@ -490,12 +488,11 @@
 /* A table of addresses that represent a stack slot.  The table is a mapping
    from address RTXen to a temp slot.  */
 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
 
 /* Entry for the above hash table.  */
-struct temp_slot_address_entry GTY(())
-{
+struct GTY(()) temp_slot_address_entry {
   hashval_t hash;
   rtx address;
   struct temp_slot *temp_slot;
 };
 
@@ -660,11 +657,11 @@
     return p;
 
   /* Last resort: Address is a virtual stack var address.  */
   if (GET_CODE (x) == PLUS
       && XEXP (x, 0) == virtual_stack_vars_rtx
-      && GET_CODE (XEXP (x, 1)) == CONST_INT)
+      && CONST_INT_P (XEXP (x, 1)))
     {
       int i;
       for (i = max_slot_level (); i >= 0; i--)
 	for (p = *temp_slots_at_level (i); p; p = p->next)
 	  {
@@ -711,11 +708,11 @@
   align = get_stack_local_alignment (type, mode);
 
   /* Try to find an available, already-allocated temporary of the proper
      mode which meets the size and alignment requirements.  Choose the
      smallest one with the closest alignment.
 
      If assign_stack_temp is called outside of the tree->rtl expansion,
      we cannot reuse the stack slots (that may still refer to
      VIRTUAL_STACK_VARS_REGNUM).  */
   if (!virtuals_instantiated)
     {
@@ -927,11 +924,11 @@
       return tmp;
     }
 
 #ifdef PROMOTE_MODE
   if (! dont_promote)
-    mode = promote_mode (type, mode, &unsignedp, 0);
+    mode = promote_mode (type, mode, &unsignedp);
 #endif
 
   return gen_reg_rtx (mode);
 }
 
@@ -1165,21 +1162,28 @@
 
 void
 free_temp_slots (void)
 {
   struct temp_slot *p, *next;
+  bool some_available = false;
 
   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
     {
       next = p->next;
 
       if (!p->keep)
-	make_slot_available (p);
-    }
-
-  remove_unused_temp_slot_addresses ();
-  combine_temp_slots ();
+	{
+	  make_slot_available (p);
+	  some_available = true;
+	}
+    }
+
+  if (some_available)
+    {
+      remove_unused_temp_slot_addresses ();
+      combine_temp_slots ();
+    }
 }
 
 /* Push deeper into the nesting level for stack temporaries.  */
 
 void
@@ -1193,19 +1197,24 @@
 
 void
 pop_temp_slots (void)
 {
   struct temp_slot *p, *next;
+  bool some_available = false;
 
   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
     {
       next = p->next;
       make_slot_available (p);
+      some_available = true;
     }
 
-  remove_unused_temp_slot_addresses ();
-  combine_temp_slots ();
+  if (some_available)
+    {
+      remove_unused_temp_slot_addresses ();
+      combine_temp_slots ();
+    }
 
   temp_slot_level--;
 }
 
 /* Initialize temporary slots.  */
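Note: both temp-slot hunks above add the same micro-optimization: a some_available dirty flag, so that remove_unused_temp_slot_addresses and combine_temp_slots, which rescan the whole slot list, only run when a slot was actually freed. A standalone sketch of the dirty-flag pattern under toy types (not the real temp_slot machinery):

    #include <stdbool.h>
    #include <stddef.h>

    struct slot { struct slot *next; bool keep; bool in_use; };

    static void make_slot_available (struct slot *s) { s->in_use = false; }
    static void compact_slots (struct slot *list) { (void) list; /* imagine an O(n) rescan */ }

    /* Free unkept slots; run the expensive compaction only if anything changed.  */
    static void free_slots (struct slot *list)
    {
      bool some_available = false;

      for (struct slot *p = list; p; p = p->next)
        if (!p->keep)
          {
            make_slot_available (p);
            some_available = true;
          }

      if (some_available)
        compact_slots (list);  /* skipped on the common "nothing freed" path */
    }

    int main (void)
    {
      struct slot b = { NULL, true, true };
      struct slot a = { &b, false, true };
      free_slots (&a);
      return a.in_use ? 1 : 0;
    }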
@@ -1463,11 +1472,11 @@
 	 operands remain valid if they're modified in place.  */
       if (GET_CODE (SET_SRC (set)) == PLUS
 	  && recog_data.n_operands >= 3
 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
-	  && GET_CODE (recog_data.operand[2]) == CONST_INT
+	  && CONST_INT_P (recog_data.operand[2])
 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
 	{
 	  offset += INTVAL (recog_data.operand[2]);
 
 	  /* If the sum is zero, then replace with a plain move.  */
@@ -1599,11 +1608,17 @@
 	     Validate the new value vs the insn predicate.  Note that
 	     asm insns will have insn_code -1 here.  */
 	  if (!safe_insn_predicate (insn_code, i, x))
 	    {
 	      start_sequence ();
-	      x = force_reg (insn_data[insn_code].operand[i].mode, x);
+	      if (REG_P (x))
+		{
+		  gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
+		  x = copy_to_reg (x);
+		}
+	      else
+		x = force_reg (insn_data[insn_code].operand[i].mode, x);
 	      seq = get_insns ();
 	      end_sequence ();
 	      if (seq)
 		emit_insn_before (seq, insn);
 	    }
@@ -1780,20 +1795,23 @@
 	  || GET_CODE (PATTERN (insn)) == CLOBBER
 	  || GET_CODE (PATTERN (insn)) == ADDR_VEC
 	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT)
 	continue;
-
-      instantiate_virtual_regs_in_insn (insn);
+      else if (DEBUG_INSN_P (insn))
+	for_each_rtx (&INSN_VAR_LOCATION (insn),
+		      instantiate_virtual_regs_in_rtx, NULL);
+      else
+	instantiate_virtual_regs_in_insn (insn);
 
       if (INSN_DELETED_P (insn))
 	continue;
 
       for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
 
       /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
-      if (GET_CODE (insn) == CALL_INSN)
+      if (CALL_P (insn))
 	for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
 		      instantiate_virtual_regs_in_rtx, NULL);
     }
 
 /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
@@ -1815,11 +1833,11 @@
   NULL,                                 /* gate */
   instantiate_virtual_regs,             /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   TODO_dump_func                        /* todo_flags_finish */
@@ -1842,11 +1860,11 @@
 
   /* DECL node associated with FNTYPE when relevant, which we might need to
      check for by-invisible-reference returns, typically for CALL_EXPR input
      EXPressions.  */
   const_tree fndecl = NULL_TREE;
 
   if (fntype)
     switch (TREE_CODE (fntype))
       {
       case CALL_EXPR:
 	fndecl = get_callee_fndecl (fntype);
@@ -1885,11 +1903,11 @@
      mechanism but might now be causing troubles at gimplification time if
      temporaries with the function type need to be created.  */
   if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
       && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
     return 1;
 
   if (targetm.calls.return_in_memory (type, fntype))
     return 1;
   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
      and thus can't be returned in registers.  */
   if (TREE_ADDRESSABLE (type))
@@ -1919,11 +1937,11 @@
 bool
 use_register_for_decl (const_tree decl)
 {
   if (!targetm.calls.allocate_stack_slots_for_args())
     return true;
 
   /* Honor volatile.  */
   if (TREE_SIDE_EFFECTS (decl))
     return false;
 
   /* Honor addressability.  */
@@ -2106,11 +2124,12 @@
       DECL_IGNORED_P (p) = addressable;
       TREE_ADDRESSABLE (p) = 0;
       layout_decl (p, 0);
 
       /* Build a second synthetic decl.  */
-      decl = build_decl (PARM_DECL, NULL_TREE, subtype);
+      decl = build_decl (EXPR_LOCATION (p),
+			 PARM_DECL, NULL_TREE, subtype);
       DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
       DECL_ARTIFICIAL (decl) = addressable;
       DECL_IGNORED_P (decl) = addressable;
       layout_decl (decl, 0);
 
@@ -2141,11 +2160,12 @@
       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
     {
       tree type = build_pointer_type (TREE_TYPE (fntype));
       tree decl;
 
-      decl = build_decl (PARM_DECL, NULL_TREE, type);
+      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
+			 PARM_DECL, NULL_TREE, type);
       DECL_ARG_TYPE (decl) = type;
       DECL_ARTIFICIAL (decl) = 1;
       DECL_IGNORED_P (decl) = 1;
 
       TREE_CHAIN (decl) = fnargs;
@@ -2170,10 +2190,11 @@
 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
 			     struct assign_parm_data_one *data)
 {
   tree nominal_type, passed_type;
   enum machine_mode nominal_mode, passed_mode, promoted_mode;
+  int unsignedp;
 
   memset (data, 0, sizeof (*data));
 
   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
   if (!cfun->stdarg)
@@ -2222,17 +2243,13 @@
       data->passed_pointer = true;
       passed_mode = nominal_mode = Pmode;
     }
 
   /* Find mode as it is passed by the ABI.  */
-  promoted_mode = passed_mode;
-  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
-    {
-      int unsignedp = TYPE_UNSIGNED (passed_type);
-      promoted_mode = promote_mode (passed_type, promoted_mode,
-				    &unsignedp, 1);
-    }
+  unsignedp = TYPE_UNSIGNED (passed_type);
+  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
+					 TREE_TYPE (current_function_decl), 0);
 
  egress:
   data->nominal_type = nominal_type;
   data->passed_type = passed_type;
   data->nominal_mode = nominal_mode;
@@ -2436,24 +2453,29 @@
   stack_parm = crtl->args.internal_arg_pointer;
   if (offset_rtx != const0_rtx)
     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
 
-  set_mem_attributes (stack_parm, parm, 1);
-  /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
-     while promoted mode's size is needed.  */
-  if (data->promoted_mode != BLKmode
-      && data->promoted_mode != DECL_MODE (parm))
-    {
-      set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
-      if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
-	{
-	  int offset = subreg_lowpart_offset (DECL_MODE (parm),
-					      data->promoted_mode);
-	  if (offset)
-	    set_mem_offset (stack_parm,
-			    plus_constant (MEM_OFFSET (stack_parm), -offset));
-	}
-    }
+  if (!data->passed_pointer)
+    {
+      set_mem_attributes (stack_parm, parm, 1);
+      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
+	 while promoted mode's size is needed.  */
+      if (data->promoted_mode != BLKmode
+	  && data->promoted_mode != DECL_MODE (parm))
+	{
+	  set_mem_size (stack_parm,
+			GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
+	  if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
+	    {
+	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
+						  data->promoted_mode);
+	      if (offset)
+		set_mem_offset (stack_parm,
+				plus_constant (MEM_OFFSET (stack_parm),
+					       -offset));
+	    }
+	}
+    }
 
   boundary = data->locate.boundary;
   align = BITS_PER_UNIT;
@@ -2462,11 +2484,11 @@
      is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
      intentionally forcing upward padding.  Otherwise we have to come
      up with a guess at the alignment based on OFFSET_RTX.  */
   if (data->locate.where_pad != downward || data->entry_parm)
     align = boundary;
-  else if (GET_CODE (offset_rtx) == CONST_INT)
+  else if (CONST_INT_P (offset_rtx))
     {
       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
       align = align & -align;
     }
   set_mem_align (stack_parm, align);
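Note: the align computation in this hunk (unchanged apart from the CONST_INT_P spelling) is the classic lowest-set-bit trick: OR the bit offset with the known boundary, then align & -align keeps only the lowest set bit, i.e. the largest power of two guaranteed to divide the address. A quick self-contained check:

    #include <stdio.h>

    int main (void)
    {
      /* Example: a slot 24 bytes past a 256-bit-aligned base.  */
      int offset_bits = 24 * 8;  /* INTVAL (offset_rtx) * BITS_PER_UNIT */
      int boundary = 256;        /* alignment of the base, in bits */

      int align = offset_bits | boundary;
      align = align & -align;    /* lowest set bit = provable alignment */

      printf ("guaranteed alignment: %d bits\n", align);  /* prints 64 */
      return 0;
    }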
@@ -2495,11 +2517,11 @@
     {
       /* Handle calls that pass values in multiple non-contiguous
 	 locations.  The Irix 6 ABI has examples of this.  */
       if (GET_CODE (entry_parm) == PARALLEL)
 	emit_group_store (validize_mem (stack_parm), entry_parm,
 			  data->passed_type,
 			  int_size_in_bytes (data->passed_type));
       else
 	{
 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
 	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
@@ -2620,11 +2642,11 @@
 #endif
 
   return false;
 }
 
 /* A subroutine of assign_parms.  Arrange for the parameter to be
    present and valid in DATA->STACK_RTL.  */
 
 static void
 assign_parm_setup_block (struct assign_parm_data_all *all,
 			 tree parm, struct assign_parm_data_one *data)
@@ -2776,16 +2798,15 @@
   enum machine_mode promoted_nominal_mode;
   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
   bool did_conversion = false;
 
   /* Store the parm in a pseudoregister during the function, but we may
-     need to do it in a wider mode.  */
-
-  /* This is not really promoting for a call.  However we need to be
-     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
+     need to do it in a wider mode.  Using 2 here makes the result
+     consistent with promote_decl_mode and thus expand_expr_real_1.  */
   promoted_nominal_mode
-    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
+    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
+			     TREE_TYPE (current_function_decl), 2);
 
   parmreg = gen_reg_rtx (promoted_nominal_mode);
 
   if (!DECL_ARTIFICIAL (parm))
     mark_user_reg (parmreg);
@@ -2801,11 +2822,12 @@
   else
     SET_DECL_RTL (parm, parmreg);
 
   assign_parm_remove_parallels (data);
 
-  /* Copy the value into the register.  */
+  /* Copy the value into the register, thus bridging between
+     assign_parm_find_data_types and expand_expr_real_1.  */
   if (data->nominal_mode != data->passed_mode
       || promoted_nominal_mode != data->promoted_mode)
     {
       int save_tree_used;
 
@@ -3161,11 +3183,11 @@
 	    {
 	      gcc_assert (!crtl->stack_realign_processed);
 	      crtl->stack_alignment_estimated = align;
 	    }
 	}
 
       if (cfun->stdarg && !TREE_CHAIN (parm))
 	assign_parms_setup_varargs (&all, &data, false);
 
       /* Find out where the parameter arrives in this function.  */
       assign_parm_find_entry_rtl (&all, &data);
@@ -3218,11 +3240,11 @@
 	    {
 	      gcc_assert (!crtl->stack_realign_processed);
 	      crtl->stack_alignment_estimated = align;
 	    }
 	}
     }
 
   /* If we are receiving a struct value address as the first argument, set up
      the RTL for the function result.  As this might require code to convert
      the transmitted address to Pmode, we do this here to ensure that possible
@@ -3263,11 +3285,11 @@
 #ifdef ARGS_GROW_DOWNWARD
   crtl->args.arg_offset_rtx
     = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
        : expand_expr (size_diffop (all.stack_args_size.var,
 				   size_int (-all.stack_args_size.constant)),
-		      NULL_RTX, VOIDmode, 0));
+		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
 #else
   crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
 #endif
 
   /* See how many bytes, if any, of its args a function should try to pop
@@ -3538,12 +3560,10 @@
 
   /* Remember if the outgoing parameter requires extra alignment on the
      calling function side.  */
   if (crtl->stack_alignment_needed < boundary)
     crtl->stack_alignment_needed = boundary;
-  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
-    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
   if (crtl->preferred_stack_boundary < boundary)
     crtl->preferred_stack_boundary = boundary;
 
 #ifdef ARGS_GROW_DOWNWARD
   locate->slot_offset.constant = -initial_offset_ptr->constant;
@@ -3743,11 +3763,11 @@
     {
       if (TREE_CODE (decl) == VAR_DECL
 	  && DECL_RTL_SET_P (decl)
 	  && REG_P (DECL_RTL (decl))
 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
 		 " %<longjmp%> or %<vfork%>", decl);
     }
 
   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
     setjmp_vars_warning (setjmp_crosses, sub);
@@ -3763,18 +3783,18 @@
   for (decl = DECL_ARGUMENTS (current_function_decl);
        decl; decl = TREE_CHAIN (decl))
     if (DECL_RTL (decl) != 0
 	&& REG_P (DECL_RTL (decl))
 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
       warning (OPT_Wclobbered,
 	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
 	       decl);
 }
 
 /* Generate warning messages for variables live across setjmp.  */
 
 void
 generate_setjmp_warnings (void)
 {
   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
 
   if (n_basic_blocks == NUM_FIXED_BLOCKS
@@ -4072,11 +4092,11 @@
   set_cfun (new_cfun);
 }
 
 /* Return value of funcdef and increase it.  */
 int
 get_next_funcdef_no (void)
 {
   return funcdef_no++;
 }
 
 /* Allocate a function structure for FNDECL and set its contents
@@ -4131,11 +4151,11 @@
       cfun->stdarg
 	= (fntype
 	   && TYPE_ARG_TYPES (fntype) != 0
 	   && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
 	       != void_type_node));
 
       /* Assume all registers in stdarg functions need to be saved.  */
       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
     }
 }
@@ -4212,19 +4232,12 @@
      regardless of which calling convention we are using for it.  */
   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
     warning (OPT_Waggregate_return, "function returns an aggregate");
 }
 
-/* Make sure all values used by the optimization passes have sane
-   defaults.  */
+/* Make sure all values used by the optimization passes have sane defaults.  */
 unsigned int
 init_function_for_compilation (void)
 {
   reg_renumber = 0;
-
-  /* No prologue/epilogue insns yet.  Make sure that these vectors are
-     empty.  */
-  gcc_assert (VEC_length (int, prologue) == 0);
-  gcc_assert (VEC_length (int, epilogue) == 0);
-  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
   return 0;
 }
@@ -4231,17 +4244,17 @@
 
 struct rtl_opt_pass pass_init_function =
 {
  {
   RTL_PASS,
-  NULL,                                 /* name */
+  "*init_function",                     /* name */
   NULL,                                 /* gate */
   init_function_for_compilation,        /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   0                                     /* todo_flags_finish */
@@ -4452,17 +4465,25 @@
 
   /* If function gets a static chain arg, store it.  */
   if (cfun->static_chain_decl)
     {
       tree parm = cfun->static_chain_decl;
-      rtx local = gen_reg_rtx (Pmode);
+      rtx local, chain, insn;
 
-      set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
+      local = gen_reg_rtx (Pmode);
+      chain = targetm.calls.static_chain (current_function_decl, true);
+
+      set_decl_incoming_rtl (parm, chain, false);
       SET_DECL_RTL (parm, local);
       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
 
-      emit_move_insn (local, static_chain_incoming_rtx);
+      insn = emit_move_insn (local, chain);
+
+      /* Mark the register as eliminable, similar to parameters.  */
+      if (MEM_P (chain)
+	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
+	set_unique_reg_note (insn, REG_EQUIV, chain);
     }
 
   /* If the function receives a non-local goto, then store the
      bits we need to restore the frame pointer.  */
   if (cfun->nonlocal_goto_save_area)
@@ -4629,13 +4650,16 @@
   rtx insn, seq;
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     if (CALL_P (insn))
       {
+	rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
 	start_sequence ();
-	probe_stack_range (STACK_OLD_CHECK_PROTECT,
-			   GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
+	if (STACK_CHECK_MOVING_SP)
+	  anti_adjust_stack_and_probe (max_frame_size, true);
+	else
+	  probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
 	seq = get_insns ();
 	end_sequence ();
 	emit_insn_before (seq, stack_check_probe_note);
 	break;
       }
@@ -4726,14 +4750,13 @@
 	 we may need to re-do the PROMOTE_MODE signed/unsigned
 	 extension.  */
       else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
 	{
 	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
-
-	  if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
-	    promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
-			  &unsignedp, 1);
+	  promote_function_mode (TREE_TYPE (decl_result),
+				 GET_MODE (decl_rtl), &unsignedp,
+				 TREE_TYPE (current_function_decl), 1);
 
 	  convert_move (real_decl_rtl, decl_rtl, unsignedp);
 	}
       else if (GET_CODE (real_decl_rtl) == PARALLEL)
 	{
@@ -4810,19 +4833,19 @@
     {
       rtx seq;
 
       start_sequence ();
       clobber_return_register ();
-      expand_naked_return ();
       seq = get_insns ();
       end_sequence ();
 
       emit_insn_after (seq, clobber_after);
     }
 
   /* Output the label for the naked return from the function.  */
-  emit_label (naked_return_label);
+  if (naked_return_label)
+    emit_label (naked_return_label);
 
   /* @@@ This is a kludge.  We want to ensure that instructions that
      may trap are not moved into the epilogue by scheduling, because
      we don't always emit unwind information for the epilogue.  */
   if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
@@ -4881,17 +4904,43 @@
     }
 
   return ret;
 }
 
-/* Extend a vector that records the INSN_UIDs of INSNS
-   (a list of one or more insns).  */
+/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
+   for the first time.  */
 
 static void
-record_insns (rtx insns, VEC(int,heap) **vecp)
+record_insns (rtx insns, rtx end, htab_t *hashp)
 {
   rtx tmp;
+  htab_t hash = *hashp;
 
-  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
-    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
+  if (hash == NULL)
+    *hashp = hash
+      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
+
+  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
+    {
+      void **slot = htab_find_slot (hash, tmp, INSERT);
+      gcc_assert (*slot == NULL);
+      *slot = tmp;
+    }
+}
+
+/* INSN has been duplicated as COPY, as part of duping a basic block.
+   If INSN is an epilogue insn, then record COPY as epilogue as well.  */
+
+void
+maybe_copy_epilogue_insn (rtx insn, rtx copy)
+{
+  void **slot;
+
+  if (epilogue_insn_hash == NULL
+      || htab_find (epilogue_insn_hash, insn) == NULL)
+    return;
+
+  slot = htab_find_slot (epilogue_insn_hash, copy, INSERT);
+  gcc_assert (*slot == NULL);
+  *slot = copy;
 }
 
@@ -4898,3 +4947,3 @@
 /* Set the locator of the insn chain starting at INSN to LOC.  */
 static void
 set_insn_locators (rtx insn, int loc)
@@ -4905,4 +4954,4 @@
       INSN_LOCATOR (insn) = loc;
       insn = NEXT_INSN (insn);
     }
 }
@@ -4909,46 +4958,31 @@
 
-/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
-   be running after reorg, SEQUENCE rtl is possible.  */
+/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
+   we can be running after reorg, SEQUENCE rtl is possible.  */
 
-static int
-contains (const_rtx insn, VEC(int,heap) **vec)
+static bool
+contains (const_rtx insn, htab_t hash)
 {
-  int i, j;
-
-  if (NONJUMP_INSN_P (insn)
-      && GET_CODE (PATTERN (insn)) == SEQUENCE)
+  if (hash == NULL)
+    return false;
+
+  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
     {
-      int count = 0;
+      int i;
       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
-	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
-	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
-	      == VEC_index (int, *vec, j))
-	    count++;
-      return count;
-    }
-  else
-    {
-      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
-	if (INSN_UID (insn) == VEC_index (int, *vec, j))
-	  return 1;
-    }
-  return 0;
+	if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
+	  return true;
+      return false;
+    }
+
+  return htab_find (hash, insn) != NULL;
 }
 
 int
 prologue_epilogue_contains (const_rtx insn)
 {
-  if (contains (insn, &prologue))
+  if (contains (insn, prologue_insn_hash))
     return 1;
   if (contains (insn, &epilogue))
-    return 1;
-  return 0;
-}
-
-int
-sibcall_epilogue_contains (const_rtx insn)
-{
-  if (sibcall_epilogue)
-    return contains (insn, &sibcall_epilogue);
+    return 1;
+  if (contains (insn, epilogue_insn_hash))
+    return 1;
   return 0;
 }
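Note: this hunk and the record_insns one above are the core of the change: prologue/epilogue membership used to be a linear scan over a VEC of INSN_UIDs, and 4.5 instead keys a libiberty hash table (htab_hash_pointer / htab_eq_pointer) directly on the insn pointers, so contains reduces to a single htab_find and the separate sibcall_epilogue vector folds into epilogue_insn_hash. A minimal standalone sketch of the same pointer-set idea -- a toy fixed-size linear-probing table, not libiberty's htab:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    /* Toy pointer set: fixed capacity, linear probing, no resizing.  */
    #define NSLOTS 64  /* power of two; a real table grows on demand */
    static const void *slots[NSLOTS];

    static size_t hash_pointer (const void *p)
    {
      return ((size_t) p >> 3) & (NSLOTS - 1);
    }

    static void set_insert (const void *p)  /* cf. record_insns */
    {
      size_t i = hash_pointer (p);
      while (slots[i] != NULL && slots[i] != p)
        i = (i + 1) & (NSLOTS - 1);  /* linear probe */
      slots[i] = p;
    }

    static bool set_contains (const void *p)  /* cf. contains */
    {
      for (size_t i = hash_pointer (p); slots[i] != NULL; i = (i + 1) & (NSLOTS - 1))
        if (slots[i] == p)
          return true;
      return false;
    }

    int main (void)
    {
      int a, b;  /* stand-ins for two insns */
      set_insert (&a);
      assert (set_contains (&a));
      assert (!set_contains (&b));
      return 0;
    }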
@@ -4955,3 +4989,3 @@
 
 #ifdef HAVE_return
 /* Insert gen_return at the end of block BB.  This also means updating
@@ -4987,19 +5021,19 @@
     {
       start_sequence ();
       seq = gen_prologue ();
       emit_insn (seq);
 
       /* Insert an explicit USE for the frame pointer
 	 if the profiling is on and the frame pointer is required.  */
       if (crtl->profile && frame_pointer_needed)
 	emit_use (hard_frame_pointer_rtx);
 
       /* Retain a map of the prologue insns.  */
-      record_insns (seq, &prologue);
+      record_insns (seq, NULL, &prologue_insn_hash);
       emit_note (NOTE_INSN_PROLOGUE_END);
 
 #ifndef PROFILE_BEFORE_PROLOGUE
       /* Ensure that instructions are not moved into the prologue when
 	 profiling is on.  The call to the profiling routine can be
 	 emitted within the live range of a call-clobbered register.  */
       if (crtl->profile)
@@ -5125,10 +5159,41 @@
 	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
 	  goto epilogue_done;
 	}
     }
 #endif
+
+  /* A small fib -- epilogue is not yet completed, but we wish to re-use
+     this marker for the splits of EH_RETURN patterns, and nothing else
+     uses the flag in the meantime.  */
+  epilogue_completed = 1;
+
+#ifdef HAVE_eh_return
+  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
+     some targets, these get split to a special version of the epilogue
+     code.  In order to be able to properly annotate these with unwind
+     info, try to split them now.  If we get a valid split, drop an
+     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+    {
+      rtx prev, last, trial;
+
+      if (e->flags & EDGE_FALLTHRU)
+	continue;
+      last = BB_END (e->src);
+      if (!eh_returnjump_p (last))
+	continue;
+
+      prev = PREV_INSN (last);
+      trial = try_split (PATTERN (last), last, 1);
+      if (trial == last)
+	continue;
+
+      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
+      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
+    }
+#endif
+
   /* Find the edge that falls through to EXIT.  Other edges may exist
      due to RETURN instructions, but those don't need epilogues.
      There really shouldn't be a mixture -- either all should have
      been converted or none, however...  */
 
@@ -5145,11 +5211,11 @@
       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
       seq = gen_epilogue ();
       emit_jump_insn (seq);
 
       /* Retain a map of the epilogue insns.  */
-      record_insns (seq, &epilogue);
+      record_insns (seq, NULL, &epilogue_insn_hash);
       set_insn_locators (seq, epilogue_locator);
 
       seq = get_insns ();
       end_sequence ();
 
@@ -5207,18 +5273,19 @@
 	  ei_next (&ei);
 	  continue;
 	}
 
       start_sequence ();
+      emit_note (NOTE_INSN_EPILOGUE_BEG);
       emit_insn (gen_sibcall_epilogue ());
       seq = get_insns ();
       end_sequence ();
 
       /* Retain a map of the epilogue insns.  Used in life analysis to
 	 avoid getting rid of sibcall epilogue insns.  Do this before we
 	 actually emit the sequence.  */
-      record_insns (seq, &sibcall_epilogue);
+      record_insns (seq, NULL, &epilogue_insn_hash);
       set_insn_locators (seq, epilogue_locator);
 
       emit_insn_before (seq, insn);
       ei_next (&ei);
     }
@@ -5235,11 +5302,11 @@
 	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
 	 info generation.  */
       for (insn = epilogue_end; insn; insn = next)
 	{
 	  next = NEXT_INSN (insn);
 	  if (NOTE_P (insn)
 	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
 	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
 	}
     }
 #endif
@@ -5248,11 +5315,11 @@
      in the entry and exit blocks.  */
   epilogue_completed = 1;
   df_update_entry_exit_and_calls ();
 }
 
-/* Reposition the prologue-end and epilogue-begin notes after instruction
-   scheduling and delayed branch scheduling.  */
+/* Reposition the prologue-end and epilogue-begin notes after
+   instruction scheduling.  */
 
 void
 reposition_prologue_and_epilogue_notes (void)
 {
@@ -5259,85 +5326,100 @@
-#if defined (HAVE_prologue) || defined (HAVE_epilogue)
-  rtx insn, last, note;
-  int len;
-
-  if ((len = VEC_length (int, prologue)) > 0)
+#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
+    || defined (HAVE_sibcall_epilogue)
+  /* Since the hash table is created on demand, the fact that it is
+     non-null is a signal that it is non-empty.  */
+  if (prologue_insn_hash != NULL)
     {
-      last = 0, note = 0;
-
-      /* Scan from the beginning until we reach the last prologue insn.
-	 We apparently can't depend on basic_block_{head,end} after
-	 reorg has run.  */
+      size_t len = htab_elements (prologue_insn_hash);
+      rtx insn, last = NULL, note = NULL;
+
+      /* Scan from the beginning until we reach the last prologue insn.  */
+      /* ??? While we do have the CFG intact, there are two problems:
+	 (1) The prologue can contain loops (typically probing the stack),
+	     which means that the end of the prologue isn't in the first bb.
+	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 	{
 	  if (NOTE_P (insn))
 	    {
 	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
 		note = insn;
 	    }
-	  else if (contains (insn, &prologue))
+	  else if (contains (insn, prologue_insn_hash))
 	    {
 	      last = insn;
 	      if (--len == 0)
 		break;
 	    }
 	}
 
       if (last)
 	{
-	  /* Find the prologue-end note if we haven't already, and
-	     move it to just after the last prologue insn.  */
-	  if (note == 0)
+	  if (note == NULL)
 	    {
-	      for (note = last; (note = NEXT_INSN (note));)
-		if (NOTE_P (note)
-		    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
-		  break;
+	      /* Scan forward looking for the PROLOGUE_END note.  It should
+		 be right at the beginning of the block, possibly with other
+		 insn notes that got moved there.  */
+	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
+		{
+		  if (NOTE_P (note)
+		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
+		    break;
+		}
 	    }
 
 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
 	  if (LABEL_P (last))
 	    last = NEXT_INSN (last);
 	  reorder_insns (note, note, last);
 	}
     }
 
-  if ((len = VEC_length (int, epilogue)) > 0)
+  if (epilogue_insn_hash != NULL)
     {
-      last = 0, note = 0;
-
-      /* Scan from the end until we reach the first epilogue insn.
-	 We apparently can't depend on basic_block_{head,end} after
-	 reorg has run.  */
-      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
+      edge_iterator ei;
+      edge e;
+
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
 	{
-	  if (NOTE_P (insn))
+	  rtx insn, first = NULL, note = NULL;
+	  basic_block bb = e->src;
+
+	  /* Scan from the beginning until we reach the first epilogue insn. */
+	  FOR_BB_INSNS (bb, insn)
 	    {
-	      if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
-		note = insn;
+	      if (NOTE_P (insn))
+		{
+		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
+		    {
+		      note = insn;
+		      if (first != NULL)
+			break;
+		    }
+		}
+	      else if (first == NULL && contains (insn, epilogue_insn_hash))
+		{
+		  first = insn;
+		  if (note != NULL)
+		    break;
+		}
 	    }
-	  else if (contains (insn, &epilogue))
+
+	  if (note)
 	    {
-	      last = insn;
-	      if (--len == 0)
-		break;
+	      /* If the function has a single basic block, and no real
+		 epilogue insns (e.g. sibcall with no cleanup), the
+		 epilogue note can get scheduled before the prologue
+		 note.  If we have frame related prologue insns, having
+		 them scanned during the epilogue will result in a crash.
+		 In this case re-order the epilogue note to just before
+		 the last insn in the block.  */
+	      if (first == NULL)
+		first = BB_END (bb);
+
+	      if (PREV_INSN (first) != note)
+		reorder_insns (note, note, PREV_INSN (first));
	    }
 	}
-
-      if (last)
-	{
-	  /* Find the epilogue-begin note if we haven't already, and
-	     move it to just before the first epilogue insn.  */
-	  if (note == 0)
-	    {
-	      for (note = insn; (note = PREV_INSN (note));)
-		if (NOTE_P (note)
-		    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
-		  break;
-	    }
-
-	  if (PREV_INSN (last) != note)
-	    reorder_insns (note, note, PREV_INSN (last));
-	}
     }
 #endif /* HAVE_prologue or HAVE_epilogue */
 }
@@ -5344,18 +5426,13 @@
 
 /* Returns the name of the current function.  */
 const char *
 current_function_name (void)
 {
+  if (cfun == NULL)
+    return "<none>";
   return lang_hooks.decl_printable_name (cfun->decl, 2);
-}
-
-/* Returns the raw (mangled) name of the current function.  */
-const char *
-current_function_assembler_name (void)
-{
-  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
 }
 
 
 static unsigned int
 rest_of_handle_check_leaf_regs (void)
@@ -5472,17 +5549,17 @@
 
 struct rtl_opt_pass pass_leaf_regs =
 {
  {
   RTL_PASS,
-  NULL,                                 /* name */
+  "*leaf_regs",                         /* name */
   NULL,                                 /* gate */
   rest_of_handle_check_leaf_regs,       /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   0                                     /* todo_flags_finish */
@@ -5525,11 +5602,11 @@
 }
 };
 
 
 /* This mini-pass fixes fall-out from SSA in asm statements that have
    in-out constraints.  Say you start with
 
      orig = inout;
      asm ("": "+mr" (inout));
      use (orig);
 
@@ -5713,11 +5790,11 @@
   NULL,                                 /* gate */
   rest_of_match_asm_constraints,        /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   TODO_dump_func                        /* todo_flags_finish */