CbC_gcc: comparison gcc/cfgexpand.c @ 57:326d9e06c2e3
modify c-parser.c
author | ryoma <e075725@ie.u-ryukyu.ac.jp>
date | Mon, 15 Feb 2010 00:54:17 +0900
parents | 9907f3135723 77e2b8dfacca
children | 1b10fe6932e1
54:f62c169bbc24 | 57:326d9e06c2e3 |
---|---|
1 /* A pass for lowering trees to RTL. | 1 /* A pass for lowering trees to RTL. |
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010 | 2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009 |
3 Free Software Foundation, Inc. | 3 Free Software Foundation, Inc. |
4 | 4 |
5 This file is part of GCC. | 5 This file is part of GCC. |
6 | 6 |
7 GCC is free software; you can redistribute it and/or modify | 7 GCC is free software; you can redistribute it and/or modify |
43 #include "value-prof.h" | 43 #include "value-prof.h" |
44 #include "target.h" | 44 #include "target.h" |
45 #ifndef noCbC | 45 #ifndef noCbC |
46 #include "cbc-tree.h" | 46 #include "cbc-tree.h" |
47 #endif | 47 #endif |
48 | 48 #include "ssaexpand.h" |
49 | |
50 | |
51 /* This variable holds information helping the rewriting of SSA trees | |
52 into RTL. */ | |
53 struct ssaexpand SA; | |
54 | |
55 /* This variable holds the currently expanded gimple statement for purposes | |
56 of communicating the profile info to the builtin expanders. */ |
57 gimple currently_expanding_gimple_stmt; | |
49 | 58 |
50 /* Return an expression tree corresponding to the RHS of GIMPLE | 59 /* Return an expression tree corresponding to the RHS of GIMPLE |
51 statement STMT. */ | 60 statement STMT. */ |
52 | 61 |
53 tree | 62 tree |
54 gimple_assign_rhs_to_tree (gimple stmt) | 63 gimple_assign_rhs_to_tree (gimple stmt) |
55 { | 64 { |
56 tree t; | 65 tree t; |
57 enum gimple_rhs_class grhs_class; | 66 enum gimple_rhs_class grhs_class; |
58 | 67 |
59 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt)); | 68 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt)); |
60 | 69 |
61 if (grhs_class == GIMPLE_BINARY_RHS) | 70 if (grhs_class == GIMPLE_BINARY_RHS) |
62 t = build2 (gimple_assign_rhs_code (stmt), | 71 t = build2 (gimple_assign_rhs_code (stmt), |
63 TREE_TYPE (gimple_assign_lhs (stmt)), | 72 TREE_TYPE (gimple_assign_lhs (stmt)), |
64 gimple_assign_rhs1 (stmt), | 73 gimple_assign_rhs1 (stmt), |
65 gimple_assign_rhs2 (stmt)); | 74 gimple_assign_rhs2 (stmt)); |
66 else if (grhs_class == GIMPLE_UNARY_RHS) | 75 else if (grhs_class == GIMPLE_UNARY_RHS) |
67 t = build1 (gimple_assign_rhs_code (stmt), | 76 t = build1 (gimple_assign_rhs_code (stmt), |
68 TREE_TYPE (gimple_assign_lhs (stmt)), | 77 TREE_TYPE (gimple_assign_lhs (stmt)), |
69 gimple_assign_rhs1 (stmt)); | 78 gimple_assign_rhs1 (stmt)); |
70 else if (grhs_class == GIMPLE_SINGLE_RHS) | 79 else if (grhs_class == GIMPLE_SINGLE_RHS) |
71 t = gimple_assign_rhs1 (stmt); | 80 { |
81 t = gimple_assign_rhs1 (stmt); | |
82 /* Avoid modifying this tree in place below. */ | |
83 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) | |
84 && gimple_location (stmt) != EXPR_LOCATION (t)) | |
85 t = copy_node (t); | |
86 } | |
72 else | 87 else |
73 gcc_unreachable (); | 88 gcc_unreachable (); |
74 | 89 |
75 return t; | 90 return t; |
76 } | |
77 | |
78 /* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND | |
79 statement STMT. */ | |
80 | |
81 static tree | |
82 gimple_cond_pred_to_tree (gimple stmt) | |
83 { | |
84 return build2 (gimple_cond_code (stmt), boolean_type_node, | |
85 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt)); | |
86 } | |
87 | |
88 /* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression | |
89 inside *TP. DATA is the location to set. */ | |
90 | |
91 static tree | |
92 set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data) | |
93 { | |
94 location_t *loc = (location_t *) data; | |
95 if (EXPR_P (*tp)) | |
96 SET_EXPR_LOCATION (*tp, *loc); | |
97 | |
98 return NULL_TREE; | |
99 } | |
100 | |
101 | |
102 /* RTL expansion has traditionally been done on trees, so the | |
103 transition to doing it on GIMPLE tuples is very invasive to the RTL | |
104 expander. To facilitate the transition, this function takes a | |
105 GIMPLE tuple STMT and returns the same statement in the form of a | |
106 tree. */ | |
107 | |
108 static tree | |
109 gimple_to_tree (gimple stmt) | |
110 { | |
111 tree t; | |
112 int rn; | |
113 tree_ann_common_t ann; | |
114 location_t loc; | |
115 | |
116 switch (gimple_code (stmt)) | |
117 { | |
118 case GIMPLE_ASSIGN: | |
119 { | |
120 tree lhs = gimple_assign_lhs (stmt); | |
121 | |
122 t = gimple_assign_rhs_to_tree (stmt); | |
123 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t); | |
124 if (gimple_assign_nontemporal_move_p (stmt)) | |
125 MOVE_NONTEMPORAL (t) = true; | |
126 } | |
127 break; | |
128 | |
129 case GIMPLE_COND: | |
130 t = gimple_cond_pred_to_tree (stmt); | |
131 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE); | |
132 break; | |
133 | |
134 case GIMPLE_GOTO: | |
135 t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt)); | |
136 break; | |
137 | |
138 case GIMPLE_LABEL: | |
139 t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt)); | |
140 break; | |
141 | |
142 case GIMPLE_RETURN: | |
143 { | |
144 tree retval = gimple_return_retval (stmt); | |
145 | |
146 if (retval && retval != error_mark_node) | |
147 { | |
148 tree result = DECL_RESULT (current_function_decl); | |
149 | |
150 /* If we are not returning the current function's RESULT_DECL, | |
151 build an assignment to it. */ | |
152 if (retval != result) | |
153 { | |
154 /* I believe that a function's RESULT_DECL is unique. */ | |
155 gcc_assert (TREE_CODE (retval) != RESULT_DECL); | |
156 | |
157 retval = build2 (MODIFY_EXPR, TREE_TYPE (result), | |
158 result, retval); | |
159 } | |
160 } | |
161 t = build1 (RETURN_EXPR, void_type_node, retval); | |
162 } | |
163 break; | |
164 | |
165 case GIMPLE_ASM: | |
166 { | |
167 size_t i, n; | |
168 tree out, in, cl; | |
169 const char *s; | |
170 | |
171 out = NULL_TREE; | |
172 n = gimple_asm_noutputs (stmt); | |
173 if (n > 0) | |
174 { | |
175 t = out = gimple_asm_output_op (stmt, 0); | |
176 for (i = 1; i < n; i++) | |
177 { | |
178 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i); | |
179 t = gimple_asm_output_op (stmt, i); | |
180 } | |
181 } | |
182 | |
183 in = NULL_TREE; | |
184 n = gimple_asm_ninputs (stmt); | |
185 if (n > 0) | |
186 { | |
187 t = in = gimple_asm_input_op (stmt, 0); | |
188 for (i = 1; i < n; i++) | |
189 { | |
190 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i); | |
191 t = gimple_asm_input_op (stmt, i); | |
192 } | |
193 } | |
194 | |
195 cl = NULL_TREE; | |
196 n = gimple_asm_nclobbers (stmt); | |
197 if (n > 0) | |
198 { | |
199 t = cl = gimple_asm_clobber_op (stmt, 0); | |
200 for (i = 1; i < n; i++) | |
201 { | |
202 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i); | |
203 t = gimple_asm_clobber_op (stmt, i); | |
204 } | |
205 } | |
206 | |
207 s = gimple_asm_string (stmt); | |
208 t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s), | |
209 out, in, cl); | |
210 ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt); | |
211 ASM_INPUT_P (t) = gimple_asm_input_p (stmt); | |
212 } | |
213 break; | |
214 | |
215 case GIMPLE_CALL: | |
216 { | |
217 size_t i; | |
218 tree fn; | |
219 tree_ann_common_t ann; | |
220 | |
221 t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | |
222 | |
223 CALL_EXPR_FN (t) = gimple_call_fn (stmt); | |
224 TREE_TYPE (t) = gimple_call_return_type (stmt); | |
225 CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt); | |
226 | |
227 for (i = 0; i < gimple_call_num_args (stmt); i++) | |
228 CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i); | |
229 | |
230 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))) | |
231 TREE_SIDE_EFFECTS (t) = 1; | |
232 | |
233 if (gimple_call_flags (stmt) & ECF_NOTHROW) | |
234 TREE_NOTHROW (t) = 1; | |
235 | |
236 CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt); | |
237 CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt); | |
238 CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt); | |
239 CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt); | |
240 CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt); | |
241 #ifndef noCbC | |
242 CALL_EXPR_CbC_GOTO (t) = gimple_call_cbc_goto_p (stmt); | |
243 #endif | |
244 | |
245 /* If the call has a LHS then create a MODIFY_EXPR to hold it. */ | |
246 { | |
247 tree lhs = gimple_call_lhs (stmt); | |
248 | |
249 if (lhs) | |
250 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t); | |
251 } | |
252 | |
253 /* Record the original call statement, as it may be used | |
254 to retrieve profile information during expansion. */ | |
255 | |
256 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE | |
257 && DECL_BUILT_IN (fn)) | |
258 { | |
259 ann = get_tree_common_ann (t); | |
260 ann->stmt = stmt; | |
261 } | |
262 } | |
263 break; | |
264 | |
265 case GIMPLE_SWITCH: | |
266 { | |
267 tree label_vec; | |
268 size_t i; | |
269 tree elt = gimple_switch_label (stmt, 0); | |
270 | |
271 label_vec = make_tree_vec (gimple_switch_num_labels (stmt)); | |
272 | |
273 if (!CASE_LOW (elt) && !CASE_HIGH (elt)) | |
274 { | |
275 for (i = 1; i < gimple_switch_num_labels (stmt); i++) | |
276 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i); | |
277 | |
278 /* The default case in a SWITCH_EXPR must be at the end of | |
279 the label vector. */ | |
280 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0); | |
281 } | |
282 else | |
283 { | |
284 for (i = 0; i < gimple_switch_num_labels (stmt); i++) | |
285 TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i); | |
286 } | |
287 | |
288 t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt), | |
289 NULL, label_vec); | |
290 } | |
291 break; | |
292 | |
293 case GIMPLE_NOP: | |
294 case GIMPLE_PREDICT: | |
295 t = build1 (NOP_EXPR, void_type_node, size_zero_node); | |
296 break; | |
297 | |
298 case GIMPLE_RESX: | |
299 t = build_resx (gimple_resx_region (stmt)); | |
300 break; | |
301 | |
302 default: | |
303 if (errorcount == 0) | |
304 { | |
305 error ("Unrecognized GIMPLE statement during RTL expansion"); | |
306 print_gimple_stmt (stderr, stmt, 4, 0); | |
307 gcc_unreachable (); | |
308 } | |
309 else | |
310 { | |
311 /* Ignore any bad gimple codes if we're going to die anyhow, | |
312 so we can at least set TREE_ASM_WRITTEN and have the rest | |
313 of compilation advance without sudden ICE death. */ | |
314 t = build1 (NOP_EXPR, void_type_node, size_zero_node); | |
315 break; | |
316 } | |
317 } | |
318 | |
319 /* If STMT is inside an exception region, record it in the generated | |
320 expression. */ | |
321 rn = lookup_stmt_eh_region (stmt); | |
322 if (rn >= 0) | |
323 { | |
324 tree call = get_call_expr_in (t); | |
325 | |
326 ann = get_tree_common_ann (t); | |
327 ann->rn = rn; | |
328 | |
329 /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up | |
330 the CALL_EXPR, not the assignment statement, for the EH region number. */ |
331 if (call && call != t) | |
332 { | |
333 ann = get_tree_common_ann (call); | |
334 ann->rn = rn; | |
335 } | |
336 } | |
337 | |
338 /* Set EXPR_LOCATION in all the embedded expressions. */ | |
339 loc = gimple_location (stmt); | |
340 walk_tree (&t, set_expr_location_r, (void *) &loc, NULL); | |
341 | |
342 TREE_BLOCK (t) = gimple_block (stmt); | |
343 | |
344 return t; | |
345 } | |
346 | |
347 | |
348 /* Release back to GC memory allocated by gimple_to_tree. */ | |
349 | |
350 static void | |
351 release_stmt_tree (gimple stmt, tree stmt_tree) | |
352 { | |
353 tree_ann_common_t ann; | |
354 | |
355 switch (gimple_code (stmt)) | |
356 { | |
357 case GIMPLE_ASSIGN: | |
358 if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS) | |
359 ggc_free (TREE_OPERAND (stmt_tree, 1)); | |
360 break; | |
361 case GIMPLE_COND: | |
362 ggc_free (COND_EXPR_COND (stmt_tree)); | |
363 break; | |
364 case GIMPLE_RETURN: | |
365 if (TREE_OPERAND (stmt_tree, 0) | |
366 && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR) | |
367 ggc_free (TREE_OPERAND (stmt_tree, 0)); | |
368 break; | |
369 case GIMPLE_CALL: | |
370 if (gimple_call_lhs (stmt)) | |
371 { | |
372 ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1)); | |
373 if (ann) | |
374 ggc_free (ann); | |
375 ggc_free (TREE_OPERAND (stmt_tree, 1)); | |
376 } | |
377 break; | |
378 default: | |
379 break; | |
380 } | |
381 ann = tree_common_ann (stmt_tree); | |
382 if (ann) | |
383 ggc_free (ann); | |
384 ggc_free (stmt_tree); | |
385 } | 91 } |
386 | 92 |
387 | 93 |
388 /* Verify that there is exactly one jump instruction since LAST and attach | 94 /* Verify that there is exactly one jump instruction since LAST and attach |
389 REG_BR_PROB note specifying probability. | 95 REG_BR_PROB note specifying probability. |
396 if (profile_status == PROFILE_ABSENT) | 102 if (profile_status == PROFILE_ABSENT) |
397 return; | 103 return; |
398 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last)) | 104 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last)) |
399 if (JUMP_P (last)) | 105 if (JUMP_P (last)) |
400 { | 106 { |
401 /* It is common to emit condjump-around-jump sequence when we don't know | 107 /* It is common to emit condjump-around-jump sequence when we don't know |
402 how to reverse the conditional. Special case this. */ | 108 how to reverse the conditional. Special case this. */ |
403 if (!any_condjump_p (last) | 109 if (!any_condjump_p (last) |
404 || !JUMP_P (NEXT_INSN (last)) | 110 || !JUMP_P (NEXT_INSN (last)) |
405 || !simplejump_p (NEXT_INSN (last)) | 111 || !simplejump_p (NEXT_INSN (last)) |
406 || !NEXT_INSN (NEXT_INSN (last)) | 112 || !NEXT_INSN (NEXT_INSN (last)) |
407 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last))) | 113 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last))) |
408 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last))) | 114 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last))) |
409 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))) | 115 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))) |
410 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))) | 116 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))) |
411 goto failed; | 117 goto failed; |
412 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); | 118 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); |
413 add_reg_note (last, REG_BR_PROB, | 119 add_reg_note (last, REG_BR_PROB, |
414 GEN_INT (REG_BR_PROB_BASE - probability)); | 120 GEN_INT (REG_BR_PROB_BASE - probability)); |
415 return; | 121 return; |
416 } | 122 } |
417 if (!last || !JUMP_P (last) || !any_condjump_p (last)) | 123 if (!last || !JUMP_P (last) || !any_condjump_p (last)) |
418 goto failed; | 124 goto failed; |
419 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); | 125 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); |
420 add_reg_note (last, REG_BR_PROB, GEN_INT (probability)); | 126 add_reg_note (last, REG_BR_PROB, GEN_INT (probability)); |
427 | 133 |
428 #ifndef STACK_ALIGNMENT_NEEDED | 134 #ifndef STACK_ALIGNMENT_NEEDED |
429 #define STACK_ALIGNMENT_NEEDED 1 | 135 #define STACK_ALIGNMENT_NEEDED 1 |
430 #endif | 136 #endif |
431 | 137 |
138 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x) | |
139 | |
140 /* Associate declaration T with storage space X. If T is not an |
141 SSA name, this is exactly SET_DECL_RTL; otherwise make the |
142 partition of T associated with X. */ | |
143 static inline void | |
144 set_rtl (tree t, rtx x) | |
145 { | |
146 if (TREE_CODE (t) == SSA_NAME) | |
147 { | |
148 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x; | |
149 if (x && !MEM_P (x)) | |
150 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x); | |
151 /* For the benefit of debug information at -O0 (where vartracking | |
152 doesn't run) record the place also in the base DECL if it's | |
153 a normal variable (not a parameter). */ | |
154 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL) | |
155 { | |
156 tree var = SSA_NAME_VAR (t); | |
157 /* If we don't yet have something recorded, just record it now. */ | |
158 if (!DECL_RTL_SET_P (var)) | |
159 SET_DECL_RTL (var, x); | |
160 /* If we have it already set to "multiple places", don't |
161 change this. */ | |
162 else if (DECL_RTL (var) == pc_rtx) | |
163 ; | |
164 /* If we have something recorded and it's not the same place | |
165 as we want to record now, we have multiple partitions for the | |
166 same base variable, with different places. We can't just | |
167 randomly choose one, hence we have to say that we don't know. |
168 This only happens with optimization, and then var-tracking |
169 will figure out the right thing. */ | |
170 else if (DECL_RTL (var) != x) | |
171 SET_DECL_RTL (var, pc_rtx); | |
172 } | |
173 } | |
174 else | |
175 SET_DECL_RTL (t, x); | |
176 } | |
432 | 177 |
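The pc_rtx trick in set_rtl above is a small three-state protocol per base variable: unset, one known place, or a "multiple places" sentinel that never changes back. A minimal standalone sketch of the same protocol, with an invented MULTIPLE sentinel standing in for pc_rtx (none of this is GCC code):

```c
#include <stdio.h>

/* Standalone sketch (not GCC code): the three-state recording used by
   set_rtl.  An unset slot takes the first place it is given; a second,
   different place demotes the slot to a "multiple places" sentinel
   (pc_rtx in the real code), which then never changes.  */
#define MULTIPLE ((void *) -1)   /* illustrative stand-in for pc_rtx */

static void *slot;               /* NULL = nothing recorded yet */

static void
record_place (void *x)
{
  if (!slot)
    slot = x;                    /* first place: just record it */
  else if (slot == MULTIPLE)
    ;                            /* already demoted: keep the sentinel */
  else if (slot != x)
    slot = MULTIPLE;             /* conflicting places: demote */
}

int
main (void)
{
  int a, b;
  record_place (&a);
  record_place (&a);             /* same place again: still &a */
  printf ("%s\n", slot == MULTIPLE ? "multiple" : "single");
  record_place (&b);             /* different place: demote */
  printf ("%s\n", slot == MULTIPLE ? "multiple" : "single");
  return 0;
}
```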
433 /* This structure holds data relevant to one variable that will be | 178 /* This structure holds data relevant to one variable that will be |
434 placed in a stack slot. */ | 179 placed in a stack slot. */ |
435 struct stack_var | 180 struct stack_var |
436 { | 181 { |
453 /* The partition representative. */ | 198 /* The partition representative. */ |
454 size_t representative; | 199 size_t representative; |
455 | 200 |
456 /* The next stack variable in the partition, or EOC. */ | 201 /* The next stack variable in the partition, or EOC. */ |
457 size_t next; | 202 size_t next; |
203 | |
204 /* The indices of conflicting stack variables. */ |
205 bitmap conflicts; | |
458 }; | 206 }; |
459 | 207 |
460 #define EOC ((size_t)-1) | 208 #define EOC ((size_t)-1) |
461 | 209 |
462 /* We have an array of such objects while deciding allocation. */ | 210 /* We have an array of such objects while deciding allocation. */ |
466 | 214 |
467 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size | 215 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size |
468 is non-decreasing. */ | 216 is non-decreasing. */ |
469 static size_t *stack_vars_sorted; | 217 static size_t *stack_vars_sorted; |
470 | 218 |
471 /* We have an interference graph between such objects. This graph | |
472 is lower triangular. */ | |
473 static bool *stack_vars_conflict; | |
474 static size_t stack_vars_conflict_alloc; | |
475 | |
476 /* The phase of the stack frame. This is the known misalignment of | 219 /* The phase of the stack frame. This is the known misalignment of |
477 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is, | 220 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is, |
478 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */ | 221 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */ |
479 static int frame_phase; | 222 static int frame_phase; |
480 | 223 |
500 align = MAX_SUPPORTED_STACK_ALIGNMENT; | 243 align = MAX_SUPPORTED_STACK_ALIGNMENT; |
501 | 244 |
502 if (SUPPORTS_STACK_ALIGNMENT) | 245 if (SUPPORTS_STACK_ALIGNMENT) |
503 { | 246 { |
504 if (crtl->stack_alignment_estimated < align) | 247 if (crtl->stack_alignment_estimated < align) |
505 { | 248 { |
506 gcc_assert(!crtl->stack_realign_processed); | 249 gcc_assert(!crtl->stack_realign_processed); |
507 crtl->stack_alignment_estimated = align; | 250 crtl->stack_alignment_estimated = align; |
508 } | 251 } |
509 } | 252 } |
510 | 253 |
511 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. | 254 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. |
512 So here we only make sure stack_alignment_needed >= align. */ | 255 So here we only make sure stack_alignment_needed >= align. */ |
513 if (crtl->stack_alignment_needed < align) | 256 if (crtl->stack_alignment_needed < align) |
514 crtl->stack_alignment_needed = align; | 257 crtl->stack_alignment_needed = align; |
515 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed) | 258 if (crtl->max_used_stack_slot_alignment < align) |
516 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed; | 259 crtl->max_used_stack_slot_alignment = align; |
517 | 260 |
518 return align / BITS_PER_UNIT; | 261 return align / BITS_PER_UNIT; |
519 } | 262 } |
520 | 263 |
521 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. | 264 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. |
557 add_stack_var (tree decl) | 300 add_stack_var (tree decl) |
558 { | 301 { |
559 if (stack_vars_num >= stack_vars_alloc) | 302 if (stack_vars_num >= stack_vars_alloc) |
560 { | 303 { |
561 if (stack_vars_alloc) | 304 if (stack_vars_alloc) |
562 stack_vars_alloc = stack_vars_alloc * 3 / 2; | 305 stack_vars_alloc = stack_vars_alloc * 3 / 2; |
563 else | 306 else |
564 stack_vars_alloc = 32; | 307 stack_vars_alloc = 32; |
565 stack_vars | 308 stack_vars |
566 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); | 309 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); |
567 } | 310 } |
568 stack_vars[stack_vars_num].decl = decl; | 311 stack_vars[stack_vars_num].decl = decl; |
569 stack_vars[stack_vars_num].offset = 0; | 312 stack_vars[stack_vars_num].offset = 0; |
570 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1); | 313 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1); |
571 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl); | 314 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl)); |
572 | 315 |
573 /* All variables are initially in their own partition. */ | 316 /* All variables are initially in their own partition. */ |
574 stack_vars[stack_vars_num].representative = stack_vars_num; | 317 stack_vars[stack_vars_num].representative = stack_vars_num; |
575 stack_vars[stack_vars_num].next = EOC; | 318 stack_vars[stack_vars_num].next = EOC; |
576 | 319 |
320 /* All variables initially conflict with no other. */ | |
321 stack_vars[stack_vars_num].conflicts = NULL; | |
322 | |
577 /* Ensure that this decl doesn't get put onto the list twice. */ | 323 /* Ensure that this decl doesn't get put onto the list twice. */ |
578 SET_DECL_RTL (decl, pc_rtx); | 324 set_rtl (decl, pc_rtx); |
579 | 325 |
580 stack_vars_num++; | 326 stack_vars_num++; |
581 } | |
582 | |
583 /* Compute the linear index of a lower-triangular coordinate (I, J). */ | |
584 | |
585 static size_t | |
586 triangular_index (size_t i, size_t j) | |
587 { | |
588 if (i < j) | |
589 { | |
590 size_t t; | |
591 t = i, i = j, j = t; | |
592 } | |
593 return (i * (i + 1)) / 2 + j; | |
594 } | |
595 | |
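triangular_index packs the symmetric conflict relation into a dense lower-triangular array: cell (i, j) with i >= j lives at index i*(i+1)/2 + j, so N variables need N*(N+1)/2 booleans regardless of how sparse the conflicts are. A self-contained sketch of that layout (illustrative, not GCC code):

```c
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

/* Standalone sketch: the dense lower-triangular layout computed by
   triangular_index above.  A symmetric relation over N items needs only
   N*(N+1)/2 cells because (i, j) and (j, i) share one.  */
static size_t
tri_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t = i;
      i = j;
      j = t;
    }
  return i * (i + 1) / 2 + j;
}

int
main (void)
{
  enum { N = 4 };
  bool conflict[N * (N + 1) / 2] = { false };

  conflict[tri_index (1, 3)] = true;       /* symmetric: marks (3, 1) too */
  assert (conflict[tri_index (3, 1)]);

  printf ("cells for N=%d: %d\n", N, N * (N + 1) / 2);   /* prints 10 */
  return 0;
}
```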
596 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */ | |
597 | |
598 static void | |
599 resize_stack_vars_conflict (size_t n) | |
600 { | |
601 size_t size = triangular_index (n-1, n-1) + 1; | |
602 | |
603 if (size <= stack_vars_conflict_alloc) | |
604 return; | |
605 | |
606 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size); | |
607 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0, | |
608 (size - stack_vars_conflict_alloc) * sizeof (bool)); | |
609 stack_vars_conflict_alloc = size; | |
610 } | 327 } |
611 | 328 |
612 /* Make the decls associated with luid's X and Y conflict. */ | 329 /* Make the decls associated with luid's X and Y conflict. */ |
613 | 330 |
614 static void | 331 static void |
615 add_stack_var_conflict (size_t x, size_t y) | 332 add_stack_var_conflict (size_t x, size_t y) |
616 { | 333 { |
617 size_t index = triangular_index (x, y); | 334 struct stack_var *a = &stack_vars[x]; |
618 gcc_assert (index < stack_vars_conflict_alloc); | 335 struct stack_var *b = &stack_vars[y]; |
619 stack_vars_conflict[index] = true; | 336 if (!a->conflicts) |
337 a->conflicts = BITMAP_ALLOC (NULL); | |
338 if (!b->conflicts) | |
339 b->conflicts = BITMAP_ALLOC (NULL); | |
340 bitmap_set_bit (a->conflicts, y); | |
341 bitmap_set_bit (b->conflicts, x); | |
620 } | 342 } |
621 | 343 |
622 /* Check whether the decls associated with luid's X and Y conflict. */ | 344 /* Check whether the decls associated with luid's X and Y conflict. */ |
623 | 345 |
624 static bool | 346 static bool |
625 stack_var_conflict_p (size_t x, size_t y) | 347 stack_var_conflict_p (size_t x, size_t y) |
626 { | 348 { |
627 size_t index = triangular_index (x, y); | 349 struct stack_var *a = &stack_vars[x]; |
628 gcc_assert (index < stack_vars_conflict_alloc); | 350 struct stack_var *b = &stack_vars[y]; |
629 return stack_vars_conflict[index]; | 351 if (!a->conflicts || !b->conflicts) |
630 } | 352 return false; |
631 | 353 return bitmap_bit_p (a->conflicts, y); |
354 } | |
355 | |
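The replacement on the right-hand side drops the dense matrix in favor of one lazily allocated conflict bitmap per stack variable, so variables that never conflict cost nothing and a missing bitmap simply means "no conflicts". A rough standalone equivalent using plain word arrays instead of GCC's bitmap API (all names invented):

```c
#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>

/* Standalone sketch (plain bitsets, not GCC's bitmap API): one lazily
   allocated conflict set per variable.  A variable with no recorded
   conflicts carries a NULL pointer and costs nothing.  */
struct svar
{
  uint32_t *conflicts;                  /* NULL until first conflict */
};

static void
add_conflict (struct svar *vars, size_t n, size_t x, size_t y)
{
  size_t nwords = (n + 31) / 32;
  if (!vars[x].conflicts)
    vars[x].conflicts = calloc (nwords, sizeof (uint32_t));
  if (!vars[y].conflicts)
    vars[y].conflicts = calloc (nwords, sizeof (uint32_t));
  vars[x].conflicts[y / 32] |= UINT32_C (1) << (y % 32);   /* symmetric */
  vars[y].conflicts[x / 32] |= UINT32_C (1) << (x % 32);
}

static bool
conflict_p (const struct svar *vars, size_t x, size_t y)
{
  if (!vars[x].conflicts || !vars[y].conflicts)
    return false;                       /* untouched vars conflict with nothing */
  return (vars[x].conflicts[y / 32] >> (y % 32)) & 1;
}

int
main (void)
{
  struct svar vars[8] = { { 0 } };
  add_conflict (vars, 8, 2, 5);
  assert (conflict_p (vars, 5, 2) && !conflict_p (vars, 2, 3));
  for (size_t i = 0; i < 8; i++)
    free (vars[i].conflicts);
  return 0;
}
```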
632 /* Returns true if TYPE is or contains a union type. */ | 356 /* Returns true if TYPE is or contains a union type. */ |
633 | 357 |
634 static bool | 358 static bool |
635 aggregate_contains_union_type (tree type) | 359 aggregate_contains_union_type (tree type) |
636 { | 360 { |
645 return false; | 369 return false; |
646 | 370 |
647 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) | 371 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) |
648 if (TREE_CODE (field) == FIELD_DECL) | 372 if (TREE_CODE (field) == FIELD_DECL) |
649 if (aggregate_contains_union_type (TREE_TYPE (field))) | 373 if (aggregate_contains_union_type (TREE_TYPE (field))) |
650 return true; | 374 return true; |
651 | 375 |
652 return false; | 376 return false; |
653 } | 377 } |
654 | 378 |
655 /* A subroutine of expand_used_vars. If two variables X and Y have alias | 379 /* A subroutine of expand_used_vars. If two variables X and Y have alias |
673 bool aggr_i = AGGREGATE_TYPE_P (type_i); | 397 bool aggr_i = AGGREGATE_TYPE_P (type_i); |
674 bool contains_union; | 398 bool contains_union; |
675 | 399 |
676 contains_union = aggregate_contains_union_type (type_i); | 400 contains_union = aggregate_contains_union_type (type_i); |
677 for (j = 0; j < i; ++j) | 401 for (j = 0; j < i; ++j) |
678 { | 402 { |
679 tree type_j = TREE_TYPE (stack_vars[j].decl); | 403 tree type_j = TREE_TYPE (stack_vars[j].decl); |
680 bool aggr_j = AGGREGATE_TYPE_P (type_j); | 404 bool aggr_j = AGGREGATE_TYPE_P (type_j); |
681 if (aggr_i != aggr_j | 405 if (aggr_i != aggr_j |
682 /* Either the objects conflict by means of type based | 406 /* Either the objects conflict by means of type based |
683 aliasing rules, or we need to add a conflict. */ | 407 aliasing rules, or we need to add a conflict. */ |
684 || !objects_must_conflict_p (type_i, type_j) | 408 || !objects_must_conflict_p (type_i, type_j) |
685 /* In case the types do not conflict ensure that access | 409 /* In case the types do not conflict ensure that access |
686 to elements will conflict. In case of unions we have | 410 to elements will conflict. In case of unions we have |
687 to be careful as type based aliasing rules may say | 411 to be careful as type based aliasing rules may say |
688 access to the same memory does not conflict. So play | 412 access to the same memory does not conflict. So play |
689 safe and add a conflict in this case. */ | 413 safe and add a conflict in this case. */ |
690 || contains_union) | 414 || contains_union) |
691 add_stack_var_conflict (i, j); | 415 add_stack_var_conflict (i, j); |
692 } | 416 } |
693 } | 417 } |
694 } | 418 } |
695 | 419 |
696 /* A subroutine of partition_stack_vars. A comparison function for qsort, | 420 /* A subroutine of partition_stack_vars. A comparison function for qsort, |
697 sorting an array of indices by the size of the object. */ | 421 sorting an array of indices by the size and type of the object. */ |
698 | 422 |
699 static int | 423 static int |
700 stack_var_size_cmp (const void *a, const void *b) | 424 stack_var_size_cmp (const void *a, const void *b) |
701 { | 425 { |
702 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size; | 426 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size; |
703 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size; | 427 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size; |
704 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl); | 428 tree decla, declb; |
705 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl); | 429 unsigned int uida, uidb; |
706 | 430 |
707 if (sa < sb) | 431 if (sa < sb) |
708 return -1; | 432 return -1; |
709 if (sa > sb) | 433 if (sa > sb) |
710 return 1; | 434 return 1; |
711 /* For stack variables of the same size use the uid of the decl | 435 decla = stack_vars[*(const size_t *)a].decl; |
712 to make the sort stable. */ | 436 declb = stack_vars[*(const size_t *)b].decl; |
437 /* For stack variables of the same size use an id of the decls |
438 to make the sort stable. Two SSA names are compared by their | |
439 version, SSA names come before non-SSA names, and two normal | |
440 decls are compared by their DECL_UID. */ | |
441 if (TREE_CODE (decla) == SSA_NAME) | |
442 { | |
443 if (TREE_CODE (declb) == SSA_NAME) | |
444 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb); | |
445 else | |
446 return -1; | |
447 } | |
448 else if (TREE_CODE (declb) == SSA_NAME) | |
449 return 1; | |
450 else | |
451 uida = DECL_UID (decla), uidb = DECL_UID (declb); | |
713 if (uida < uidb) | 452 if (uida < uidb) |
714 return -1; | 453 return -1; |
715 if (uida > uidb) | 454 if (uida > uidb) |
716 return 1; | 455 return 1; |
717 return 0; | 456 return 0; |
718 } | 457 } |
719 | 458 |
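qsort makes no stability guarantee, which is why stack_var_size_cmp breaks size ties with a unique id (SSA version or DECL_UID), giving a deterministic order across runs. A minimal standalone sketch of that tie-breaking pattern, with invented struct and field names:

```c
#include <stdio.h>
#include <stdlib.h>

/* Standalone sketch: qsort is not stable, so size ties are broken with a
   unique id to get a deterministic order, mirroring stack_var_size_cmp.  */
struct obj
{
  long size;
  unsigned uid;          /* unique, like DECL_UID or SSA_NAME_VERSION */
};

static int
obj_cmp (const void *pa, const void *pb)
{
  const struct obj *a = (const struct obj *) pa;
  const struct obj *b = (const struct obj *) pb;
  if (a->size != b->size)
    return a->size < b->size ? -1 : 1;   /* primary key: size */
  if (a->uid != b->uid)
    return a->uid < b->uid ? -1 : 1;     /* tie-break: unique id */
  return 0;
}

int
main (void)
{
  struct obj v[] = { { 8, 3 }, { 4, 7 }, { 8, 1 }, { 4, 2 } };
  qsort (v, 4, sizeof v[0], obj_cmp);
  for (int i = 0; i < 4; i++)
    printf ("size=%ld uid=%u\n", v[i].size, v[i].uid);   /* 4/2 4/7 8/1 8/3 */
  return 0;
}
```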
459 | |
460 /* If the points-to solution *PI points to variables that are in a partition | |
461 together with other variables add all partition members to the pointed-to | |
462 variables bitmap. */ | |
463 | |
464 static void | |
465 add_partitioned_vars_to_ptset (struct pt_solution *pt, | |
466 struct pointer_map_t *decls_to_partitions, | |
467 struct pointer_set_t *visited, bitmap temp) | |
468 { | |
469 bitmap_iterator bi; | |
470 unsigned i; | |
471 bitmap *part; | |
472 | |
473 if (pt->anything | |
474 || pt->vars == NULL | |
475 /* The pointed-to vars bitmap is shared, it is enough to | |
476 visit it once. */ | |
477 || pointer_set_insert(visited, pt->vars)) | |
478 return; | |
479 | |
480 bitmap_clear (temp); | |
481 | |
482 /* By using a temporary bitmap to store all members of the partitions | |
483 we have to add we make sure to visit each of the partitions only | |
484 once. */ | |
485 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi) | |
486 if ((!temp | |
487 || !bitmap_bit_p (temp, i)) | |
488 && (part = (bitmap *) pointer_map_contains (decls_to_partitions, | |
489 (void *)(size_t) i))) | |
490 bitmap_ior_into (temp, *part); | |
491 if (!bitmap_empty_p (temp)) | |
492 bitmap_ior_into (pt->vars, temp); | |
493 } | |
494 | |
495 /* Update points-to sets based on partition info, so we can use them on RTL. | |
496 The bitmaps representing stack partitions will be saved until expand, | |
497 where partitioned decls used as bases in memory expressions will be | |
498 rewritten. */ | |
499 | |
500 static void | |
501 update_alias_info_with_stack_vars (void) | |
502 { | |
503 struct pointer_map_t *decls_to_partitions = NULL; | |
504 size_t i, j; | |
505 tree var = NULL_TREE; | |
506 | |
507 for (i = 0; i < stack_vars_num; i++) | |
508 { | |
509 bitmap part = NULL; | |
510 tree name; | |
511 struct ptr_info_def *pi; | |
512 | |
513 /* Not interested in partitions with single variable. */ | |
514 if (stack_vars[i].representative != i | |
515 || stack_vars[i].next == EOC) | |
516 continue; | |
517 | |
518 if (!decls_to_partitions) | |
519 { | |
520 decls_to_partitions = pointer_map_create (); | |
521 cfun->gimple_df->decls_to_pointers = pointer_map_create (); | |
522 } | |
523 | |
524 /* Create an SSA_NAME that points to the partition for use | |
525 as base during alias-oracle queries on RTL for bases that | |
526 have been partitioned. */ | |
527 if (var == NULL_TREE) | |
528 var = create_tmp_var (ptr_type_node, NULL); | |
529 name = make_ssa_name (var, NULL); | |
530 | |
531 /* Create bitmaps representing partitions. They will be used for | |
532 points-to sets later, so use GGC alloc. */ | |
533 part = BITMAP_GGC_ALLOC (); | |
534 for (j = i; j != EOC; j = stack_vars[j].next) | |
535 { | |
536 tree decl = stack_vars[j].decl; | |
537 unsigned int uid = DECL_UID (decl); | |
538 /* We should never end up partitioning SSA names (though they | |
539 may end up on the stack). Neither should we allocate stack | |
540 space to something that is unused and thus unreferenced. */ | |
541 gcc_assert (DECL_P (decl) | |
542 && referenced_var_lookup (uid)); | |
543 bitmap_set_bit (part, uid); | |
544 *((bitmap *) pointer_map_insert (decls_to_partitions, | |
545 (void *)(size_t) uid)) = part; | |
546 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers, | |
547 decl)) = name; | |
548 } | |
549 | |
550 /* Make the SSA name point to all partition members. */ | |
551 pi = get_ptr_info (name); | |
552 pt_solution_set (&pi->pt, part); | |
553 } | |
554 | |
555 /* Make all points-to sets that contain one member of a partition | |
556 contain all members of the partition. */ | |
557 if (decls_to_partitions) | |
558 { | |
559 unsigned i; | |
560 struct pointer_set_t *visited = pointer_set_create (); | |
561 bitmap temp = BITMAP_ALLOC (NULL); | |
562 | |
563 for (i = 1; i < num_ssa_names; i++) | |
564 { | |
565 tree name = ssa_name (i); | |
566 struct ptr_info_def *pi; | |
567 | |
568 if (name | |
569 && POINTER_TYPE_P (TREE_TYPE (name)) | |
570 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL)) | |
571 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, | |
572 visited, temp); | |
573 } | |
574 | |
575 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, | |
576 decls_to_partitions, visited, temp); | |
577 add_partitioned_vars_to_ptset (&cfun->gimple_df->callused, | |
578 decls_to_partitions, visited, temp); | |
579 | |
580 pointer_set_destroy (visited); | |
581 pointer_map_destroy (decls_to_partitions); | |
582 BITMAP_FREE (temp); | |
583 } | |
584 } | |
585 | |
720 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND | 586 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND |
721 partitioning algorithm. Partitions A and B are known to be non-conflicting. | 587 partitioning algorithm. Partitions A and B are known to be non-conflicting. |
722 Merge them into a single partition A. | 588 Merge them into a single partition A. |
723 | 589 |
724 At the same time, add OFFSET to all variables in partition B. At the end | 590 At the same time, add OFFSET to all variables in partition B. At the end |
727 | 593 |
728 static void | 594 static void |
729 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset) | 595 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset) |
730 { | 596 { |
731 size_t i, last; | 597 size_t i, last; |
598 struct stack_var *vb = &stack_vars[b]; | |
599 bitmap_iterator bi; | |
600 unsigned u; | |
732 | 601 |
733 /* Update each element of partition B with the given offset, | 602 /* Update each element of partition B with the given offset, |
734 and merge them into partition A. */ | 603 and merge them into partition A. */ |
735 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next) | 604 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next) |
736 { | 605 { |
743 /* Update the required alignment of partition A to account for B. */ | 612 /* Update the required alignment of partition A to account for B. */ |
744 if (stack_vars[a].alignb < stack_vars[b].alignb) | 613 if (stack_vars[a].alignb < stack_vars[b].alignb) |
745 stack_vars[a].alignb = stack_vars[b].alignb; | 614 stack_vars[a].alignb = stack_vars[b].alignb; |
746 | 615 |
747 /* Update the interference graph and merge the conflicts. */ | 616 /* Update the interference graph and merge the conflicts. */ |
748 for (last = stack_vars_num, i = 0; i < last; ++i) | 617 if (vb->conflicts) |
749 if (stack_var_conflict_p (b, i)) | 618 { |
750 add_stack_var_conflict (a, i); | 619 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi) |
620 add_stack_var_conflict (a, stack_vars[u].representative); | |
621 BITMAP_FREE (vb->conflicts); | |
622 } | |
751 } | 623 } |
752 | 624 |
753 /* A subroutine of expand_used_vars. Binpack the variables into | 625 /* A subroutine of expand_used_vars. Binpack the variables into |
754 partitions constrained by the interference graph. The overall | 626 partitions constrained by the interference graph. The overall |
755 algorithm used is as follows: | 627 algorithm used is as follows: |
756 | 628 |
757 Sort the objects by size. | 629 Sort the objects by size. |
758 For each object A { | 630 For each object A { |
759 S = size(A) | 631 S = size(A) |
760 O = 0 | 632 O = 0 |
761 loop { | 633 loop { |
762 Look for the largest non-conflicting object B with size <= S. | 634 Look for the largest non-conflicting object B with size <= S. |
763 UNION (A, B) | 635 UNION (A, B) |
764 offset(B) = O | 636 offset(B) = O |
765 O += size(B) | 637 O += size(B) |
766 S -= size(B) | 638 S -= size(B) |
767 } | 639 } |
768 } | 640 } |
769 */ | 641 */ |
770 | 642 |
771 static void | 643 static void |
772 partition_stack_vars (void) | 644 partition_stack_vars (void) |
773 { | 645 { |
780 if (n == 1) | 652 if (n == 1) |
781 return; | 653 return; |
782 | 654 |
783 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp); | 655 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp); |
784 | 656 |
785 /* Special case: detect when all variables conflict, and thus we can't | |
786 do anything during the partitioning loop. It isn't uncommon (with | |
787 C code at least) to declare all variables at the top of the function, | |
788 and if we're not inlining, then all variables will be in the same scope. | |
789 Take advantage of very fast libc routines for this scan. */ | |
790 gcc_assert (sizeof(bool) == sizeof(char)); | |
791 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL) | |
792 return; | |
793 | |
794 for (si = 0; si < n; ++si) | 657 for (si = 0; si < n; ++si) |
795 { | 658 { |
796 size_t i = stack_vars_sorted[si]; | 659 size_t i = stack_vars_sorted[si]; |
797 HOST_WIDE_INT isize = stack_vars[i].size; | 660 HOST_WIDE_INT isize = stack_vars[i].size; |
798 HOST_WIDE_INT offset = 0; | 661 HOST_WIDE_INT offset = 0; |
799 | 662 |
800 for (sj = si; sj-- > 0; ) | 663 for (sj = si; sj-- > 0; ) |
801 { | 664 { |
802 size_t j = stack_vars_sorted[sj]; | 665 size_t j = stack_vars_sorted[sj]; |
803 HOST_WIDE_INT jsize = stack_vars[j].size; | 666 HOST_WIDE_INT jsize = stack_vars[j].size; |
804 unsigned int jalign = stack_vars[j].alignb; | 667 unsigned int jalign = stack_vars[j].alignb; |
805 | 668 |
806 /* Ignore objects that aren't partition representatives. */ | 669 /* Ignore objects that aren't partition representatives. */ |
807 if (stack_vars[j].representative != j) | 670 if (stack_vars[j].representative != j) |
808 continue; | 671 continue; |
809 | 672 |
810 /* Ignore objects too large for the remaining space. */ | 673 /* Ignore objects too large for the remaining space. */ |
811 if (isize < jsize) | 674 if (isize < jsize) |
812 continue; | 675 continue; |
813 | 676 |
814 /* Ignore conflicting objects. */ | 677 /* Ignore conflicting objects. */ |
815 if (stack_var_conflict_p (i, j)) | 678 if (stack_var_conflict_p (i, j)) |
816 continue; | 679 continue; |
817 | 680 |
818 /* Refine the remaining space check to include alignment. */ | 681 /* Refine the remaining space check to include alignment. */ |
819 if (offset & (jalign - 1)) | 682 if (offset & (jalign - 1)) |
820 { | 683 { |
821 HOST_WIDE_INT toff = offset; | 684 HOST_WIDE_INT toff = offset; |
822 toff += jalign - 1; | 685 toff += jalign - 1; |
823 toff &= -(HOST_WIDE_INT)jalign; | 686 toff &= -(HOST_WIDE_INT)jalign; |
824 if (isize - (toff - offset) < jsize) | 687 if (isize - (toff - offset) < jsize) |
825 continue; | 688 continue; |
826 | 689 |
827 isize -= toff - offset; | 690 isize -= toff - offset; |
828 offset = toff; | 691 offset = toff; |
829 } | 692 } |
830 | 693 |
831 /* UNION the objects, placing J at OFFSET. */ | 694 /* UNION the objects, placing J at OFFSET. */ |
832 union_stack_vars (i, j, offset); | 695 union_stack_vars (i, j, offset); |
833 | 696 |
834 isize -= jsize; | 697 isize -= jsize; |
835 if (isize == 0) | 698 if (isize == 0) |
836 break; | 699 break; |
837 } | 700 } |
838 } | 701 } |
702 | |
703 if (optimize) | |
704 update_alias_info_with_stack_vars (); | |
839 } | 705 } |
840 | 706 |
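A stripped-down standalone sketch of the binpacking loop documented before partition_stack_vars: walk the objects from largest to smallest and let each representative absorb the largest remaining objects that still fit in its footprint, recording an offset for each. Conflict checks and alignment are omitted here, and the sizes are invented for illustration:

```c
#include <stdio.h>

/* Standalone sketch of the UNION/FIND binpacking described above.
   rep[i] == i marks a partition representative; absorbed objects point
   at their representative and carry an offset inside its storage.  */
enum { N = 5 };

int
main (void)
{
  long size[N] = { 4, 8, 16, 2, 8 };
  int order[N] = { 2, 1, 4, 0, 3 };    /* indices sorted by decreasing size */
  int rep[N];
  long off[N] = { 0 };

  for (int i = 0; i < N; i++)
    rep[i] = i;                        /* every object starts alone */

  for (int si = 0; si < N; si++)
    {
      int a = order[si];
      long room = size[a], o = 0;
      if (rep[a] != a)
        continue;                      /* already packed into another */
      for (int sj = si + 1; sj < N; sj++)
        {
          int b = order[sj];
          if (rep[b] != b || size[b] > room)
            continue;                  /* taken, or too big to fit */
          rep[b] = a;                  /* UNION (A, B) */
          off[b] = o;                  /* offset(B) = O */
          o += size[b];                /* O += size(B) */
          room -= size[b];             /* S -= size(B) */
        }
    }

  for (int i = 0; i < N; i++)
    printf ("var %d: partition %d, offset %ld\n", i, rep[i], off[i]);
  return 0;
}
```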
841 /* A debugging aid for expand_used_vars. Dump the generated partitions. */ | 707 /* A debugging aid for expand_used_vars. Dump the generated partitions. */ |
842 | 708 |
843 static void | 709 static void |
849 { | 715 { |
850 i = stack_vars_sorted[si]; | 716 i = stack_vars_sorted[si]; |
851 | 717 |
852 /* Skip variables that aren't partition representatives, for now. */ | 718 /* Skip variables that aren't partition representatives, for now. */ |
853 if (stack_vars[i].representative != i) | 719 if (stack_vars[i].representative != i) |
854 continue; | 720 continue; |
855 | 721 |
856 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC | 722 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC |
857 " align %u\n", (unsigned long) i, stack_vars[i].size, | 723 " align %u\n", (unsigned long) i, stack_vars[i].size, |
858 stack_vars[i].alignb); | 724 stack_vars[i].alignb); |
859 | 725 |
860 for (j = i; j != EOC; j = stack_vars[j].next) | 726 for (j = i; j != EOC; j = stack_vars[j].next) |
861 { | 727 { |
862 fputc ('\t', dump_file); | 728 fputc ('\t', dump_file); |
863 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); | 729 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); |
864 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", | 730 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", |
865 stack_vars[j].offset); | 731 stack_vars[j].offset); |
866 } | 732 } |
867 } | 733 } |
868 } | 734 } |
869 | 735 |
870 /* Assign rtl to DECL at frame offset OFFSET. */ | 736 /* Assign rtl to DECL at frame offset OFFSET. */ |
871 | 737 |
872 static void | 738 static void |
873 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset) | 739 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset) |
874 { | 740 { |
875 HOST_WIDE_INT align; | 741 /* Alignment is unsigned. */ |
742 unsigned HOST_WIDE_INT align; | |
876 rtx x; | 743 rtx x; |
877 | 744 |
878 /* If this fails, we've overflowed the stack frame. Error nicely? */ | 745 /* If this fails, we've overflowed the stack frame. Error nicely? */ |
879 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); | 746 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); |
880 | 747 |
881 x = plus_constant (virtual_stack_vars_rtx, offset); | 748 x = plus_constant (virtual_stack_vars_rtx, offset); |
882 x = gen_rtx_MEM (DECL_MODE (decl), x); | 749 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x); |
883 | 750 |
884 /* Set alignment we actually gave this decl. */ | 751 if (TREE_CODE (decl) != SSA_NAME) |
885 offset -= frame_phase; | 752 { |
886 align = offset & -offset; | 753 /* Set alignment we actually gave this decl if it isn't an SSA name. |
886 align *= BITS_PER_UNIT; | 754 If it is, we generate stack slots only accidentally, so it isn't as |
888 if (align > STACK_BOUNDARY || align == 0) | 755 important, we'll simply use the alignment that is already set. */ |
756 offset -= frame_phase; | |
757 align = offset & -offset; | |
758 align *= BITS_PER_UNIT; | |
759 if (align == 0) | |
889 align = STACK_BOUNDARY; | 760 align = STACK_BOUNDARY; |
890 DECL_ALIGN (decl) = align; | 761 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT) |
891 DECL_USER_ALIGN (decl) = 0; | 762 align = MAX_SUPPORTED_STACK_ALIGNMENT; |
892 | 763 |
893 set_mem_attributes (x, decl, true); | 764 DECL_ALIGN (decl) = align; |
894 SET_DECL_RTL (decl, x); | 765 DECL_USER_ALIGN (decl) = 0; |
766 } | |
767 | |
768 set_mem_attributes (x, SSAVAR (decl), true); | |
769 set_rtl (decl, x); | |
895 } | 770 } |
896 | 771 |
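The align = offset & -offset step in the non-SSA branch isolates the lowest set bit of the offset, i.e. the largest power of two that divides it, which is exactly the alignment the slot ended up with; a zero offset gives zero, hence the STACK_BOUNDARY fallback. A tiny standalone demonstration:

```c
#include <stdio.h>

/* Standalone demonstration: OFFSET & -OFFSET keeps only the lowest set
   bit, i.e. the largest power of two dividing OFFSET.  A zero offset
   yields zero, which is why the code above falls back to STACK_BOUNDARY.  */
int
main (void)
{
  long offsets[] = { 8, 12, 24, 64 };
  for (int i = 0; i < 4; i++)
    {
      long off = offsets[i];
      printf ("offset %3ld -> %2ld-byte aligned\n", off, off & -off);
    }
  return 0;
}
```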
897 /* A subroutine of expand_used_vars. Give each partition representative | 772 /* A subroutine of expand_used_vars. Give each partition representative |
898 a unique location within the stack frame. Update each partition member | 773 a unique location within the stack frame. Update each partition member |
899 with that location. */ | 774 with that location. */ |
909 | 784 |
910 i = stack_vars_sorted[si]; | 785 i = stack_vars_sorted[si]; |
911 | 786 |
912 /* Skip variables that aren't partition representatives, for now. */ | 787 /* Skip variables that aren't partition representatives, for now. */ |
913 if (stack_vars[i].representative != i) | 788 if (stack_vars[i].representative != i) |
914 continue; | 789 continue; |
915 | 790 |
916 /* Skip variables that have already had rtl assigned. See also | 791 /* Skip variables that have already had rtl assigned. See also |
917 add_stack_var where we perpetrate this pc_rtx hack. */ | 792 add_stack_var where we perpetrate this pc_rtx hack. */ |
918 if (DECL_RTL (stack_vars[i].decl) != pc_rtx) | 793 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME |
919 continue; | 794 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)] |
795 : DECL_RTL (stack_vars[i].decl)) != pc_rtx) | |
796 continue; | |
920 | 797 |
921 /* Check the predicate to see whether this variable should be | 798 /* Check the predicate to see whether this variable should be |
922 allocated in this pass. */ | 799 allocated in this pass. */ |
923 if (pred && !pred (stack_vars[i].decl)) | 800 if (pred && !pred (stack_vars[i].decl)) |
924 continue; | 801 continue; |
925 | 802 |
926 offset = alloc_stack_frame_space (stack_vars[i].size, | 803 offset = alloc_stack_frame_space (stack_vars[i].size, |
927 stack_vars[i].alignb); | 804 stack_vars[i].alignb); |
928 | 805 |
929 /* Create rtl for each variable based on their location within the | 806 /* Create rtl for each variable based on their location within the |
930 partition. */ | 807 partition. */ |
931 for (j = i; j != EOC; j = stack_vars[j].next) | 808 for (j = i; j != EOC; j = stack_vars[j].next) |
932 { | 809 { |
933 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); | 810 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); |
934 expand_one_stack_var_at (stack_vars[j].decl, | 811 expand_one_stack_var_at (stack_vars[j].decl, |
935 stack_vars[j].offset + offset); | 812 stack_vars[j].offset + offset); |
936 } | 813 } |
937 } | 814 } |
938 } | 815 } |
939 | 816 |
940 /* Take into account all sizes of partitions and reset DECL_RTLs. */ | 817 /* Take into account all sizes of partitions and reset DECL_RTLs. */ |
941 static HOST_WIDE_INT | 818 static HOST_WIDE_INT |
948 { | 825 { |
949 i = stack_vars_sorted[si]; | 826 i = stack_vars_sorted[si]; |
950 | 827 |
951 /* Skip variables that aren't partition representatives, for now. */ | 828 /* Skip variables that aren't partition representatives, for now. */ |
952 if (stack_vars[i].representative != i) | 829 if (stack_vars[i].representative != i) |
953 continue; | 830 continue; |
954 | 831 |
955 size += stack_vars[i].size; | 832 size += stack_vars[i].size; |
956 for (j = i; j != EOC; j = stack_vars[j].next) | 833 for (j = i; j != EOC; j = stack_vars[j].next) |
957 SET_DECL_RTL (stack_vars[j].decl, NULL); | 834 set_rtl (stack_vars[j].decl, NULL); |
958 } | 835 } |
959 return size; | 836 return size; |
960 } | 837 } |
961 | 838 |
962 /* A subroutine of expand_one_var. Called to immediately assign rtl | 839 /* A subroutine of expand_one_var. Called to immediately assign rtl |
965 static void | 842 static void |
966 expand_one_stack_var (tree var) | 843 expand_one_stack_var (tree var) |
967 { | 844 { |
968 HOST_WIDE_INT size, offset, align; | 845 HOST_WIDE_INT size, offset, align; |
969 | 846 |
970 size = tree_low_cst (DECL_SIZE_UNIT (var), 1); | 847 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1); |
971 align = get_decl_align_unit (var); | 848 align = get_decl_align_unit (SSAVAR (var)); |
972 offset = alloc_stack_frame_space (size, align); | 849 offset = alloc_stack_frame_space (size, align); |
973 | 850 |
974 expand_one_stack_var_at (var, offset); | 851 expand_one_stack_var_at (var, offset); |
975 } | 852 } |
976 | 853 |
987 that will reside in a pseudo register. */ | 864 that will reside in a pseudo register. */ |
988 | 865 |
989 static void | 866 static void |
990 expand_one_register_var (tree var) | 867 expand_one_register_var (tree var) |
991 { | 868 { |
992 tree type = TREE_TYPE (var); | 869 tree decl = SSAVAR (var); |
993 int unsignedp = TYPE_UNSIGNED (type); | 870 tree type = TREE_TYPE (decl); |
994 enum machine_mode reg_mode | 871 enum machine_mode reg_mode = promote_decl_mode (decl, NULL); |
995 = promote_mode (type, DECL_MODE (var), &unsignedp, 0); | |
996 rtx x = gen_reg_rtx (reg_mode); | 872 rtx x = gen_reg_rtx (reg_mode); |
997 | 873 |
998 SET_DECL_RTL (var, x); | 874 set_rtl (var, x); |
999 | 875 |
1000 /* Note if the object is a user variable. */ | 876 /* Note if the object is a user variable. */ |
1001 if (!DECL_ARTIFICIAL (var)) | 877 if (!DECL_ARTIFICIAL (decl)) |
1002 mark_user_reg (x); | 878 mark_user_reg (x); |
1003 | 879 |
1004 if (POINTER_TYPE_P (type)) | 880 if (POINTER_TYPE_P (type)) |
1005 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var)))); | 881 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type))); |
1006 } | 882 } |
1007 | 883 |
1008 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that | 884 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that |
1009 has some associated error, e.g. its type is error-mark. We just need | 885 has some associated error, e.g. its type is error-mark. We just need |
1010 to pick something that won't crash the rest of the compiler. */ | 886 to pick something that won't crash the rest of the compiler. */ |
1060 return true; | 936 return true; |
1061 } | 937 } |
1062 | 938 |
1063 /* A subroutine of expand_used_vars. Expand one variable according to | 939 /* A subroutine of expand_used_vars. Expand one variable according to |
1064 its flavor. Variables to be placed on the stack are not actually | 940 its flavor. Variables to be placed on the stack are not actually |
1065 expanded yet, merely recorded. | 941 expanded yet, merely recorded. |
1066 When REALLY_EXPAND is false, only add stack values to be allocated. | 942 When REALLY_EXPAND is false, only add stack values to be allocated. |
1067 Return stack usage this variable is supposed to take. | 943 Return stack usage this variable is supposed to take. |
1068 */ | 944 */ |
1069 | 945 |
1070 static HOST_WIDE_INT | 946 static HOST_WIDE_INT |
1071 expand_one_var (tree var, bool toplevel, bool really_expand) | 947 expand_one_var (tree var, bool toplevel, bool really_expand) |
1072 { | 948 { |
949 tree origvar = var; | |
950 var = SSAVAR (var); | |
951 | |
1073 if (SUPPORTS_STACK_ALIGNMENT | 952 if (SUPPORTS_STACK_ALIGNMENT |
1074 && TREE_TYPE (var) != error_mark_node | 953 && TREE_TYPE (var) != error_mark_node |
1075 && TREE_CODE (var) == VAR_DECL) | 954 && TREE_CODE (var) == VAR_DECL) |
1076 { | 955 { |
1077 unsigned int align; | 956 unsigned int align; |
1078 | 957 |
1079 /* Because we don't know if VAR will be in register or on stack, | 958 /* Because we don't know if VAR will be in register or on stack, |
1080 we conservatively assume it will be on stack even if VAR is | 959 we conservatively assume it will be on stack even if VAR is |
1081 eventually put into register after RA pass. For non-automatic | 960 eventually put into register after RA pass. For non-automatic |
1082 variables, which won't be on stack, we collect alignment of | 961 variables, which won't be on stack, we collect alignment of |
1083 type and ignore user specified alignment. */ | 962 type and ignore user specified alignment. */ |
1084 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 963 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
1085 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), | 964 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), |
1086 TYPE_MODE (TREE_TYPE (var)), | 965 TYPE_MODE (TREE_TYPE (var)), |
1087 TYPE_ALIGN (TREE_TYPE (var))); | 966 TYPE_ALIGN (TREE_TYPE (var))); |
1088 else | 967 else |
1089 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); | 968 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); |
1090 | 969 |
1091 if (crtl->stack_alignment_estimated < align) | 970 if (crtl->stack_alignment_estimated < align) |
1092 { | 971 { |
1093 /* stack_alignment_estimated shouldn't change after stack | 972 /* stack_alignment_estimated shouldn't change after stack |
1094 realign decision is made. */ | 973 realign decision is made. */ |
1095 gcc_assert(!crtl->stack_realign_processed); | 974 gcc_assert(!crtl->stack_realign_processed); |
1096 crtl->stack_alignment_estimated = align; | 975 crtl->stack_alignment_estimated = align; |
1097 } | 976 } |
1098 } | 977 } |
1099 | 978 |
1100 if (TREE_CODE (var) != VAR_DECL) | 979 if (TREE_CODE (origvar) == SSA_NAME) |
980 { | |
981 gcc_assert (TREE_CODE (var) != VAR_DECL | |
982 || (!DECL_EXTERNAL (var) | |
983 && !DECL_HAS_VALUE_EXPR_P (var) | |
984 && !TREE_STATIC (var) | |
985 && TREE_TYPE (var) != error_mark_node | |
986 && !DECL_HARD_REGISTER (var) | |
987 && really_expand)); | |
988 } | |
989 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME) | |
1101 ; | 990 ; |
1102 else if (DECL_EXTERNAL (var)) | 991 else if (DECL_EXTERNAL (var)) |
1103 ; | 992 ; |
1104 else if (DECL_HAS_VALUE_EXPR_P (var)) | 993 else if (DECL_HAS_VALUE_EXPR_P (var)) |
1105 ; | 994 ; |
1106 else if (TREE_STATIC (var)) | 995 else if (TREE_STATIC (var)) |
1107 ; | 996 ; |
1108 else if (DECL_RTL_SET_P (var)) | 997 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var)) |
1109 ; | 998 ; |
1110 else if (TREE_TYPE (var) == error_mark_node) | 999 else if (TREE_TYPE (var) == error_mark_node) |
1111 { | 1000 { |
1112 if (really_expand) | 1001 if (really_expand) |
1113 expand_one_error_var (var); | 1002 expand_one_error_var (var); |
1114 } | 1003 } |
1115 else if (DECL_HARD_REGISTER (var)) | 1004 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var)) |
1116 { | 1005 { |
1117 if (really_expand) | 1006 if (really_expand) |
1118 expand_one_hard_reg_var (var); | 1007 expand_one_hard_reg_var (var); |
1119 } | 1008 } |
1120 else if (use_register_for_decl (var)) | 1009 else if (use_register_for_decl (var)) |
1121 { | 1010 { |
1122 if (really_expand) | 1011 if (really_expand) |
1123 expand_one_register_var (var); | 1012 expand_one_register_var (origvar); |
1124 } | 1013 } |
1125 else if (!host_integerp (DECL_SIZE_UNIT (var), 1)) | 1014 else if (defer_stack_allocation (var, toplevel)) |
1015 add_stack_var (origvar); | |
1016 else | |
1126 { | 1017 { |
1127 if (really_expand) | 1018 if (really_expand) |
1128 { | 1019 expand_one_stack_var (origvar); |
1129 error ("size of variable %q+D is too large", var); | |
1130 expand_one_error_var (var); | |
1131 } | |
1132 } | |
1133 else if (defer_stack_allocation (var, toplevel)) | |
1134 add_stack_var (var); | |
1135 else | |
1136 { | |
1137 if (really_expand) | |
1138 expand_one_stack_var (var); | |
1139 return tree_low_cst (DECL_SIZE_UNIT (var), 1); | 1020 return tree_low_cst (DECL_SIZE_UNIT (var), 1); |
1140 } | 1021 } |
1141 return 0; | 1022 return 0; |
1142 } | 1023 } |
1143 | 1024 |
1167 expand_used_vars_for_block (t, false); | 1048 expand_used_vars_for_block (t, false); |
1168 | 1049 |
1169 /* Since we do not track exact variable lifetimes (which is not even | 1050 /* Since we do not track exact variable lifetimes (which is not even |
1170 possible for variables whose address escapes), we mirror the block | 1051 possible for variables whose address escapes), we mirror the block |
1171 tree in the interference graph. Here we cause all variables at this | 1052 tree in the interference graph. Here we cause all variables at this |
1172 level, and all sublevels, to conflict. Do make certain that a | 1053 level, and all sublevels, to conflict. */ |
1173 variable conflicts with itself. */ | |
1174 if (old_sv_num < this_sv_num) | 1054 if (old_sv_num < this_sv_num) |
1175 { | 1055 { |
1176 new_sv_num = stack_vars_num; | 1056 new_sv_num = stack_vars_num; |
1177 resize_stack_vars_conflict (new_sv_num); | |
1178 | 1057 |
1179 for (i = old_sv_num; i < new_sv_num; ++i) | 1058 for (i = old_sv_num; i < new_sv_num; ++i) |
1180 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;) | 1059 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;) |
1181 add_stack_var_conflict (i, j); | 1060 add_stack_var_conflict (i, j); |
1182 } | 1061 } |
1183 } | 1062 } |
1184 | 1063 |
1185 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree | 1064 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree |
1186 and clear TREE_USED on all local variables. */ | 1065 and clear TREE_USED on all local variables. */ |
1198 clear_tree_used (t); | 1077 clear_tree_used (t); |
1199 } | 1078 } |
1200 | 1079 |
1201 /* Examine TYPE and determine a bit mask of the following features. */ | 1080 /* Examine TYPE and determine a bit mask of the following features. */ |
1202 | 1081 |
1203 #define SPCT_HAS_LARGE_CHAR_ARRAY 1 | 1082 #define SPCT_HAS_LARGE_CHAR_ARRAY 1 |
1204 #define SPCT_HAS_SMALL_CHAR_ARRAY 2 | 1083 #define SPCT_HAS_SMALL_CHAR_ARRAY 2 |
1205 #define SPCT_HAS_ARRAY 4 | 1084 #define SPCT_HAS_ARRAY 4 |
1206 #define SPCT_HAS_AGGREGATE 8 | 1085 #define SPCT_HAS_AGGREGATE 8 |
1207 | 1086 |
1208 static unsigned int | 1087 static unsigned int |
1209 stack_protect_classify_type (tree type) | 1088 stack_protect_classify_type (tree type) |
1210 { | 1089 { |
1211 unsigned int ret = 0; | 1090 unsigned int ret = 0; |
1214 switch (TREE_CODE (type)) | 1093 switch (TREE_CODE (type)) |
1215 { | 1094 { |
1216 case ARRAY_TYPE: | 1095 case ARRAY_TYPE: |
1217 t = TYPE_MAIN_VARIANT (TREE_TYPE (type)); | 1096 t = TYPE_MAIN_VARIANT (TREE_TYPE (type)); |
1218 if (t == char_type_node | 1097 if (t == char_type_node |
1219 || t == signed_char_type_node | 1098 || t == signed_char_type_node |
1220 || t == unsigned_char_type_node) | 1099 || t == unsigned_char_type_node) |
1221 { | 1100 { |
1222 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); | 1101 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); |
1223 unsigned HOST_WIDE_INT len; | 1102 unsigned HOST_WIDE_INT len; |
1224 | 1103 |
1225 if (!TYPE_SIZE_UNIT (type) | 1104 if (!TYPE_SIZE_UNIT (type) |
1226 || !host_integerp (TYPE_SIZE_UNIT (type), 1)) | 1105 || !host_integerp (TYPE_SIZE_UNIT (type), 1)) |
1227 len = max; | 1106 len = max; |
1228 else | |
1229 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1); | |
1230 | |
1231 if (len < max) | |
1232 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1233 else | |
1234 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1235 } | |
1236 else | 1107 else |
1237 ret = SPCT_HAS_ARRAY; | 1108 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1); |
1109 | |
1110 if (len < max) | |
1111 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1112 else | |
1113 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1114 } | |
1115 else | |
1116 ret = SPCT_HAS_ARRAY; | |
1238 break; | 1117 break; |
1239 | 1118 |
1240 case UNION_TYPE: | 1119 case UNION_TYPE: |
1241 case QUAL_UNION_TYPE: | 1120 case QUAL_UNION_TYPE: |
1242 case RECORD_TYPE: | 1121 case RECORD_TYPE: |
1243 ret = SPCT_HAS_AGGREGATE; | 1122 ret = SPCT_HAS_AGGREGATE; |
1244 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t)) | 1123 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t)) |
1245 if (TREE_CODE (t) == FIELD_DECL) | 1124 if (TREE_CODE (t) == FIELD_DECL) |
1246 ret |= stack_protect_classify_type (TREE_TYPE (t)); | 1125 ret |= stack_protect_classify_type (TREE_TYPE (t)); |
1247 break; | 1126 break; |
1248 | 1127 |
1249 default: | 1128 default: |
1250 break; | 1129 break; |
1251 } | 1130 } |
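
For orientation, this classification feeds the -fstack-protector heuristics: character arrays shorter than --param ssp-buffer-size (8 by default) count as small, longer ones as large, and records fold in the bits of every field. Hypothetical decls and the bits the function above would report, assuming the default parameter value:

    char tiny[4];      /* SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY */
    char buf[64];      /* SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY */
    int nums[16];      /* SPCT_HAS_ARRAY                             */
    struct pkt         /* SPCT_HAS_AGGREGATE, plus the bits          */
    {                  /* contributed by each field's type           */
      char name[64];
      int len;
    };
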
1268 has_short_buffer = true; | 1147 has_short_buffer = true; |
1269 | 1148 |
1270 if (flag_stack_protect == 2) | 1149 if (flag_stack_protect == 2) |
1271 { | 1150 { |
1272 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) | 1151 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) |
1273 && !(bits & SPCT_HAS_AGGREGATE)) | 1152 && !(bits & SPCT_HAS_AGGREGATE)) |
1274 ret = 1; | 1153 ret = 1; |
1275 else if (bits & SPCT_HAS_ARRAY) | 1154 else if (bits & SPCT_HAS_ARRAY) |
1276 ret = 2; | 1155 ret = 2; |
1277 } | 1156 } |
1278 else | 1157 else |
1279 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0; | 1158 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0; |
1280 | 1159 |
1281 if (ret) | 1160 if (ret) |
1314 | 1193 |
1315 for (i = 0; i < n; ++i) | 1194 for (i = 0; i < n; ++i) |
1316 { | 1195 { |
1317 unsigned char ph_i = phase[i]; | 1196 unsigned char ph_i = phase[i]; |
1318 for (j = 0; j < i; ++j) | 1197 for (j = 0; j < i; ++j) |
1319 if (ph_i != phase[j]) | 1198 if (ph_i != phase[j]) |
1320 add_stack_var_conflict (i, j); | 1199 add_stack_var_conflict (i, j); |
1321 } | 1200 } |
1322 | 1201 |
1323 XDELETEVEC (phase); | 1202 XDELETEVEC (phase); |
1324 } | 1203 } |
1325 | 1204 |
1326 /* Create a decl for the guard at the top of the stack frame. */ | 1205 /* Create a decl for the guard at the top of the stack frame. */ |
1327 | 1206 |
1328 static void | 1207 static void |
1329 create_stack_guard (void) | 1208 create_stack_guard (void) |
1330 { | 1209 { |
1331 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node); | 1210 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl), |
1211 VAR_DECL, NULL, ptr_type_node); | |
1332 TREE_THIS_VOLATILE (guard) = 1; | 1212 TREE_THIS_VOLATILE (guard) = 1; |
1333 TREE_USED (guard) = 1; | 1213 TREE_USED (guard) = 1; |
1334 expand_one_stack_var (guard); | 1214 expand_one_stack_var (guard); |
1335 crtl->stack_protect_guard = guard; | 1215 crtl->stack_protect_guard = guard; |
1336 } | 1216 } |
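
A conceptual sketch (not GCC internals) of what the guard amounts to once expanded: the prologue copies a reference value into the volatile slot created above, and the epilogue compares and aborts on mismatch. The symbol names below are the glibc ones; other targets and runtimes differ.

    extern unsigned long __stack_chk_guard;
    extern void __stack_chk_fail (void);

    void
    protected (const char *src)
    {
      volatile unsigned long canary = __stack_chk_guard;  /* prologue copy */
      char buf[64];
      __builtin_strcpy (buf, src);        /* body that might smash buf     */
      if (canary != __stack_chk_guard)    /* epilogue check                */
        __stack_chk_fail ();
    }
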
1342 TOPLEVEL is true if this is the outermost BLOCK. */ | 1222 TOPLEVEL is true if this is the outermost BLOCK. */ |
1343 | 1223 |
1344 static HOST_WIDE_INT | 1224 static HOST_WIDE_INT |
1345 account_used_vars_for_block (tree block, bool toplevel) | 1225 account_used_vars_for_block (tree block, bool toplevel) |
1346 { | 1226 { |
1347 size_t i, j, old_sv_num, this_sv_num, new_sv_num; | |
1348 tree t; | 1227 tree t; |
1349 HOST_WIDE_INT size = 0; | 1228 HOST_WIDE_INT size = 0; |
1350 | |
1351 old_sv_num = toplevel ? 0 : stack_vars_num; | |
1352 | 1229 |
1353 /* Expand all variables at this level. */ | 1230 /* Expand all variables at this level. */ |
1354 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) | 1231 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) |
1355 if (TREE_USED (t)) | 1232 if (TREE_USED (t)) |
1356 size += expand_one_var (t, toplevel, false); | 1233 size += expand_one_var (t, toplevel, false); |
1357 | 1234 |
1358 this_sv_num = stack_vars_num; | |
1359 | |
1360 /* Expand all variables at containing levels. */ | 1235 /* Expand all variables at containing levels. */ |
1361 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) | 1236 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) |
1362 size += account_used_vars_for_block (t, false); | 1237 size += account_used_vars_for_block (t, false); |
1363 | 1238 |
1364 /* Since we do not track exact variable lifetimes (which is not even | |
1365 possible for variables whose address escapes), we mirror the block | |
1366 tree in the interference graph. Here we cause all variables at this | |
1367 level, and all sublevels, to conflict. Do make certain that a | |
1368 variable conflicts with itself. */ | |
1369 if (old_sv_num < this_sv_num) | |
1370 { | |
1371 new_sv_num = stack_vars_num; | |
1372 resize_stack_vars_conflict (new_sv_num); | |
1373 | |
1374 for (i = old_sv_num; i < new_sv_num; ++i) | |
1375 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;) | |
1376 add_stack_var_conflict (i, j); | |
1377 } | |
1378 return size; | 1239 return size; |
1379 } | 1240 } |
1380 | 1241 |
1381 /* Prepare for expanding variables. */ | 1242 /* Prepare for expanding variables. */ |
1382 static void | 1243 static void |
1383 init_vars_expansion (void) | 1244 init_vars_expansion (void) |
1384 { | 1245 { |
1385 tree t; | 1246 tree t; |
1386 /* Set TREE_USED on all variables in the local_decls. */ | 1247 /* Set TREE_USED on all variables in the local_decls. */ |
1387 for (t = cfun->local_decls; t; t = TREE_CHAIN (t)) | 1248 for (t = cfun->local_decls; t; t = TREE_CHAIN (t)) |
1397 | 1258 |
1398 /* Free up stack variable graph data. */ | 1259 /* Free up stack variable graph data. */ |
1399 static void | 1260 static void |
1400 fini_vars_expansion (void) | 1261 fini_vars_expansion (void) |
1401 { | 1262 { |
1263 size_t i, n = stack_vars_num; | |
1264 for (i = 0; i < n; i++) | |
1265 BITMAP_FREE (stack_vars[i].conflicts); | |
1402 XDELETEVEC (stack_vars); | 1266 XDELETEVEC (stack_vars); |
1403 XDELETEVEC (stack_vars_sorted); | 1267 XDELETEVEC (stack_vars_sorted); |
1404 XDELETEVEC (stack_vars_conflict); | |
1405 stack_vars = NULL; | 1268 stack_vars = NULL; |
1406 stack_vars_alloc = stack_vars_num = 0; | 1269 stack_vars_alloc = stack_vars_num = 0; |
1407 stack_vars_conflict = NULL; | |
1408 stack_vars_conflict_alloc = 0; | |
1409 } | 1270 } |
1410 | 1271 |
1411 /* Make a fair guess for the size of the stack frame of the current | 1272 /* Make a fair guess for the size of the stack frame of the current |
1412 function. This doesn't have to be exact, the result is only used | 1273 function. This doesn't have to be exact, the result is only used |
1413 in the inline heuristics. So we don't want to run the full stack | 1274 in the inline heuristics. So we don't want to run the full stack |
1438 if (stack_vars_num > 0) | 1299 if (stack_vars_num > 0) |
1439 { | 1300 { |
1440 /* Fake sorting the stack vars for account_stack_vars (). */ | 1301 /* Fake sorting the stack vars for account_stack_vars (). */ |
1441 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); | 1302 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); |
1442 for (i = 0; i < stack_vars_num; ++i) | 1303 for (i = 0; i < stack_vars_num; ++i) |
1443 stack_vars_sorted[i] = i; | 1304 stack_vars_sorted[i] = i; |
1444 size += account_stack_vars (); | 1305 size += account_stack_vars (); |
1445 fini_vars_expansion (); | 1306 fini_vars_expansion (); |
1446 } | 1307 } |
1447 | 1308 |
1448 return size; | 1309 return size; |
1452 | 1313 |
1453 static void | 1314 static void |
1454 expand_used_vars (void) | 1315 expand_used_vars (void) |
1455 { | 1316 { |
1456 tree t, next, outer_block = DECL_INITIAL (current_function_decl); | 1317 tree t, next, outer_block = DECL_INITIAL (current_function_decl); |
1318 unsigned i; | |
1457 | 1319 |
1458 /* Compute the phase of the stack frame for this function. */ | 1320 /* Compute the phase of the stack frame for this function. */ |
1459 { | 1321 { |
1460 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 1322 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1461 int off = STARTING_FRAME_OFFSET % align; | 1323 int off = STARTING_FRAME_OFFSET % align; |
1462 frame_phase = off ? align - off : 0; | 1324 frame_phase = off ? align - off : 0; |
1463 } | 1325 } |
1464 | 1326 |
1465 init_vars_expansion (); | 1327 init_vars_expansion (); |
1466 | 1328 |
1329 for (i = 0; i < SA.map->num_partitions; i++) | |
1330 { | |
1331 tree var = partition_to_var (SA.map, i); | |
1332 | |
1333 gcc_assert (is_gimple_reg (var)); | |
1334 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL) | |
1335 expand_one_var (var, true, true); | |
1336 else | |
1337 { | |
1338 /* This is a PARM_DECL or RESULT_DECL. For those partitions that | |
1339 contain the default def (representing the parm or result itself) | |
1340 we don't do anything here. But those which don't contain the | |
1341 default def (representing a temporary based on the parm/result) | |
1342 we need to allocate space just like for normal VAR_DECLs. */ | |
1343 if (!bitmap_bit_p (SA.partition_has_default_def, i)) | |
1344 { | |
1345 expand_one_var (var, true, true); | |
1346 gcc_assert (SA.partition_to_pseudo[i]); | |
1347 } | |
1348 } | |
1349 } | |
1350 | |
1467 /* At this point all variables on the local_decls with TREE_USED | 1351 /* At this point all variables on the local_decls with TREE_USED |
1468 set are not associated with any block scope. Lay them out. */ | 1352 set are not associated with any block scope. Lay them out. */ |
1469 t = cfun->local_decls; | 1353 t = cfun->local_decls; |
1470 cfun->local_decls = NULL_TREE; | 1354 cfun->local_decls = NULL_TREE; |
1471 for (; t; t = next) | 1355 for (; t; t = next) |
1473 tree var = TREE_VALUE (t); | 1357 tree var = TREE_VALUE (t); |
1474 bool expand_now = false; | 1358 bool expand_now = false; |
1475 | 1359 |
1476 next = TREE_CHAIN (t); | 1360 next = TREE_CHAIN (t); |
1477 | 1361 |
1362 /* Expanded above already. */ | |
1363 if (is_gimple_reg (var)) | |
1364 { | |
1365 TREE_USED (var) = 0; | |
1366 ggc_free (t); | |
1367 continue; | |
1368 } | |
1478 /* We didn't set a block for static or extern because it's hard | 1369 /* We didn't set a block for static or extern because it's hard |
1479 to tell the difference between a global variable (re)declared | 1370 to tell the difference between a global variable (re)declared |
1480 in a local scope, and one that's really declared there to | 1371 in a local scope, and one that's really declared there to |
1481 begin with. And it doesn't really matter much, since we're | 1372 begin with. And it doesn't really matter much, since we're |
1482 not giving them stack space. Expand them now. */ | 1373 not giving them stack space. Expand them now. */ |
1483 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 1374 else if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
1484 expand_now = true; | 1375 expand_now = true; |
1485 | |
1486 /* Any variable that could have been hoisted into an SSA_NAME | |
1487 will have been propagated anywhere the optimizers chose, | |
1488 i.e. not confined to their original block. Allocate them | |
1489 as if they were defined in the outermost scope. */ | |
1490 else if (is_gimple_reg (var)) | |
1491 expand_now = true; | |
1492 | 1376 |
1493 /* If the variable is not associated with any block, then it | 1377 /* If the variable is not associated with any block, then it |
1494 was created by the optimizers, and could be live anywhere | 1378 was created by the optimizers, and could be live anywhere |
1495 in the function. */ | 1379 in the function. */ |
1496 else if (TREE_USED (var)) | 1380 else if (TREE_USED (var)) |
1497 expand_now = true; | 1381 expand_now = true; |
1498 | 1382 |
1499 /* Finally, mark all variables on the list as used. We'll use | 1383 /* Finally, mark all variables on the list as used. We'll use |
1500 this in a moment when we expand those associated with scopes. */ | 1384 this in a moment when we expand those associated with scopes. */ |
1501 TREE_USED (var) = 1; | 1385 TREE_USED (var) = 1; |
1502 | 1386 |
1503 if (expand_now) | 1387 if (expand_now) |
1504 { | 1388 { |
1505 expand_one_var (var, true, true); | 1389 expand_one_var (var, true, true); |
1506 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var)) | 1390 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var)) |
1507 { | 1391 { |
1508 rtx rtl = DECL_RTL_IF_SET (var); | 1392 rtx rtl = DECL_RTL_IF_SET (var); |
1509 | 1393 |
1510 /* Keep artificial non-ignored vars in cfun->local_decls | 1394 /* Keep artificial non-ignored vars in cfun->local_decls |
1511 chain until instantiate_decls. */ | 1395 chain until instantiate_decls. */ |
1512 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) | 1396 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) |
1513 { | 1397 { |
1514 TREE_CHAIN (t) = cfun->local_decls; | 1398 TREE_CHAIN (t) = cfun->local_decls; |
1515 cfun->local_decls = t; | 1399 cfun->local_decls = t; |
1516 continue; | 1400 continue; |
1517 } | 1401 } |
1518 } | 1402 } |
1519 } | 1403 } |
1520 | 1404 |
1521 ggc_free (t); | 1405 ggc_free (t); |
1522 } | 1406 } |
1523 | 1407 |
1524 /* At this point, all variables within the block tree with TREE_USED | 1408 /* At this point, all variables within the block tree with TREE_USED |
1526 expand_used_vars_for_block (outer_block, true); | 1410 expand_used_vars_for_block (outer_block, true); |
1527 | 1411 |
1528 if (stack_vars_num > 0) | 1412 if (stack_vars_num > 0) |
1529 { | 1413 { |
1530 /* Due to the way alias sets work, no variables with non-conflicting | 1414 /* Due to the way alias sets work, no variables with non-conflicting |
1531 alias sets may be assigned the same address. Add conflicts to | 1415 alias sets may be assigned the same address. Add conflicts to |
1532 reflect this. */ | 1416 reflect this. */ |
1533 add_alias_set_conflicts (); | 1417 add_alias_set_conflicts (); |
1534 | 1418 |
1535 /* If stack protection is enabled, we don't share space between | 1419 /* If stack protection is enabled, we don't share space between |
1536 vulnerable data and non-vulnerable data. */ | 1420 vulnerable data and non-vulnerable data. */ |
1537 if (flag_stack_protect) | 1421 if (flag_stack_protect) |
1538 add_stack_protection_conflicts (); | 1422 add_stack_protection_conflicts (); |
1539 | 1423 |
1540 /* Now that we have collected all stack variables, and have computed a | 1424 /* Now that we have collected all stack variables, and have computed a |
1541 minimal interference graph, attempt to save some stack space. */ | 1425 minimal interference graph, attempt to save some stack space. */ |
1542 partition_stack_vars (); | 1426 partition_stack_vars (); |
1543 if (dump_file) | 1427 if (dump_file) |
1544 dump_stack_var_partition (); | 1428 dump_stack_var_partition (); |
1545 } | 1429 } |
1546 | 1430 |
1547 /* There are several conditions under which we should create a | 1431 /* There are several conditions under which we should create a |
1548 stack guard: protect-all, alloca used, protected decls present. */ | 1432 stack guard: protect-all, alloca used, protected decls present. */ |
1549 if (flag_stack_protect == 2 | 1433 if (flag_stack_protect == 2 |
1550 || (flag_stack_protect | 1434 || (flag_stack_protect |
1551 && (cfun->calls_alloca || has_protected_decls))) | 1435 && (cfun->calls_alloca || has_protected_decls))) |
1552 create_stack_guard (); | 1436 create_stack_guard (); |
1553 | 1437 |
1554 /* Assign rtl to each variable based on these partitions. */ | 1438 /* Assign rtl to each variable based on these partitions. */ |
1555 if (stack_vars_num > 0) | 1439 if (stack_vars_num > 0) |
1556 { | 1440 { |
1557 /* Reorder decls to be protected by iterating over the variables | 1441 /* Reorder decls to be protected by iterating over the variables |
1558 array multiple times, and allocating out of each phase in turn. */ | 1442 array multiple times, and allocating out of each phase in turn. */ |
1559 /* ??? We could probably integrate this into the qsort we did | 1443 /* ??? We could probably integrate this into the qsort we did |
1560 earlier, such that we naturally see these variables first, | 1444 earlier, such that we naturally see these variables first, |
1561 and thus naturally allocate things in the right order. */ | 1445 and thus naturally allocate things in the right order. */ |
1562 if (has_protected_decls) | 1446 if (has_protected_decls) |
1563 { | 1447 { |
1564 /* Phase 1 contains only character arrays. */ | 1448 /* Phase 1 contains only character arrays. */ |
1565 expand_stack_vars (stack_protect_decl_phase_1); | 1449 expand_stack_vars (stack_protect_decl_phase_1); |
1566 | 1450 |
1567 /* Phase 2 contains other kinds of arrays. */ | 1451 /* Phase 2 contains other kinds of arrays. */ |
1568 if (flag_stack_protect == 2) | 1452 if (flag_stack_protect == 2) |
1569 expand_stack_vars (stack_protect_decl_phase_2); | 1453 expand_stack_vars (stack_protect_decl_phase_2); |
1570 } | 1454 } |
1571 | 1455 |
1572 expand_stack_vars (NULL); | 1456 expand_stack_vars (NULL); |
1573 | 1457 |
1574 fini_vars_expansion (); | 1458 fini_vars_expansion (); |
1575 } | 1459 } |
1577 /* If the target requires that FRAME_OFFSET be aligned, do it. */ | 1461 /* If the target requires that FRAME_OFFSET be aligned, do it. */ |
1578 if (STACK_ALIGNMENT_NEEDED) | 1462 if (STACK_ALIGNMENT_NEEDED) |
1579 { | 1463 { |
1580 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 1464 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1581 if (!FRAME_GROWS_DOWNWARD) | 1465 if (!FRAME_GROWS_DOWNWARD) |
1582 frame_offset += align - 1; | 1466 frame_offset += align - 1; |
1583 frame_offset &= -align; | 1467 frame_offset &= -align; |
1584 } | 1468 } |
1585 } | 1469 } |
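
The closing fixup relies on `align` being a power of two, so `frame_offset &= -align` clears the low bits. Adding `align - 1` first turns the mask into a round-up for upward-growing frames; for downward-growing frames the offset is negative and the bare mask already moves it away from zero. A worked check of the arithmetic, assuming align == 16:

    #include <stdio.h>

    int
    main (void)
    {
      long align = 16;
      long up = 23;
      up += align - 1;            /* !FRAME_GROWS_DOWNWARD: 23 -> 38   */
      up &= -align;               /* 38 -> 32, rounded up              */
      long down = -23;
      down &= -align;             /* FRAME_GROWS_DOWNWARD: -23 -> -32  */
      printf ("%ld %ld\n", up, down);   /* prints "32 -32"             */
      return 0;
    }
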
1586 | 1470 |
1587 | 1471 |
1593 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since) | 1477 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since) |
1594 { | 1478 { |
1595 if (dump_file && (dump_flags & TDF_DETAILS)) | 1479 if (dump_file && (dump_flags & TDF_DETAILS)) |
1596 { | 1480 { |
1597 fprintf (dump_file, "\n;; "); | 1481 fprintf (dump_file, "\n;; "); |
1598 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | 1482 print_gimple_stmt (dump_file, stmt, 0, |
1483 TDF_SLIM | (dump_flags & TDF_LINENO)); | |
1599 fprintf (dump_file, "\n"); | 1484 fprintf (dump_file, "\n"); |
1600 | 1485 |
1601 print_rtl (dump_file, since ? NEXT_INSN (since) : since); | 1486 print_rtl (dump_file, since ? NEXT_INSN (since) : since); |
1602 } | 1487 } |
1603 } | 1488 } |
1622 elt = pointer_map_contains (lab_rtx_for_bb, bb); | 1507 elt = pointer_map_contains (lab_rtx_for_bb, bb); |
1623 if (elt) | 1508 if (elt) |
1624 return (rtx) *elt; | 1509 return (rtx) *elt; |
1625 | 1510 |
1626 /* Find the tree label if it is present. */ | 1511 /* Find the tree label if it is present. */ |
1627 | 1512 |
1628 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | 1513 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
1629 { | 1514 { |
1630 lab_stmt = gsi_stmt (gsi); | 1515 lab_stmt = gsi_stmt (gsi); |
1631 if (gimple_code (lab_stmt) != GIMPLE_LABEL) | 1516 if (gimple_code (lab_stmt) != GIMPLE_LABEL) |
1632 break; | 1517 break; |
1633 | 1518 |
1634 lab = gimple_label_label (lab_stmt); | 1519 lab = gimple_label_label (lab_stmt); |
1635 if (DECL_NONLOCAL (lab)) | 1520 if (DECL_NONLOCAL (lab)) |
1636 break; | 1521 break; |
1637 | 1522 |
1638 return label_rtx (lab); | 1523 return label_rtx (lab); |
1639 } | 1524 } |
1640 | 1525 |
1641 elt = pointer_map_insert (lab_rtx_for_bb, bb); | 1526 elt = pointer_map_insert (lab_rtx_for_bb, bb); |
1642 *elt = gen_label_rtx (); | 1527 *elt = gen_label_rtx (); |
1643 return (rtx) *elt; | 1528 return (rtx) *elt; |
1644 } | 1529 } |
1645 | 1530 |
1531 | |
1532 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge | |
1533 of a basic block where we just expanded the conditional at the end, | |
1534 possibly clean up the CFG and instruction sequence. LAST is the | |
1535 last instruction before the just emitted jump sequence. */ | |
1536 | |
1537 static void | |
1538 maybe_cleanup_end_of_block (edge e, rtx last) | |
1539 { | |
1540 /* Special case: when jumpif decides that the condition is | |
1541 trivial it emits an unconditional jump (and the necessary | |
1542 barrier). But we still have two edges, the fallthru one is | |
1543 wrong. purge_dead_edges would clean this up later. Unfortunately | |
1544 we have to insert insns (and split edges) before | |
1545 find_many_sub_basic_blocks and hence before purge_dead_edges. | |
1546 But splitting edges might create new blocks which depend on the | |
1547 fact that if there are two edges there's no barrier. So the | |
1548 barrier would get lost and verify_flow_info would ICE. Instead | |
1549 of auditing all edge splitters to care for the barrier (which | |
1550 normally isn't there in a cleaned CFG), fix it here. */ | |
1551 if (BARRIER_P (get_last_insn ())) | |
1552 { | |
1553 rtx insn; | |
1554 remove_edge (e); | |
1555 /* Now, we have a single successor block, if we have insns to | |
1556 insert on the remaining edge we potentially will insert | |
1557 it at the end of this block (if the dest block isn't feasible) | |
1558 in order to avoid splitting the edge. This insertion will take | |
1559 place in front of the last jump. But we might have emitted | |
1560 multiple jumps (conditional and one unconditional) to the | |
1561 same destination. Inserting in front of the last one then | |
1562 is a problem. See PR 40021. We fix this by deleting all | |
1563 jumps except the last unconditional one. */ | |
1564 insn = PREV_INSN (get_last_insn ()); | |
1565 /* Make sure we have an unconditional jump. Otherwise we're | |
1566 confused. */ | |
1567 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); | |
1568 for (insn = PREV_INSN (insn); insn != last;) | |
1569 { | |
1570 insn = PREV_INSN (insn); | |
1571 if (JUMP_P (NEXT_INSN (insn))) | |
1572 delete_insn (NEXT_INSN (insn)); | |
1573 } | |
1574 } | |
1575 } | |
1646 | 1576 |
1647 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. | 1577 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. |
1648 Returns a new basic block if we've terminated the current basic | 1578 Returns a new basic block if we've terminated the current basic |
1649 block and created a new one. */ | 1579 block and created a new one. */ |
1650 | 1580 |
1653 { | 1583 { |
1654 basic_block new_bb, dest; | 1584 basic_block new_bb, dest; |
1655 edge new_edge; | 1585 edge new_edge; |
1656 edge true_edge; | 1586 edge true_edge; |
1657 edge false_edge; | 1587 edge false_edge; |
1658 tree pred = gimple_cond_pred_to_tree (stmt); | |
1659 rtx last2, last; | 1588 rtx last2, last; |
1589 enum tree_code code; | |
1590 tree op0, op1; | |
1591 | |
1592 code = gimple_cond_code (stmt); | |
1593 op0 = gimple_cond_lhs (stmt); | |
1594 op1 = gimple_cond_rhs (stmt); | |
1595 /* We're sometimes presented with such code: | |
1596 D.123_1 = x < y; | |
1597 if (D.123_1 != 0) | |
1598 ... | |
1599 This would expand to two comparisons which then later might | |
1600 be cleaned up by combine. But some pattern matchers like if-conversion | |
1601 work better when there's only one compare, so make up for this | |
1602 here as a special exception if TER would have made the same change. */ |
1603 if (gimple_cond_single_var_p (stmt) | |
1604 && SA.values | |
1605 && TREE_CODE (op0) == SSA_NAME | |
1606 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) | |
1607 { | |
1608 gimple second = SSA_NAME_DEF_STMT (op0); | |
1609 if (gimple_code (second) == GIMPLE_ASSIGN) | |
1610 { | |
1611 enum tree_code code2 = gimple_assign_rhs_code (second); | |
1612 if (TREE_CODE_CLASS (code2) == tcc_comparison) | |
1613 { | |
1614 code = code2; | |
1615 op0 = gimple_assign_rhs1 (second); | |
1616 op1 = gimple_assign_rhs2 (second); | |
1617 } | |
1618 /* If jumps are cheap turn some more codes into | |
1619 jumpy sequences. */ | |
1620 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) | |
1621 { | |
1622 if ((code2 == BIT_AND_EXPR | |
1623 && TYPE_PRECISION (TREE_TYPE (op0)) == 1 | |
1624 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) | |
1625 || code2 == TRUTH_AND_EXPR) | |
1626 { | |
1627 code = TRUTH_ANDIF_EXPR; | |
1628 op0 = gimple_assign_rhs1 (second); | |
1629 op1 = gimple_assign_rhs2 (second); | |
1630 } | |
1631 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR) | |
1632 { | |
1633 code = TRUTH_ORIF_EXPR; | |
1634 op0 = gimple_assign_rhs1 (second); | |
1635 op1 = gimple_assign_rhs2 (second); | |
1636 } | |
1637 } | |
1638 } | |
1639 } | |
1660 | 1640 |
1661 last2 = last = get_last_insn (); | 1641 last2 = last = get_last_insn (); |
1662 | 1642 |
1663 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | 1643 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); |
1664 if (gimple_has_location (stmt)) | 1644 if (gimple_has_location (stmt)) |
1673 | 1653 |
1674 /* We can either have a pure conditional jump with one fallthru edge or | 1654 /* We can either have a pure conditional jump with one fallthru edge or |
1675 a two-way jump that needs to be decomposed into two basic blocks. */ | 1655 a two-way jump that needs to be decomposed into two basic blocks. */ |
1676 if (false_edge->dest == bb->next_bb) | 1656 if (false_edge->dest == bb->next_bb) |
1677 { | 1657 { |
1678 jumpif (pred, label_rtx_for_bb (true_edge->dest)); | 1658 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); |
1679 add_reg_br_prob_note (last, true_edge->probability); | 1659 add_reg_br_prob_note (last, true_edge->probability); |
1680 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 1660 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1681 if (true_edge->goto_locus) | 1661 if (true_edge->goto_locus) |
1682 { | 1662 { |
1683 set_curr_insn_source_location (true_edge->goto_locus); | 1663 set_curr_insn_source_location (true_edge->goto_locus); |
1684 set_curr_insn_block (true_edge->goto_block); | 1664 set_curr_insn_block (true_edge->goto_block); |
1685 true_edge->goto_locus = curr_insn_locator (); | 1665 true_edge->goto_locus = curr_insn_locator (); |
1686 } | 1666 } |
1687 true_edge->goto_block = NULL; | 1667 true_edge->goto_block = NULL; |
1688 false_edge->flags |= EDGE_FALLTHRU; | 1668 false_edge->flags |= EDGE_FALLTHRU; |
1689 ggc_free (pred); | 1669 maybe_cleanup_end_of_block (false_edge, last); |
1690 return NULL; | 1670 return NULL; |
1691 } | 1671 } |
1692 if (true_edge->dest == bb->next_bb) | 1672 if (true_edge->dest == bb->next_bb) |
1693 { | 1673 { |
1694 jumpifnot (pred, label_rtx_for_bb (false_edge->dest)); | 1674 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest)); |
1695 add_reg_br_prob_note (last, false_edge->probability); | 1675 add_reg_br_prob_note (last, false_edge->probability); |
1696 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 1676 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1697 if (false_edge->goto_locus) | 1677 if (false_edge->goto_locus) |
1698 { | 1678 { |
1699 set_curr_insn_source_location (false_edge->goto_locus); | 1679 set_curr_insn_source_location (false_edge->goto_locus); |
1700 set_curr_insn_block (false_edge->goto_block); | 1680 set_curr_insn_block (false_edge->goto_block); |
1701 false_edge->goto_locus = curr_insn_locator (); | 1681 false_edge->goto_locus = curr_insn_locator (); |
1702 } | 1682 } |
1703 false_edge->goto_block = NULL; | 1683 false_edge->goto_block = NULL; |
1704 true_edge->flags |= EDGE_FALLTHRU; | 1684 true_edge->flags |= EDGE_FALLTHRU; |
1705 ggc_free (pred); | 1685 maybe_cleanup_end_of_block (true_edge, last); |
1706 return NULL; | 1686 return NULL; |
1707 } | 1687 } |
1708 | 1688 |
1709 jumpif (pred, label_rtx_for_bb (true_edge->dest)); | 1689 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); |
1710 add_reg_br_prob_note (last, true_edge->probability); | 1690 add_reg_br_prob_note (last, true_edge->probability); |
1711 last = get_last_insn (); | 1691 last = get_last_insn (); |
1712 if (false_edge->goto_locus) | 1692 if (false_edge->goto_locus) |
1713 { | 1693 { |
1714 set_curr_insn_source_location (false_edge->goto_locus); | 1694 set_curr_insn_source_location (false_edge->goto_locus); |
1744 set_curr_insn_block (true_edge->goto_block); | 1724 set_curr_insn_block (true_edge->goto_block); |
1745 true_edge->goto_locus = curr_insn_locator (); | 1725 true_edge->goto_locus = curr_insn_locator (); |
1746 } | 1726 } |
1747 true_edge->goto_block = NULL; | 1727 true_edge->goto_block = NULL; |
1748 | 1728 |
1749 ggc_free (pred); | |
1750 return new_bb; | 1729 return new_bb; |
1730 } | |
1731 | |
1732 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL | |
1733 statement STMT. */ | |
1734 | |
1735 static void | |
1736 expand_call_stmt (gimple stmt) | |
1737 { | |
1738 tree exp; | |
1739 tree lhs = gimple_call_lhs (stmt); | |
1740 size_t i; | |
1741 | |
1742 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | |
1743 | |
1744 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); | |
1745 TREE_TYPE (exp) = gimple_call_return_type (stmt); | |
1746 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); | |
1747 | |
1748 for (i = 0; i < gimple_call_num_args (stmt); i++) | |
1749 CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i); | |
1750 | |
1751 if (gimple_has_side_effects (stmt)) | |
1752 TREE_SIDE_EFFECTS (exp) = 1; | |
1753 | |
1754 if (gimple_call_nothrow_p (stmt)) | |
1755 TREE_NOTHROW (exp) = 1; | |
1756 | |
1757 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); | |
1758 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); | |
1759 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); | |
1760 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt); | |
1761 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); | |
1762 SET_EXPR_LOCATION (exp, gimple_location (stmt)); | |
1763 TREE_BLOCK (exp) = gimple_block (stmt); | |
1764 | |
1765 if (lhs) | |
1766 expand_assignment (lhs, exp, false); | |
1767 else | |
1768 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL); | |
1769 } | |
1770 | |
1771 /* A subroutine of expand_gimple_stmt, expanding one gimple statement | |
1772 STMT that doesn't require special handling for outgoing edges. That | |
1773 is no tailcalls and no GIMPLE_COND. */ | |
1774 | |
1775 static void | |
1776 expand_gimple_stmt_1 (gimple stmt) | |
1777 { | |
1778 tree op0; | |
1779 switch (gimple_code (stmt)) | |
1780 { | |
1781 case GIMPLE_GOTO: | |
1782 op0 = gimple_goto_dest (stmt); | |
1783 if (TREE_CODE (op0) == LABEL_DECL) | |
1784 expand_goto (op0); | |
1785 else | |
1786 expand_computed_goto (op0); | |
1787 break; | |
1788 case GIMPLE_LABEL: | |
1789 expand_label (gimple_label_label (stmt)); | |
1790 break; | |
1791 case GIMPLE_NOP: | |
1792 case GIMPLE_PREDICT: | |
1793 break; | |
1794 case GIMPLE_SWITCH: | |
1795 expand_case (stmt); | |
1796 break; | |
1797 case GIMPLE_ASM: | |
1798 expand_asm_stmt (stmt); | |
1799 break; | |
1800 case GIMPLE_CALL: | |
1801 expand_call_stmt (stmt); | |
1802 break; | |
1803 | |
1804 case GIMPLE_RETURN: | |
1805 op0 = gimple_return_retval (stmt); | |
1806 | |
1807 if (op0 && op0 != error_mark_node) | |
1808 { | |
1809 tree result = DECL_RESULT (current_function_decl); | |
1810 | |
1811 /* If we are not returning the current function's RESULT_DECL, | |
1812 build an assignment to it. */ | |
1813 if (op0 != result) | |
1814 { | |
1815 /* I believe that a function's RESULT_DECL is unique. */ | |
1816 gcc_assert (TREE_CODE (op0) != RESULT_DECL); | |
1817 | |
1818 /* ??? We'd like to use simply expand_assignment here, | |
1819 but this fails if the value is of BLKmode but the return | |
1820 decl is a register. expand_return has special handling | |
1821 for this combination, which eventually should move | |
1822 to common code. See comments there. Until then, let's | |
1823 build a modify expression :-/ */ | |
1824 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | |
1825 result, op0); | |
1826 } | |
1827 } | |
1828 if (!op0) | |
1829 expand_null_return (); | |
1830 else | |
1831 expand_return (op0); | |
1832 break; | |
1833 | |
1834 case GIMPLE_ASSIGN: | |
1835 { | |
1836 tree lhs = gimple_assign_lhs (stmt); | |
1837 | |
1838 /* Tree expand used to fiddle with |= and &= of two bitfield | |
1839 COMPONENT_REFs here. This can't happen with gimple, the LHS | |
1840 of binary assigns must be a gimple reg. */ | |
1841 | |
1842 if (TREE_CODE (lhs) != SSA_NAME | |
1843 || get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1844 == GIMPLE_SINGLE_RHS) | |
1845 { | |
1846 tree rhs = gimple_assign_rhs1 (stmt); | |
1847 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1848 == GIMPLE_SINGLE_RHS); | |
1849 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) | |
1850 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); | |
1851 expand_assignment (lhs, rhs, | |
1852 gimple_assign_nontemporal_move_p (stmt)); | |
1853 } | |
1854 else | |
1855 { | |
1856 rtx target, temp; | |
1857 bool nontemporal = gimple_assign_nontemporal_move_p (stmt); | |
1858 struct separate_ops ops; | |
1859 bool promoted = false; | |
1860 | |
1861 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); | |
1862 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) | |
1863 promoted = true; | |
1864 | |
1865 ops.code = gimple_assign_rhs_code (stmt); | |
1866 ops.type = TREE_TYPE (lhs); | |
1867 switch (get_gimple_rhs_class (gimple_expr_code (stmt))) | |
1868 { | |
1869 case GIMPLE_BINARY_RHS: | |
1870 ops.op1 = gimple_assign_rhs2 (stmt); | |
1871 /* Fallthru */ | |
1872 case GIMPLE_UNARY_RHS: | |
1873 ops.op0 = gimple_assign_rhs1 (stmt); | |
1874 break; | |
1875 default: | |
1876 gcc_unreachable (); | |
1877 } | |
1878 ops.location = gimple_location (stmt); | |
1879 | |
1880 /* If we want to use a nontemporal store, force the value into a |
1881 register first. If we store into a promoted register, | |
1882 don't directly expand to target. */ | |
1883 temp = nontemporal || promoted ? NULL_RTX : target; | |
1884 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target), | |
1885 EXPAND_NORMAL); | |
1886 | |
1887 if (temp == target) | |
1888 ; | |
1889 else if (promoted) | |
1890 { | |
1891 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); | |
1892 /* If TEMP is a VOIDmode constant, use convert_modes to make | |
1893 sure that we properly convert it. */ | |
1894 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
1895 { | |
1896 temp = convert_modes (GET_MODE (target), | |
1897 TYPE_MODE (ops.type), | |
1898 temp, unsignedp); | |
1899 temp = convert_modes (GET_MODE (SUBREG_REG (target)), | |
1900 GET_MODE (target), temp, unsignedp); | |
1901 } | |
1902 | |
1903 convert_move (SUBREG_REG (target), temp, unsignedp); | |
1904 } | |
1905 else if (nontemporal && emit_storent_insn (target, temp)) | |
1906 ; | |
1907 else | |
1908 { | |
1909 temp = force_operand (temp, target); | |
1910 if (temp != target) | |
1911 emit_move_insn (target, temp); | |
1912 } | |
1913 } | |
1914 } | |
1915 break; | |
1916 | |
1917 default: | |
1918 gcc_unreachable (); | |
1919 } | |
1920 } | |
1921 | |
1922 /* Expand one gimple statement STMT and return the last RTL instruction | |
1923 before any of the newly generated ones. | |
1924 | |
1925 In addition to generating the necessary RTL instructions this also | |
1926 sets REG_EH_REGION notes if necessary and sets the current source | |
1927 location for diagnostics. */ | |
1928 | |
1929 static rtx | |
1930 expand_gimple_stmt (gimple stmt) | |
1931 { | |
1932 int lp_nr = 0; | |
1933 rtx last = NULL; | |
1934 location_t saved_location = input_location; | |
1935 | |
1936 last = get_last_insn (); | |
1937 | |
1938 /* If this is an expression of some kind and it has an associated line | |
1939 number, then emit the line number before expanding the expression. | |
1940 | |
1941 We need to save and restore the file and line information so that | |
1942 errors discovered during expansion are emitted with the right | |
1943 information. It would be better if the diagnostic routines |
1944 used the file/line information embedded in the tree nodes rather | |
1945 than globals. */ | |
1946 gcc_assert (cfun); | |
1947 | |
1948 if (gimple_has_location (stmt)) | |
1949 { | |
1950 input_location = gimple_location (stmt); | |
1951 set_curr_insn_source_location (input_location); | |
1952 | |
1953 /* Record where the insns produced belong. */ | |
1954 set_curr_insn_block (gimple_block (stmt)); | |
1955 } | |
1956 | |
1957 expand_gimple_stmt_1 (stmt); | |
1958 /* Free any temporaries used to evaluate this statement. */ | |
1959 free_temp_slots (); | |
1960 | |
1961 input_location = saved_location; | |
1962 | |
1963 /* Mark all insns that may trap. */ | |
1964 lp_nr = lookup_stmt_eh_lp (stmt); | |
1965 if (lp_nr) | |
1966 { | |
1967 rtx insn; | |
1968 for (insn = next_real_insn (last); insn; | |
1969 insn = next_real_insn (insn)) | |
1970 { | |
1971 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
1972 /* If we want exceptions for non-call insns, any | |
1973 may_trap_p instruction may throw. */ | |
1974 && GET_CODE (PATTERN (insn)) != CLOBBER | |
1975 && GET_CODE (PATTERN (insn)) != USE | |
1976 && insn_could_throw_p (insn)) | |
1977 make_reg_eh_region_note (insn, 0, lp_nr); | |
1978 } | |
1979 } | |
1980 | |
1981 return last; | |
1751 } | 1982 } |
1752 | 1983 |
1753 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL | 1984 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL |
1754 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually | 1985 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually |
1755 generated a tail call (something that might be denied by the ABI | 1986 generated a tail call (something that might be denied by the ABI |
1766 rtx last2, last; | 1997 rtx last2, last; |
1767 edge e; | 1998 edge e; |
1768 edge_iterator ei; | 1999 edge_iterator ei; |
1769 int probability; | 2000 int probability; |
1770 gcov_type count; | 2001 gcov_type count; |
1771 tree stmt_tree = gimple_to_tree (stmt); | 2002 |
1772 | 2003 last2 = last = expand_gimple_stmt (stmt); |
1773 last2 = last = get_last_insn (); | |
1774 | |
1775 expand_expr_stmt (stmt_tree); | |
1776 | |
1777 release_stmt_tree (stmt, stmt_tree); | |
1778 | 2004 |
1779 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) | 2005 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) |
1780 if (CALL_P (last) && SIBLING_CALL_P (last)) | 2006 if (CALL_P (last) && SIBLING_CALL_P (last)) |
1781 goto found; | 2007 goto found; |
1782 | 2008 |
1801 count = 0; | 2027 count = 0; |
1802 | 2028 |
1803 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 2029 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
1804 { | 2030 { |
1805 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) | 2031 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) |
1806 { | 2032 { |
1807 if (e->dest != EXIT_BLOCK_PTR) | 2033 if (e->dest != EXIT_BLOCK_PTR) |
1808 { | 2034 { |
1809 e->dest->count -= e->count; | 2035 e->dest->count -= e->count; |
1810 e->dest->frequency -= EDGE_FREQUENCY (e); | 2036 e->dest->frequency -= EDGE_FREQUENCY (e); |
1811 if (e->dest->count < 0) | 2037 if (e->dest->count < 0) |
1812 e->dest->count = 0; | 2038 e->dest->count = 0; |
1813 if (e->dest->frequency < 0) | 2039 if (e->dest->frequency < 0) |
1814 e->dest->frequency = 0; | 2040 e->dest->frequency = 0; |
1815 } | 2041 } |
1816 count += e->count; | 2042 count += e->count; |
1817 probability += e->probability; | 2043 probability += e->probability; |
1818 remove_edge (e); | 2044 remove_edge (e); |
1819 } | 2045 } |
1820 else | 2046 else |
1821 ei_next (&ei); | 2047 ei_next (&ei); |
1822 } | 2048 } |
1823 | 2049 |
1824 /* This is somewhat ugly: the call_expr expander often emits instructions | 2050 /* This is somewhat ugly: the call_expr expander often emits instructions |
1825 after the sibcall (to perform the function return). These confuse the | 2051 after the sibcall (to perform the function return). These confuse the |
1826 find_many_sub_basic_blocks code, so we need to get rid of these. */ | 2052 find_many_sub_basic_blocks code, so we need to get rid of these. */ |
1829 | 2055 |
1830 *can_fallthru = false; | 2056 *can_fallthru = false; |
1831 while (NEXT_INSN (last)) | 2057 while (NEXT_INSN (last)) |
1832 { | 2058 { |
1833 /* For instance an sqrt builtin expander expands an if with a | 2059 /* For instance an sqrt builtin expander expands an if with a |
1834 sibcall in the then-branch and a label for the `else`. */ | 2060 sibcall in the then-branch and a label for the `else`. */ |
1835 if (LABEL_P (NEXT_INSN (last))) | 2061 if (LABEL_P (NEXT_INSN (last))) |
1836 { | 2062 { |
1837 *can_fallthru = true; | 2063 *can_fallthru = true; |
1838 break; | 2064 break; |
1839 } | 2065 } |
1840 delete_insn (NEXT_INSN (last)); | 2066 delete_insn (NEXT_INSN (last)); |
1841 } | 2067 } |
1842 | 2068 |
1843 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | 2069 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); |
1844 e->probability += probability; | 2070 e->probability += probability; |
1850 { | 2076 { |
1851 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | 2077 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); |
1852 | 2078 |
1853 last = BB_END (bb); | 2079 last = BB_END (bb); |
1854 if (BARRIER_P (last)) | 2080 if (BARRIER_P (last)) |
1855 BB_END (bb) = PREV_INSN (last); | 2081 BB_END (bb) = PREV_INSN (last); |
1856 } | 2082 } |
1857 | 2083 |
1858 maybe_dump_rtl_for_gimple_stmt (stmt, last2); | 2084 maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
1859 | 2085 |
1860 return bb; | 2086 return bb; |
2087 } | |
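
For reference, the kind of source this path handles is an optimized call in tail position; whether it actually becomes a sibcall (the SIBLING_CALL_P scan above) is up to the target ABI. A hypothetical example:

    extern int step (int);

    int
    driver (int x)
    {
      return step (x + 1);   /* tail position: nothing runs after the call */
    }
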
2088 | |
2089 /* Return the difference between the floor and the truncated result of | |
2090 a signed division by OP1 with remainder MOD. */ | |
2091 static rtx | |
2092 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2093 { | |
2094 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ | |
2095 return gen_rtx_IF_THEN_ELSE | |
2096 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2097 gen_rtx_IF_THEN_ELSE | |
2098 (mode, gen_rtx_LT (BImode, | |
2099 gen_rtx_DIV (mode, op1, mod), | |
2100 const0_rtx), | |
2101 constm1_rtx, const0_rtx), | |
2102 const0_rtx); | |
2103 } | |
2104 | |
2105 /* Return the difference between the ceil and the truncated result of | |
2106 a signed division by OP1 with remainder MOD. */ | |
2107 static rtx | |
2108 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2109 { | |
2110 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ | |
2111 return gen_rtx_IF_THEN_ELSE | |
2112 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2113 gen_rtx_IF_THEN_ELSE | |
2114 (mode, gen_rtx_GT (BImode, | |
2115 gen_rtx_DIV (mode, op1, mod), | |
2116 const0_rtx), | |
2117 const1_rtx, const0_rtx), | |
2118 const0_rtx); | |
2119 } | |
2120 | |
2121 /* Return the difference between the ceil and the truncated result of | |
2122 an unsigned division by OP1 with remainder MOD. */ | |
2123 static rtx | |
2124 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) | |
2125 { | |
2126 /* (mod != 0 ? 1 : 0) */ | |
2127 return gen_rtx_IF_THEN_ELSE | |
2128 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2129 const1_rtx, const0_rtx); | |
2130 } | |
2131 | |
2132 /* Return the difference between the rounded and the truncated result | |
2133 of a signed division by OP1 with remainder MOD. Halfway cases are | |
2134 rounded away from zero, rather than to the nearest even number. */ | |
2135 static rtx | |
2136 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2137 { | |
2138 /* (abs (mod) >= abs (op1) - abs (mod) | |
2139 ? (op1 / mod > 0 ? 1 : -1) | |
2140 : 0) */ | |
2141 return gen_rtx_IF_THEN_ELSE | |
2142 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), | |
2143 gen_rtx_MINUS (mode, | |
2144 gen_rtx_ABS (mode, op1), | |
2145 gen_rtx_ABS (mode, mod))), | |
2146 gen_rtx_IF_THEN_ELSE | |
2147 (mode, gen_rtx_GT (BImode, | |
2148 gen_rtx_DIV (mode, op1, mod), | |
2149 const0_rtx), | |
2150 const1_rtx, constm1_rtx), | |
2151 const0_rtx); | |
2152 } | |
2153 | |
2154 /* Return the difference between the rounded and the truncated result | |
2155 of an unsigned division by OP1 with remainder MOD. Halfway cases |
2156 are rounded away from zero, rather than to the nearest even | |
2157 number. */ | |
2158 static rtx | |
2159 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2160 { | |
2161 /* (mod >= op1 - mod ? 1 : 0) */ | |
2162 return gen_rtx_IF_THEN_ELSE | |
2163 (mode, gen_rtx_GE (BImode, mod, | |
2164 gen_rtx_MINUS (mode, op1, mod)), | |
2165 const1_rtx, const0_rtx); | |
2166 } | |
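
All four helpers express "rounded result = truncated result + adjustment" purely as debug RTL, never as executable code. A worked check of the signed identities in plain C (illustrative only; C division truncates toward zero, and since |mod| < |op1| whenever mod != 0, the sign test via op1 / mod is well defined):

    #include <stdio.h>

    int
    main (void)
    {
      int a = -7, b = 2;
      int q = a / b, r = a % b;    /* truncated: q == -3, r == -1        */
      int floor_adj = (r != 0 && b / r < 0) ? -1 : 0;  /* floor_sdiv_adjust */
      int ceil_adj = (r != 0 && b / r > 0) ? 1 : 0;    /* ceil_sdiv_adjust  */
      printf ("floor %d, ceil %d\n", q + floor_adj, q + ceil_adj);
      /* prints "floor -4, ceil -3" */
      return 0;
    }
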
2167 | |
2168 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting | |
2169 any rtl. */ | |
2170 | |
2171 static rtx | |
2172 convert_debug_memory_address (enum machine_mode mode, rtx x) | |
2173 { | |
2174 enum machine_mode xmode = GET_MODE (x); | |
2175 | |
2176 #ifndef POINTERS_EXTEND_UNSIGNED | |
2177 gcc_assert (mode == Pmode); | |
2178 gcc_assert (xmode == mode || xmode == VOIDmode); | |
2179 #else | |
2180 gcc_assert (mode == Pmode || mode == ptr_mode); | |
2181 | |
2182 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | |
2183 return x; | |
2184 | |
2185 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | |
2186 x = simplify_gen_subreg (mode, x, xmode, | |
2187 subreg_lowpart_offset | |
2188 (mode, xmode)); | |
2189 else if (POINTERS_EXTEND_UNSIGNED > 0) | |
2190 x = gen_rtx_ZERO_EXTEND (mode, x); | |
2191 else if (!POINTERS_EXTEND_UNSIGNED) | |
2192 x = gen_rtx_SIGN_EXTEND (mode, x); | |
2193 else | |
2194 gcc_unreachable (); | |
2195 #endif /* POINTERS_EXTEND_UNSIGNED */ | |
2196 | |
2197 return x; | |
2198 } | |
2199 | |
2200 /* Return an RTX equivalent to the value of the tree expression | |
2201 EXP. */ | |
2202 | |
2203 static rtx | |
2204 expand_debug_expr (tree exp) | |
2205 { | |
2206 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; | |
2207 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
2208 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); | |
2209 addr_space_t as; | |
2210 enum machine_mode address_mode; | |
2211 | |
2212 switch (TREE_CODE_CLASS (TREE_CODE (exp))) | |
2213 { | |
2214 case tcc_expression: | |
2215 switch (TREE_CODE (exp)) | |
2216 { | |
2217 case COND_EXPR: | |
2218 goto ternary; | |
2219 | |
2220 case TRUTH_ANDIF_EXPR: | |
2221 case TRUTH_ORIF_EXPR: | |
2222 case TRUTH_AND_EXPR: | |
2223 case TRUTH_OR_EXPR: | |
2224 case TRUTH_XOR_EXPR: | |
2225 goto binary; | |
2226 | |
2227 case TRUTH_NOT_EXPR: | |
2228 goto unary; | |
2229 | |
2230 default: | |
2231 break; | |
2232 } | |
2233 break; | |
2234 | |
2235 ternary: | |
2236 op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); | |
2237 if (!op2) | |
2238 return NULL_RTX; | |
2239 /* Fall through. */ | |
2240 | |
2241 binary: | |
2242 case tcc_binary: | |
2243 case tcc_comparison: | |
2244 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | |
2245 if (!op1) | |
2246 return NULL_RTX; | |
2247 /* Fall through. */ | |
2248 | |
2249 unary: | |
2250 case tcc_unary: | |
2251 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2252 if (!op0) | |
2253 return NULL_RTX; | |
2254 break; | |
2255 | |
2256 case tcc_type: | |
2257 case tcc_statement: | |
2258 gcc_unreachable (); | |
2259 | |
2260 case tcc_constant: | |
2261 case tcc_exceptional: | |
2262 case tcc_declaration: | |
2263 case tcc_reference: | |
2264 case tcc_vl_exp: | |
2265 break; | |
2266 } | |
2267 | |
2268 switch (TREE_CODE (exp)) | |
2269 { | |
2270 case STRING_CST: | |
2271 if (!lookup_constant_def (exp)) | |
2272 { | |
2273 if (strlen (TREE_STRING_POINTER (exp)) + 1 | |
2274 != (size_t) TREE_STRING_LENGTH (exp)) | |
2275 return NULL_RTX; | |
2276 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); | |
2277 op0 = gen_rtx_MEM (BLKmode, op0); | |
2278 set_mem_attributes (op0, exp, 0); | |
2279 return op0; | |
2280 } | |
2281 /* Fall through... */ | |
2282 | |
2283 case INTEGER_CST: | |
2284 case REAL_CST: | |
2285 case FIXED_CST: | |
2286 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); | |
2287 return op0; | |
2288 | |
2289 case COMPLEX_CST: | |
2290 gcc_assert (COMPLEX_MODE_P (mode)); | |
2291 op0 = expand_debug_expr (TREE_REALPART (exp)); | |
2292 op1 = expand_debug_expr (TREE_IMAGPART (exp)); | |
2293 return gen_rtx_CONCAT (mode, op0, op1); | |
2294 | |
2295 case DEBUG_EXPR_DECL: | |
2296 op0 = DECL_RTL_IF_SET (exp); | |
2297 | |
2298 if (op0) | |
2299 return op0; | |
2300 | |
2301 op0 = gen_rtx_DEBUG_EXPR (mode); | |
2302 DEBUG_EXPR_TREE_DECL (op0) = exp; | |
2303 SET_DECL_RTL (exp, op0); | |
2304 | |
2305 return op0; | |
2306 | |
2307 case VAR_DECL: | |
2308 case PARM_DECL: | |
2309 case FUNCTION_DECL: | |
2310 case LABEL_DECL: | |
2311 case CONST_DECL: | |
2312 case RESULT_DECL: | |
2313 op0 = DECL_RTL_IF_SET (exp); | |
2314 | |
2315 /* This decl was probably optimized away. */ | |
2316 if (!op0) | |
2317 { | |
2318 if (TREE_CODE (exp) != VAR_DECL | |
2319 || DECL_EXTERNAL (exp) | |
2320 || !TREE_STATIC (exp) | |
2321 || !DECL_NAME (exp) | |
2322 || DECL_HARD_REGISTER (exp) | |
2323 || mode == VOIDmode) | |
2324 return NULL; | |
2325 | |
2326 op0 = DECL_RTL (exp); | |
2327 SET_DECL_RTL (exp, NULL); | |
2328 if (!MEM_P (op0) | |
2329 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF | |
2330 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) | |
2331 return NULL; | |
2332 } | |
2333 else | |
2334 op0 = copy_rtx (op0); | |
2335 | |
2336 if (GET_MODE (op0) == BLKmode) | |
2337 { | |
2338 gcc_assert (MEM_P (op0)); | |
2339 op0 = adjust_address_nv (op0, mode, 0); | |
2340 return op0; | |
2341 } | |
2342 | |
2343 /* Fall through. */ | |
2344 | |
2345 adjust_mode: | |
2346 case PAREN_EXPR: | |
2347 case NOP_EXPR: | |
2348 case CONVERT_EXPR: | |
2349 { | |
2350 enum machine_mode inner_mode = GET_MODE (op0); | |
2351 | |
2352 if (mode == inner_mode) | |
2353 return op0; | |
2354 | |
2355 if (inner_mode == VOIDmode) | |
2356 { | |
2357 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
2358 if (mode == inner_mode) | |
2359 return op0; | |
2360 } | |
2361 | |
2362 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | |
2363 { | |
2364 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | |
2365 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | |
2366 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | |
2367 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | |
2368 else | |
2369 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | |
2370 } | |
2371 else if (FLOAT_MODE_P (mode)) | |
2372 { | |
2373 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
2374 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode); | |
2375 else | |
2376 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode); | |
2377 } | |
2378 else if (FLOAT_MODE_P (inner_mode)) | |
2379 { | |
2380 if (unsignedp) | |
2381 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
2382 else | |
2383 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
2384 } | |
2385 else if (CONSTANT_P (op0) | |
2386 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | |
2387 op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
2388 subreg_lowpart_offset (mode, | |
2389 inner_mode)); | |
2390 else if (unsignedp) | |
2391 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
2392 else | |
2393 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
2394 | |
2395 return op0; | |
2396 } | |
2397 | |
2398 case INDIRECT_REF: | |
2399 case ALIGN_INDIRECT_REF: | |
2400 case MISALIGNED_INDIRECT_REF: | |
2401 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2402 if (!op0) | |
2403 return NULL; | |
2404 | |
2405 if (POINTER_TYPE_P (TREE_TYPE (exp))) | |
2406 { | |
2407 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2408 address_mode = targetm.addr_space.address_mode (as); | |
2409 } | |
2410 else | |
2411 { | |
2412 as = ADDR_SPACE_GENERIC; | |
2413 address_mode = Pmode; | |
2414 } | |
2415 | |
2416 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF) | |
2417 { | |
2418 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp)); | |
2419 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); | |
2420 } | |
2421 | |
2422 op0 = gen_rtx_MEM (mode, op0); | |
2423 | |
2424 set_mem_attributes (op0, exp, 0); | |
2425 set_mem_addr_space (op0, as); | |
2426 | |
2427 return op0; | |
2428 | |
2429 case TARGET_MEM_REF: | |
2430 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp))) | |
2431 return NULL; | |
2432 | |
2433 op0 = expand_debug_expr | |
2434 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); | |
2435 if (!op0) | |
2436 return NULL; | |
2437 | |
2438 as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); | |
2439 | |
2440 op0 = gen_rtx_MEM (mode, op0); | |
2441 | |
2442 set_mem_attributes (op0, exp, 0); | |
2443 set_mem_addr_space (op0, as); | |
2444 | |
2445 return op0; | |
2446 | |
2447 case ARRAY_REF: | |
2448 case ARRAY_RANGE_REF: | |
2449 case COMPONENT_REF: | |
2450 case BIT_FIELD_REF: | |
2451 case REALPART_EXPR: | |
2452 case IMAGPART_EXPR: | |
2453 case VIEW_CONVERT_EXPR: | |
2454 { | |
2455 enum machine_mode mode1; | |
2456 HOST_WIDE_INT bitsize, bitpos; | |
2457 tree offset; | |
2458 int volatilep = 0; | |
2459 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
2460 &mode1, &unsignedp, &volatilep, false); | |
2461 rtx orig_op0; | |
2462 | |
2463 if (bitsize == 0) | |
2464 return NULL; | |
2465 | |
2466 orig_op0 = op0 = expand_debug_expr (tem); | |
2467 | |
2468 if (!op0) | |
2469 return NULL; | |
2470 | |
2471 if (offset) | |
2472 { | |
2473 enum machine_mode addrmode, offmode; | |
2474 | |
2475 gcc_assert (MEM_P (op0)); | |
2476 | |
2477 op0 = XEXP (op0, 0); | |
2478 addrmode = GET_MODE (op0); | |
2479 if (addrmode == VOIDmode) | |
2480 addrmode = Pmode; | |
2481 | |
2482 op1 = expand_debug_expr (offset); | |
2483 if (!op1) | |
2484 return NULL; | |
2485 | |
2486 offmode = GET_MODE (op1); | |
2487 if (offmode == VOIDmode) | |
2488 offmode = TYPE_MODE (TREE_TYPE (offset)); | |
2489 | |
2490 if (addrmode != offmode) | |
2491 op1 = simplify_gen_subreg (addrmode, op1, offmode, | |
2492 subreg_lowpart_offset (addrmode, | |
2493 offmode)); | |
2494 | |
2495 /* Don't use offset_address here; we don't need a |
2496 recognizable address, and we don't want to generate | |
2497 code. */ | |
2498 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); | |
2499 } | |
2500 | |
2501 if (MEM_P (op0)) | |
2502 { | |
2503 if (mode1 == VOIDmode) | |
2504 /* Bitfield. */ | |
2505 mode1 = smallest_mode_for_size (bitsize, MODE_INT); | |
2506 if (bitpos >= BITS_PER_UNIT) | |
2507 { | |
2508 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | |
2509 bitpos %= BITS_PER_UNIT; | |
2510 } | |
2511 else if (bitpos < 0) | |
2512 { | |
2513 HOST_WIDE_INT units | |
2514 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; | |
2515 op0 = adjust_address_nv (op0, mode1, units); | |
2516 bitpos += units * BITS_PER_UNIT; | |
2517 } | |
2518 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) | |
2519 op0 = adjust_address_nv (op0, mode, 0); | |
2520 else if (GET_MODE (op0) != mode1) | |
2521 op0 = adjust_address_nv (op0, mode1, 0); | |
2522 else | |
2523 op0 = copy_rtx (op0); | |
2524 if (op0 == orig_op0) | |
2525 op0 = shallow_copy_rtx (op0); | |
2526 set_mem_attributes (op0, exp, 0); | |
2527 } | |
2528 | |
2529 if (bitpos == 0 && mode == GET_MODE (op0)) | |
2530 return op0; | |
2531 | |
2532 if (bitpos < 0) | |
2533 return NULL; | |
2534 | |
2535 if ((bitpos % BITS_PER_UNIT) == 0 | |
2536 && bitsize == GET_MODE_BITSIZE (mode1)) | |
2537 { | |
2538 enum machine_mode opmode = GET_MODE (op0); | |
2539 | |
2540 gcc_assert (opmode != BLKmode); | |
2541 | |
2542 if (opmode == VOIDmode) | |
2543 opmode = mode1; | |
2544 | |
2545 /* This condition may hold if we're expanding the address | |
2546 right past the end of an array that turned out not to | |
2547 be addressable (i.e., the address was only computed in | |
2548 debug stmts). The gen_subreg below would rightfully | |
2549 crash, and the address doesn't really exist, so just | |
2550 drop it. */ | |
2551 if (bitpos >= GET_MODE_BITSIZE (opmode)) | |
2552 return NULL; | |
2553 | |
2554 return simplify_gen_subreg (mode, op0, opmode, | |
2555 bitpos / BITS_PER_UNIT); | |
2556 } | |
2557 | |
2558 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
2559 && TYPE_UNSIGNED (TREE_TYPE (exp)) | |
2560 ? SIGN_EXTRACT | |
2561 : ZERO_EXTRACT, mode, | |
2562 GET_MODE (op0) != VOIDmode | |
2563 ? GET_MODE (op0) : mode1, | |
2564 op0, GEN_INT (bitsize), GEN_INT (bitpos)); | |
2565 } | |
2566 | |
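
When the object is in memory, the bit position produced by get_inner_reference is first folded into the address a whole byte at a time, leaving a residual bit offset for the extraction. The decomposition being computed, as our own standalone sketch (split_bitpos is our name; the GCC code adjusts the MEM rtx instead):

#include <stdio.h>

#define BITS_PER_UNIT 8

/* Split a bit position into whole bytes plus a residual in
   [0, BITS_PER_UNIT), so that bitpos == bytes * BITS_PER_UNIT + rest.
   Negative positions round the byte adjustment toward minus
   infinity.  */
static void
split_bitpos (long bitpos, long *bytes, long *rest)
{
  long units;

  if (bitpos >= 0)
    {
      *bytes = bitpos / BITS_PER_UNIT;
      *rest = bitpos % BITS_PER_UNIT;
    }
  else
    {
      units = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
      *bytes = -units;
      *rest = bitpos + units * BITS_PER_UNIT;
    }
}

int
main (void)
{
  long b, r;
  split_bitpos (19, &b, &r);
  printf ("%ld %ld\n", b, r);  /* 2 3 */
  split_bitpos (-3, &b, &r);
  printf ("%ld %ld\n", b, r);  /* -1 5 */
  return 0;
}
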
2567 case ABS_EXPR: | |
2568 return gen_rtx_ABS (mode, op0); | |
2569 | |
2570 case NEGATE_EXPR: | |
2571 return gen_rtx_NEG (mode, op0); | |
2572 | |
2573 case BIT_NOT_EXPR: | |
2574 return gen_rtx_NOT (mode, op0); | |
2575 | |
2576 case FLOAT_EXPR: | |
2577 if (unsignedp) | |
2578 return gen_rtx_UNSIGNED_FLOAT (mode, op0); | |
2579 else | |
2580 return gen_rtx_FLOAT (mode, op0); | |
2581 | |
2582 case FIX_TRUNC_EXPR: | |
2583 if (unsignedp) | |
2584 return gen_rtx_UNSIGNED_FIX (mode, op0); | |
2585 else | |
2586 return gen_rtx_FIX (mode, op0); | |
2587 | |
2588 case POINTER_PLUS_EXPR: | |
2589 case PLUS_EXPR: | |
2590 return gen_rtx_PLUS (mode, op0, op1); | |
2591 | |
2592 case MINUS_EXPR: | |
2593 return gen_rtx_MINUS (mode, op0, op1); | |
2594 | |
2595 case MULT_EXPR: | |
2596 return gen_rtx_MULT (mode, op0, op1); | |
2597 | |
2598 case RDIV_EXPR: | |
2599 case TRUNC_DIV_EXPR: | |
2600 case EXACT_DIV_EXPR: | |
2601 if (unsignedp) | |
2602 return gen_rtx_UDIV (mode, op0, op1); | |
2603 else | |
2604 return gen_rtx_DIV (mode, op0, op1); | |
2605 | |
2606 case TRUNC_MOD_EXPR: | |
2607 if (unsignedp) | |
2608 return gen_rtx_UMOD (mode, op0, op1); | |
2609 else | |
2610 return gen_rtx_MOD (mode, op0, op1); | |
2611 | |
2612 case FLOOR_DIV_EXPR: | |
2613 if (unsignedp) | |
2614 return gen_rtx_UDIV (mode, op0, op1); | |
2615 else | |
2616 { | |
2617 rtx div = gen_rtx_DIV (mode, op0, op1); | |
2618 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2619 rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2620 return gen_rtx_PLUS (mode, div, adj); | |
2621 } | |
2622 | |
2623 case FLOOR_MOD_EXPR: | |
2624 if (unsignedp) | |
2625 return gen_rtx_UMOD (mode, op0, op1); | |
2626 else | |
2627 { | |
2628 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2629 rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2630 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2631 return gen_rtx_PLUS (mode, mod, adj); | |
2632 } | |
2633 | |
2634 case CEIL_DIV_EXPR: | |
2635 if (unsignedp) | |
2636 { | |
2637 rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2638 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2639 rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2640 return gen_rtx_PLUS (mode, div, adj); | |
2641 } | |
2642 else | |
2643 { | |
2644 rtx div = gen_rtx_DIV (mode, op0, op1); | |
2645 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2646 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2647 return gen_rtx_PLUS (mode, div, adj); | |
2648 } | |
2649 | |
2650 case CEIL_MOD_EXPR: | |
2651 if (unsignedp) | |
2652 { | |
2653 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2654 rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2655 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2656 return gen_rtx_PLUS (mode, mod, adj); | |
2657 } | |
2658 else | |
2659 { | |
2660 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2661 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2662 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2663 return gen_rtx_PLUS (mode, mod, adj); | |
2664 } | |
2665 | |
2666 case ROUND_DIV_EXPR: | |
2667 if (unsignedp) | |
2668 { | |
2669 rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2670 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2671 rtx adj = round_udiv_adjust (mode, mod, op1); | |
2672 return gen_rtx_PLUS (mode, div, adj); | |
2673 } | |
2674 else | |
2675 { | |
2676 rtx div = gen_rtx_DIV (mode, op0, op1); | |
2677 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2678 rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2679 return gen_rtx_PLUS (mode, div, adj); | |
2680 } | |
2681 | |
2682 case ROUND_MOD_EXPR: | |
2683 if (unsignedp) | |
2684 { | |
2685 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2686 rtx adj = round_udiv_adjust (mode, mod, op1); | |
2687 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2688 return gen_rtx_PLUS (mode, mod, adj); | |
2689 } | |
2690 else | |
2691 { | |
2692 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2693 rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2694 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2695 return gen_rtx_PLUS (mode, mod, adj); | |
2696 } | |
2697 | |
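
The FLOOR_*, CEIL_* and ROUND_* division cases above all share one pattern: start from truncating division, then add an adjustment computed from the remainder (floor_sdiv_adjust and friends build that adjustment as RTL). For the signed floor and ceiling variants the identities are: subtract one when the remainder is nonzero and has the opposite sign of the divisor, add one when it has the same sign. A standalone check of those identities, with our own helper names:

#include <stdio.h>

/* C's / truncates toward zero; recover floor and ceiling division by
   adjusting the truncated quotient based on the remainder's sign.  */
static long
floor_div (long a, long b)
{
  long q = a / b, r = a % b;
  return q - (r != 0 && (r < 0) != (b < 0));
}

static long
ceil_div (long a, long b)
{
  long q = a / b, r = a % b;
  return q + (r != 0 && (r < 0) == (b < 0));
}

int
main (void)
{
  printf ("%ld %ld\n", floor_div (-7, 2), ceil_div (-7, 2)); /* -4 -3 */
  printf ("%ld %ld\n", floor_div (7, 2), ceil_div (7, 2));   /* 3 4 */
  return 0;
}
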
2698 case LSHIFT_EXPR: | |
2699 return gen_rtx_ASHIFT (mode, op0, op1); | |
2700 | |
2701 case RSHIFT_EXPR: | |
2702 if (unsignedp) | |
2703 return gen_rtx_LSHIFTRT (mode, op0, op1); | |
2704 else | |
2705 return gen_rtx_ASHIFTRT (mode, op0, op1); | |
2706 | |
2707 case LROTATE_EXPR: | |
2708 return gen_rtx_ROTATE (mode, op0, op1); | |
2709 | |
2710 case RROTATE_EXPR: | |
2711 return gen_rtx_ROTATERT (mode, op0, op1); | |
2712 | |
2713 case MIN_EXPR: | |
2714 if (unsignedp) | |
2715 return gen_rtx_UMIN (mode, op0, op1); | |
2716 else | |
2717 return gen_rtx_SMIN (mode, op0, op1); | |
2718 | |
2719 case MAX_EXPR: | |
2720 if (unsignedp) | |
2721 return gen_rtx_UMAX (mode, op0, op1); | |
2722 else | |
2723 return gen_rtx_SMAX (mode, op0, op1); | |
2724 | |
2725 case BIT_AND_EXPR: | |
2726 case TRUTH_AND_EXPR: | |
2727 return gen_rtx_AND (mode, op0, op1); | |
2728 | |
2729 case BIT_IOR_EXPR: | |
2730 case TRUTH_OR_EXPR: | |
2731 return gen_rtx_IOR (mode, op0, op1); | |
2732 | |
2733 case BIT_XOR_EXPR: | |
2734 case TRUTH_XOR_EXPR: | |
2735 return gen_rtx_XOR (mode, op0, op1); | |
2736 | |
2737 case TRUTH_ANDIF_EXPR: | |
2738 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); | |
2739 | |
2740 case TRUTH_ORIF_EXPR: | |
2741 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); | |
2742 | |
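
TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR become IF_THEN_ELSE rtxes via the identities a && b == (a ? b : 0) and a || b == (a ? 1 : b), with const_true_rtx standing in for the mode's true value. A quick standalone check of the identities:

#include <stdio.h>

/* a && b is a ? b : 0, and a || b is a ? 1 : b, matching the two
   IF_THEN_ELSE forms above (const_true_rtx playing the role of 1).  */
static int
andif (int a, int b)
{
  return a ? b : 0;
}

static int
orif (int a, int b)
{
  return a ? 1 : b;
}

int
main (void)
{
  int a, b;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      if (andif (a, b) != (a && b) || orif (a, b) != (a || b))
        return 1;
  printf ("identities hold\n");
  return 0;
}
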
2743 case TRUTH_NOT_EXPR: | |
2744 return gen_rtx_EQ (mode, op0, const0_rtx); | |
2745 | |
2746 case LT_EXPR: | |
2747 if (unsignedp) | |
2748 return gen_rtx_LTU (mode, op0, op1); | |
2749 else | |
2750 return gen_rtx_LT (mode, op0, op1); | |
2751 | |
2752 case LE_EXPR: | |
2753 if (unsignedp) | |
2754 return gen_rtx_LEU (mode, op0, op1); | |
2755 else | |
2756 return gen_rtx_LE (mode, op0, op1); | |
2757 | |
2758 case GT_EXPR: | |
2759 if (unsignedp) | |
2760 return gen_rtx_GTU (mode, op0, op1); | |
2761 else | |
2762 return gen_rtx_GT (mode, op0, op1); | |
2763 | |
2764 case GE_EXPR: | |
2765 if (unsignedp) | |
2766 return gen_rtx_GEU (mode, op0, op1); | |
2767 else | |
2768 return gen_rtx_GE (mode, op0, op1); | |
2769 | |
2770 case EQ_EXPR: | |
2771 return gen_rtx_EQ (mode, op0, op1); | |
2772 | |
2773 case NE_EXPR: | |
2774 return gen_rtx_NE (mode, op0, op1); | |
2775 | |
2776 case UNORDERED_EXPR: | |
2777 return gen_rtx_UNORDERED (mode, op0, op1); | |
2778 | |
2779 case ORDERED_EXPR: | |
2780 return gen_rtx_ORDERED (mode, op0, op1); | |
2781 | |
2782 case UNLT_EXPR: | |
2783 return gen_rtx_UNLT (mode, op0, op1); | |
2784 | |
2785 case UNLE_EXPR: | |
2786 return gen_rtx_UNLE (mode, op0, op1); | |
2787 | |
2788 case UNGT_EXPR: | |
2789 return gen_rtx_UNGT (mode, op0, op1); | |
2790 | |
2791 case UNGE_EXPR: | |
2792 return gen_rtx_UNGE (mode, op0, op1); | |
2793 | |
2794 case UNEQ_EXPR: | |
2795 return gen_rtx_UNEQ (mode, op0, op1); | |
2796 | |
2797 case LTGT_EXPR: | |
2798 return gen_rtx_LTGT (mode, op0, op1); | |
2799 | |
2800 case COND_EXPR: | |
2801 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); | |
2802 | |
2803 case COMPLEX_EXPR: | |
2804 gcc_assert (COMPLEX_MODE_P (mode)); | |
2805 if (GET_MODE (op0) == VOIDmode) | |
2806 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); | |
2807 if (GET_MODE (op1) == VOIDmode) | |
2808 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); | |
2809 return gen_rtx_CONCAT (mode, op0, op1); | |
2810 | |
2811 case CONJ_EXPR: | |
2812 if (GET_CODE (op0) == CONCAT) | |
2813 return gen_rtx_CONCAT (mode, XEXP (op0, 0), | |
2814 gen_rtx_NEG (GET_MODE_INNER (mode), | |
2815 XEXP (op0, 1))); | |
2816 else | |
2817 { | |
2818 enum machine_mode imode = GET_MODE_INNER (mode); | |
2819 rtx re, im; | |
2820 | |
2821 if (MEM_P (op0)) | |
2822 { | |
2823 re = adjust_address_nv (op0, imode, 0); | |
2824 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); | |
2825 } | |
2826 else | |
2827 { | |
2828 enum machine_mode ifmode = int_mode_for_mode (mode); | |
2829 enum machine_mode ihmode = int_mode_for_mode (imode); | |
2830 rtx halfsize; | |
2831 if (ifmode == BLKmode || ihmode == BLKmode) | |
2832 return NULL; | |
2833 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); | |
2834 re = op0; | |
2835 if (mode != ifmode) | |
2836 re = gen_rtx_SUBREG (ifmode, re, 0); | |
2837 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx); | |
2838 if (imode != ihmode) | |
2839 re = gen_rtx_SUBREG (imode, re, 0); | |
2840 im = copy_rtx (op0); | |
2841 if (mode != ifmode) | |
2842 im = gen_rtx_SUBREG (ifmode, im, 0); | |
2843 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize); | |
2844 if (imode != ihmode) | |
2845 im = gen_rtx_SUBREG (imode, im, 0); | |
2846 } | |
2847 im = gen_rtx_NEG (imode, im); | |
2848 return gen_rtx_CONCAT (mode, re, im); | |
2849 } | |
2850 | |
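
CONJ_EXPR builds the conjugate as a CONCAT of the untouched real half and a NEG of the imaginary half, fetching the halves from memory or by ZERO_EXTRACT when the complex value lives in a register. The arithmetic being encoded, as a standalone sketch (struct cplx and conj_ are our names):

#include <stdio.h>

struct cplx { double re, im; };

/* conj(z) keeps the real half and negates the imaginary half, which
   is exactly what the CONCAT (re, NEG im) above expresses.  */
static struct cplx
conj_ (struct cplx z)
{
  struct cplx r = { z.re, -z.im };
  return r;
}

int
main (void)
{
  struct cplx z = { 1.0, 2.0 }, c = conj_ (z);
  printf ("%g %g\n", c.re, c.im); /* 1 -2 */
  return 0;
}
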
2851 case ADDR_EXPR: | |
2852 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2853 if (!op0 || !MEM_P (op0)) | |
2854 return NULL; | |
2855 | |
2856 op0 = convert_debug_memory_address (mode, XEXP (op0, 0)); | |
2857 | |
2858 return op0; | |
2859 | |
2860 case VECTOR_CST: | |
2861 exp = build_constructor_from_list (TREE_TYPE (exp), | |
2862 TREE_VECTOR_CST_ELTS (exp)); | |
2863 /* Fall through. */ | |
2864 | |
2865 case CONSTRUCTOR: | |
2866 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | |
2867 { | |
2868 unsigned i; | |
2869 tree val; | |
2870 | |
2871 op0 = gen_rtx_CONCATN | |
2872 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); | |
2873 | |
2874 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) | |
2875 { | |
2876 op1 = expand_debug_expr (val); | |
2877 if (!op1) | |
2878 return NULL; | |
2879 XVECEXP (op0, 0, i) = op1; | |
2880 } | |
2881 | |
2882 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) | |
2883 { | |
2884 op1 = expand_debug_expr | |
2885 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node)); | |
2886 | |
2887 if (!op1) | |
2888 return NULL; | |
2889 | |
2890 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) | |
2891 XVECEXP (op0, 0, i) = op1; | |
2892 } | |
2893 | |
2894 return op0; | |
2895 } | |
2896 else | |
2897 goto flag_unsupported; | |
2898 | |
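
A vector CONSTRUCTOR with fewer initializers than the vector has lanes is completed by expanding a single zero of the element type and replicating it into the remaining CONCATN slots. The same fill pattern on a plain array, as our own standalone sketch:

#include <stdio.h>

#define NLANES 4

/* Copy the given initializers, then replicate zero into the
   remaining lanes, mirroring the CONCATN fill loop above.  */
int
main (void)
{
  int lanes[NLANES];
  int init[] = { 7, 9 };
  unsigned i, ninit = sizeof init / sizeof *init;

  for (i = 0; i < ninit; i++)
    lanes[i] = init[i];
  for (; i < NLANES; i++)
    lanes[i] = 0;

  for (i = 0; i < NLANES; i++)
    printf ("%d ", lanes[i]); /* 7 9 0 0 */
  printf ("\n");
  return 0;
}
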
2899 case CALL_EXPR: | |
2900 /* ??? Maybe handle some builtins? */ | |
2901 return NULL; | |
2902 | |
2903 case SSA_NAME: | |
2904 { | |
2905 int part = var_to_partition (SA.map, exp); | |
2906 | |
2907 if (part == NO_PARTITION) | |
2908 return NULL; | |
2909 | |
2910 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions); | |
2911 | |
2912 op0 = SA.partition_to_pseudo[part]; | |
2913 goto adjust_mode; | |
2914 } | |
2915 | |
2916 case ERROR_MARK: | |
2917 return NULL; | |
2918 | |
2919 default: | |
2920 flag_unsupported: | |
2921 #ifdef ENABLE_CHECKING | |
2922 debug_tree (exp); | |
2923 gcc_unreachable (); | |
2924 #else | |
2925 return NULL; | |
2926 #endif | |
2927 } | |
2928 } | |
2929 | |
2930 /* Expand the _LOCs in debug insns. We run this after expanding all | |
2931 regular insns, so that any variables referenced in the function | |
2932 will have their DECL_RTLs set. */ | |
2933 | |
2934 static void | |
2935 expand_debug_locations (void) | |
2936 { | |
2937 rtx insn; | |
2938 rtx last = get_last_insn (); | |
2939 int save_strict_alias = flag_strict_aliasing; | |
2940 | |
2941 /* New alias sets while setting up memory attributes cause | |
2942 -fcompare-debug failures, even though they don't bring about any | |
2943 codegen changes. */ | |
2944 flag_strict_aliasing = 0; | |
2945 | |
2946 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2947 if (DEBUG_INSN_P (insn)) | |
2948 { | |
2949 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); | |
2950 rtx val; | |
2951 enum machine_mode mode; | |
2952 | |
2953 if (value == NULL_TREE) | |
2954 val = NULL_RTX; | |
2955 else | |
2956 { | |
2957 val = expand_debug_expr (value); | |
2958 gcc_assert (last == get_last_insn ()); | |
2959 } | |
2960 | |
2961 if (!val) | |
2962 val = gen_rtx_UNKNOWN_VAR_LOC (); | |
2963 else | |
2964 { | |
2965 mode = GET_MODE (INSN_VAR_LOCATION (insn)); | |
2966 | |
2967 gcc_assert (mode == GET_MODE (val) | |
2968 || (GET_MODE (val) == VOIDmode | |
2969 && (CONST_INT_P (val) | |
2970 || GET_CODE (val) == CONST_FIXED | |
2971 || GET_CODE (val) == CONST_DOUBLE | |
2972 || GET_CODE (val) == LABEL_REF))); | |
2973 } | |
2974 | |
2975 INSN_VAR_LOCATION_LOC (insn) = val; | |
2976 } | |
2977 | |
2978 flag_strict_aliasing = save_strict_alias; | |
1861 } | 2979 } |
1862 | 2980 |
1863 /* Expand basic block BB from GIMPLE trees to RTL. */ | 2981 /* Expand basic block BB from GIMPLE trees to RTL. */ |
1864 | 2982 |
1865 static basic_block | 2983 static basic_block |
1873 edge_iterator ei; | 2991 edge_iterator ei; |
1874 void **elt; | 2992 void **elt; |
1875 | 2993 |
1876 if (dump_file) | 2994 if (dump_file) |
1877 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", | 2995 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", |
1878 bb->index); | 2996 bb->index); |
1879 | 2997 |
1880 /* Note that since we are now transitioning from GIMPLE to RTL, we | 2998 /* Note that since we are now transitioning from GIMPLE to RTL, we |
1881 cannot use the gsi_*_bb() routines because they expect the basic | 2999 cannot use the gsi_*_bb() routines because they expect the basic |
1882 block to be in GIMPLE, instead of RTL. Therefore, we need to | 3000 block to be in GIMPLE, instead of RTL. Therefore, we need to |
1883 access the BB sequence directly. */ | 3001 access the BB sequence directly. */ |
1897 | 3015 |
1898 gcc_assert (single_succ_p (bb)); | 3016 gcc_assert (single_succ_p (bb)); |
1899 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | 3017 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); |
1900 | 3018 |
1901 if (bb->next_bb == EXIT_BLOCK_PTR | 3019 if (bb->next_bb == EXIT_BLOCK_PTR |
1902 && !gimple_return_retval (ret_stmt)) | 3020 && !gimple_return_retval (ret_stmt)) |
1903 { | 3021 { |
1904 gsi_remove (&gsi, false); | 3022 gsi_remove (&gsi, false); |
1905 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; | 3023 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; |
1906 } | 3024 } |
1907 } | 3025 } |
1908 | 3026 |
1909 gsi = gsi_start (stmts); | 3027 gsi = gsi_start (stmts); |
1910 if (!gsi_end_p (gsi)) | 3028 if (!gsi_end_p (gsi)) |
1911 { | 3029 { |
1912 stmt = gsi_stmt (gsi); | 3030 stmt = gsi_stmt (gsi); |
1913 if (gimple_code (stmt) != GIMPLE_LABEL) | 3031 if (gimple_code (stmt) != GIMPLE_LABEL) |
1914 stmt = NULL; | 3032 stmt = NULL; |
1915 } | 3033 } |
1916 | 3034 |
1917 elt = pointer_map_contains (lab_rtx_for_bb, bb); | 3035 elt = pointer_map_contains (lab_rtx_for_bb, bb); |
1918 | 3036 |
1919 if (stmt || elt) | 3037 if (stmt || elt) |
1920 { | 3038 { |
1921 last = get_last_insn (); | 3039 last = get_last_insn (); |
1922 | 3040 |
1923 if (stmt) | 3041 if (stmt) |
1924 { | 3042 { |
1925 tree stmt_tree = gimple_to_tree (stmt); | 3043 expand_gimple_stmt (stmt); |
1926 expand_expr_stmt (stmt_tree); | 3044 gsi_next (&gsi); |
1927 release_stmt_tree (stmt, stmt_tree); | 3045 } |
1928 gsi_next (&gsi); | |
1929 } | |
1930 | 3046 |
1931 if (elt) | 3047 if (elt) |
1932 emit_label ((rtx) *elt); | 3048 emit_label ((rtx) *elt); |
1933 | 3049 |
1934 /* Java emits line number notes in the top of labels. | 3050 /* Java emits line number notes in the top of labels. |
1935 ??? Make this go away once line number notes are obsoleted. */ | 3051 ??? Make this go away once line number notes are obsoleted. */ |
1936 BB_HEAD (bb) = NEXT_INSN (last); | 3052 BB_HEAD (bb) = NEXT_INSN (last); |
1937 if (NOTE_P (BB_HEAD (bb))) | 3053 if (NOTE_P (BB_HEAD (bb))) |
1938 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); | 3054 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); |
1939 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); | 3055 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); |
1940 | 3056 |
1941 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 3057 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1942 } | 3058 } |
1943 else | 3059 else |
1944 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); | 3060 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); |
1945 | 3061 |
1946 NOTE_BASIC_BLOCK (note) = bb; | 3062 NOTE_BASIC_BLOCK (note) = bb; |
1947 | 3063 |
1948 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 3064 for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
1949 { | 3065 { |
1950 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ | 3066 basic_block new_bb; |
1951 e->flags &= ~EDGE_EXECUTABLE; | 3067 |
1952 | 3068 stmt = gsi_stmt (gsi); |
1953 /* At the moment not all abnormal edges match the RTL representation. | 3069 currently_expanding_gimple_stmt = stmt; |
1954 It is safe to remove them here as find_many_sub_basic_blocks will | 3070 |
1955 rediscover them. In the future we should get this fixed properly. */ | 3071 /* Expand this statement, then evaluate the resulting RTL and |
1956 if (e->flags & EDGE_ABNORMAL) | 3072 fixup the CFG accordingly. */ |
1957 remove_edge (e); | 3073 if (gimple_code (stmt) == GIMPLE_COND) |
3074 { | |
3075 new_bb = expand_gimple_cond (bb, stmt); | |
3076 if (new_bb) | |
3077 return new_bb; | |
3078 } | |
3079 else if (gimple_debug_bind_p (stmt)) | |
3080 { | |
3081 location_t sloc = get_curr_insn_source_location (); | |
3082 tree sblock = get_curr_insn_block (); | |
3083 gimple_stmt_iterator nsi = gsi; | |
3084 | |
3085 for (;;) | |
3086 { | |
3087 tree var = gimple_debug_bind_get_var (stmt); | |
3088 tree value; | |
3089 rtx val; | |
3090 enum machine_mode mode; | |
3091 | |
3092 if (gimple_debug_bind_has_value_p (stmt)) | |
3093 value = gimple_debug_bind_get_value (stmt); | |
3094 else | |
3095 value = NULL_TREE; | |
3096 | |
3097 last = get_last_insn (); | |
3098 | |
3099 set_curr_insn_source_location (gimple_location (stmt)); | |
3100 set_curr_insn_block (gimple_block (stmt)); | |
3101 | |
3102 if (DECL_P (var)) | |
3103 mode = DECL_MODE (var); | |
3104 else | |
3105 mode = TYPE_MODE (TREE_TYPE (var)); | |
3106 | |
3107 val = gen_rtx_VAR_LOCATION | |
3108 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | |
3109 | |
3110 val = emit_debug_insn (val); | |
3111 | |
3112 if (dump_file && (dump_flags & TDF_DETAILS)) | |
3113 { | |
3114 /* We can't dump the insn with a TREE where an RTX | |
3115 is expected. */ | |
3116 INSN_VAR_LOCATION_LOC (val) = const0_rtx; | |
3117 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
3118 INSN_VAR_LOCATION_LOC (val) = (rtx)value; | |
3119 } | |
3120 | |
3121 gsi = nsi; | |
3122 gsi_next (&nsi); | |
3123 if (gsi_end_p (nsi)) | |
3124 break; | |
3125 stmt = gsi_stmt (nsi); | |
3126 if (!gimple_debug_bind_p (stmt)) | |
3127 break; | |
3128 } | |
3129 | |
3130 set_curr_insn_source_location (sloc); | |
3131 set_curr_insn_block (sblock); | |
3132 } | |
1958 else | 3133 else |
1959 ei_next (&ei); | 3134 { |
1960 } | 3135 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) |
1961 | 3136 { |
1962 for (; !gsi_end_p (gsi); gsi_next (&gsi)) | 3137 bool can_fallthru; |
1963 { | 3138 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); |
1964 gimple stmt = gsi_stmt (gsi); | 3139 if (new_bb) |
1965 basic_block new_bb; | 3140 { |
1966 | 3141 if (can_fallthru) |
1967 /* Expand this statement, then evaluate the resulting RTL and | 3142 bb = new_bb; |
1968 fixup the CFG accordingly. */ | 3143 else |
1969 if (gimple_code (stmt) == GIMPLE_COND) | 3144 return new_bb; |
1970 { | 3145 } |
1971 new_bb = expand_gimple_cond (bb, stmt); | 3146 } |
1972 if (new_bb) | |
1973 return new_bb; | |
1974 } | |
1975 else | 3147 else |
1976 { | 3148 { |
1977 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) | 3149 def_operand_p def_p; |
1978 { | 3150 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); |
1979 bool can_fallthru; | 3151 |
1980 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | 3152 if (def_p != NULL) |
1981 if (new_bb) | 3153 { |
1982 { | 3154 /* Ignore this stmt if it is in the list of |
1983 if (can_fallthru) | 3155 replaceable expressions. */ |
1984 bb = new_bb; | 3156 if (SA.values |
1985 else | 3157 && bitmap_bit_p (SA.values, |
1986 return new_bb; | 3158 SSA_NAME_VERSION (DEF_FROM_PTR (def_p)))) |
1987 } | 3159 continue; |
1988 } | 3160 } |
1989 else if (gimple_code (stmt) != GIMPLE_CHANGE_DYNAMIC_TYPE) | 3161 last = expand_gimple_stmt (stmt); |
1990 { | 3162 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1991 tree stmt_tree = gimple_to_tree (stmt); | 3163 } |
1992 last = get_last_insn (); | 3164 } |
1993 expand_expr_stmt (stmt_tree); | 3165 } |
1994 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 3166 |
1995 release_stmt_tree (stmt, stmt_tree); | 3167 currently_expanding_gimple_stmt = NULL; |
1996 } | |
1997 } | |
1998 } | |
1999 | 3168 |
2000 /* Expand implicit goto and convert goto_locus. */ | 3169 /* Expand implicit goto and convert goto_locus. */ |
2001 FOR_EACH_EDGE (e, ei, bb->succs) | 3170 FOR_EACH_EDGE (e, ei, bb->succs) |
2002 { | 3171 { |
2003 if (e->goto_locus && e->goto_block) | 3172 if (e->goto_locus && e->goto_block) |
2004 { | 3173 { |
2005 set_curr_insn_source_location (e->goto_locus); | 3174 set_curr_insn_source_location (e->goto_locus); |
2006 set_curr_insn_block (e->goto_block); | 3175 set_curr_insn_block (e->goto_block); |
2007 e->goto_locus = curr_insn_locator (); | 3176 e->goto_locus = curr_insn_locator (); |
2008 } | 3177 } |
2009 e->goto_block = NULL; | 3178 e->goto_block = NULL; |
2010 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | 3179 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) |
2011 { | 3180 { |
2012 emit_jump (label_rtx_for_bb (e->dest)); | 3181 emit_jump (label_rtx_for_bb (e->dest)); |
2013 e->flags &= ~EDGE_FALLTHRU; | 3182 e->flags &= ~EDGE_FALLTHRU; |
2014 } | 3183 } |
3184 } | |
3185 | |
3186 /* Expanded RTL can create a jump in the last instruction of the block. | |
3187 Later this might be assumed to be a jump to the successor and break edge insertion. | |
3188 We need to insert a dummy move to prevent this. PR41440. */ | |
3189 if (single_succ_p (bb) | |
3190 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU) | |
3191 && (last = get_last_insn ()) | |
3192 && JUMP_P (last)) | |
3193 { | |
3194 rtx dummy = gen_reg_rtx (SImode); | |
3195 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL); | |
2015 } | 3196 } |
2016 | 3197 |
2017 do_pending_stack_adjust (); | 3198 do_pending_stack_adjust (); |
2018 | 3199 |
2019 /* Find the block tail. The last insn in the block is the insn | 3200 /* Find the block tail. The last insn in the block is the insn |
2060 } | 3241 } |
2061 else | 3242 else |
2062 flags = EDGE_FALLTHRU; | 3243 flags = EDGE_FALLTHRU; |
2063 | 3244 |
2064 init_block = create_basic_block (NEXT_INSN (get_insns ()), | 3245 init_block = create_basic_block (NEXT_INSN (get_insns ()), |
2065 get_last_insn (), | 3246 get_last_insn (), |
2066 ENTRY_BLOCK_PTR); | 3247 ENTRY_BLOCK_PTR); |
2067 init_block->frequency = ENTRY_BLOCK_PTR->frequency; | 3248 init_block->frequency = ENTRY_BLOCK_PTR->frequency; |
2068 init_block->count = ENTRY_BLOCK_PTR->count; | 3249 init_block->count = ENTRY_BLOCK_PTR->count; |
2069 if (e) | 3250 if (e) |
2070 { | 3251 { |
2071 first_block = e->dest; | 3252 first_block = e->dest; |
2128 */ | 3309 */ |
2129 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | 3310 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; |
2130 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) | 3311 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) |
2131 head = NEXT_INSN (head); | 3312 head = NEXT_INSN (head); |
2132 exit_block = create_basic_block (NEXT_INSN (head), end, | 3313 exit_block = create_basic_block (NEXT_INSN (head), end, |
2133 EXIT_BLOCK_PTR->prev_bb); | 3314 EXIT_BLOCK_PTR->prev_bb); |
2134 exit_block->frequency = EXIT_BLOCK_PTR->frequency; | 3315 exit_block->frequency = EXIT_BLOCK_PTR->frequency; |
2135 exit_block->count = EXIT_BLOCK_PTR->count; | 3316 exit_block->count = EXIT_BLOCK_PTR->count; |
2136 | 3317 |
2137 ix = 0; | 3318 ix = 0; |
2138 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | 3319 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) |
2139 { | 3320 { |
2140 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); | 3321 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); |
2141 if (!(e->flags & EDGE_ABNORMAL)) | 3322 if (!(e->flags & EDGE_ABNORMAL)) |
2142 redirect_edge_succ (e, exit_block); | 3323 redirect_edge_succ (e, exit_block); |
2143 else | 3324 else |
2144 ix++; | 3325 ix++; |
2145 } | 3326 } |
2146 | 3327 |
2147 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | 3328 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); |
2148 e->probability = REG_BR_PROB_BASE; | 3329 e->probability = REG_BR_PROB_BASE; |
2149 e->count = EXIT_BLOCK_PTR->count; | 3330 e->count = EXIT_BLOCK_PTR->count; |
2150 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) | 3331 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) |
2151 if (e2 != e) | 3332 if (e2 != e) |
2152 { | 3333 { |
2153 e->count -= e2->count; | 3334 e->count -= e2->count; |
2154 exit_block->count -= e2->count; | 3335 exit_block->count -= e2->count; |
2155 exit_block->frequency -= EDGE_FREQUENCY (e2); | 3336 exit_block->frequency -= EDGE_FREQUENCY (e2); |
2156 } | 3337 } |
2157 if (e->count < 0) | 3338 if (e->count < 0) |
2158 e->count = 0; | 3339 e->count = 0; |
2159 if (exit_block->count < 0) | 3340 if (exit_block->count < 0) |
2160 exit_block->count = 0; | 3341 exit_block->count = 0; |
2167 Look for ARRAY_REF nodes with non-constant indexes and mark them | 3348 Look for ARRAY_REF nodes with non-constant indexes and mark them |
2168 addressable. */ | 3349 addressable. */ |
2169 | 3350 |
2170 static tree | 3351 static tree |
2171 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, | 3352 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, |
2172 void *data ATTRIBUTE_UNUSED) | 3353 void *data ATTRIBUTE_UNUSED) |
2173 { | 3354 { |
2174 tree t = *tp; | 3355 tree t = *tp; |
2175 | 3356 |
2176 if (IS_TYPE_OR_DECL_P (t)) | 3357 if (IS_TYPE_OR_DECL_P (t)) |
2177 *walk_subtrees = 0; | 3358 *walk_subtrees = 0; |
2178 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | 3359 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
2179 { | 3360 { |
2180 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | 3361 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
2181 && is_gimple_min_invariant (TREE_OPERAND (t, 1)) | 3362 && is_gimple_min_invariant (TREE_OPERAND (t, 1)) |
2182 && (!TREE_OPERAND (t, 2) | 3363 && (!TREE_OPERAND (t, 2) |
2183 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | 3364 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) |
2184 || (TREE_CODE (t) == COMPONENT_REF | 3365 || (TREE_CODE (t) == COMPONENT_REF |
2185 && (!TREE_OPERAND (t,2) | 3366 && (!TREE_OPERAND (t,2) |
2186 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | 3367 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) |
2187 || TREE_CODE (t) == BIT_FIELD_REF | 3368 || TREE_CODE (t) == BIT_FIELD_REF |
2188 || TREE_CODE (t) == REALPART_EXPR | 3369 || TREE_CODE (t) == REALPART_EXPR |
2189 || TREE_CODE (t) == IMAGPART_EXPR | 3370 || TREE_CODE (t) == IMAGPART_EXPR |
2190 || TREE_CODE (t) == VIEW_CONVERT_EXPR | 3371 || TREE_CODE (t) == VIEW_CONVERT_EXPR |
2191 || CONVERT_EXPR_P (t)) | 3372 || CONVERT_EXPR_P (t)) |
2192 t = TREE_OPERAND (t, 0); | 3373 t = TREE_OPERAND (t, 0); |
2193 | 3374 |
2194 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | 3375 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
2195 { | 3376 { |
2196 t = get_base_address (t); | 3377 t = get_base_address (t); |
2197 if (t && DECL_P (t)) | 3378 if (t && DECL_P (t) |
2198 TREE_ADDRESSABLE (t) = 1; | 3379 && DECL_MODE (t) != BLKmode) |
2199 } | 3380 TREE_ADDRESSABLE (t) = 1; |
3381 } | |
2200 | 3382 |
2201 *walk_subtrees = 0; | 3383 *walk_subtrees = 0; |
2202 } | 3384 } |
2203 | 3385 |
2204 return NULL_TREE; | 3386 return NULL_TREE; |
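
discover_nonconstant_array_refs_r is a walk_tree-style callback: returning a tree aborts the walk, and clearing *walk_subtrees prunes the current subtree, which is how the code above skips types, decls, and already-handled reference chains. A minimal standalone walker with the same pruning protocol (the node type and walk function here are ours, not GCC's):

#include <stdio.h>

struct node { int is_type_or_decl; struct node *kids[2]; };

typedef struct node *(*walk_fn) (struct node *, int *);

/* Pre-order walk with the walk_tree protocol: the callback may clear
   *walk_subtrees to prune the children, or return a node to abort
   the whole walk early.  */
static struct node *
walk (struct node *n, walk_fn fn)
{
  int walk_subtrees = 1;
  struct node *stop;
  int i;

  if (!n)
    return NULL;
  stop = fn (n, &walk_subtrees);
  if (stop)
    return stop;
  if (walk_subtrees)
    for (i = 0; i < 2; i++)
      if ((stop = walk (n->kids[i], fn)) != NULL)
        return stop;
  return NULL;
}

/* Like the callback above: prune at "type or decl" nodes so their
   children are never visited.  */
static struct node *
prune_types_cb (struct node *n, int *walk_subtrees)
{
  if (n->is_type_or_decl)
    *walk_subtrees = 0;
  else
    printf ("visited\n");
  return NULL;
}

int
main (void)
{
  struct node leaf = { 0, { NULL, NULL } };
  struct node type = { 1, { &leaf, NULL } };
  struct node root = { 0, { &type, NULL } };

  walk (&root, prune_types_cb); /* prints "visited" once: leaf pruned */
  return 0;
}
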
2216 gimple_stmt_iterator gsi; | 3398 gimple_stmt_iterator gsi; |
2217 | 3399 |
2218 FOR_EACH_BB (bb) | 3400 FOR_EACH_BB (bb) |
2219 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | 3401 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2220 { | 3402 { |
2221 gimple stmt = gsi_stmt (gsi); | 3403 gimple stmt = gsi_stmt (gsi); |
2222 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | 3404 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); |
2223 } | 3405 } |
2224 } | 3406 } |
2225 | 3407 |
2226 /* This function sets crtl->args.internal_arg_pointer to a virtual | 3408 /* This function sets crtl->args.internal_arg_pointer to a virtual |
2227 register if DRAP is needed. Local register allocator will replace | 3409 register if DRAP is needed. Local register allocator will replace |
2233 rtx drap_rtx; | 3415 rtx drap_rtx; |
2234 unsigned int preferred_stack_boundary; | 3416 unsigned int preferred_stack_boundary; |
2235 | 3417 |
2236 if (! SUPPORTS_STACK_ALIGNMENT) | 3418 if (! SUPPORTS_STACK_ALIGNMENT) |
2237 return; | 3419 return; |
2238 | 3420 |
2239 if (cfun->calls_alloca | 3421 if (cfun->calls_alloca |
2240 || cfun->has_nonlocal_label | 3422 || cfun->has_nonlocal_label |
2241 || crtl->has_nonlocal_goto) | 3423 || crtl->has_nonlocal_goto) |
2242 crtl->need_drap = true; | 3424 crtl->need_drap = true; |
2243 | 3425 |
2244 gcc_assert (crtl->stack_alignment_needed | 3426 /* Call update_stack_boundary here again to update incoming stack |
2245 <= crtl->stack_alignment_estimated); | 3427 boundary. It may set incoming stack alignment to a different |
3428 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may | |
3429 use the minimum incoming stack alignment to check if it is OK | |
3430 to perform sibcall optimization since sibcall optimization will | |
3431 only align the outgoing stack to incoming stack boundary. */ | |
3432 if (targetm.calls.update_stack_boundary) | |
3433 targetm.calls.update_stack_boundary (); | |
3434 | |
3435 /* The incoming stack frame has to be aligned at least at | |
3436 parm_stack_boundary. */ | |
3437 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY); | |
2246 | 3438 |
2247 /* Update crtl->stack_alignment_estimated and use it later to align | 3439 /* Update crtl->stack_alignment_estimated and use it later to align |
2248 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call | 3440 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call |
2249 exceptions since callgraph doesn't collect incoming stack alignment | 3441 exceptions since callgraph doesn't collect incoming stack alignment |
2250 in this case. */ | 3442 in this case. */ |
2256 if (preferred_stack_boundary > crtl->stack_alignment_estimated) | 3448 if (preferred_stack_boundary > crtl->stack_alignment_estimated) |
2257 crtl->stack_alignment_estimated = preferred_stack_boundary; | 3449 crtl->stack_alignment_estimated = preferred_stack_boundary; |
2258 if (preferred_stack_boundary > crtl->stack_alignment_needed) | 3450 if (preferred_stack_boundary > crtl->stack_alignment_needed) |
2259 crtl->stack_alignment_needed = preferred_stack_boundary; | 3451 crtl->stack_alignment_needed = preferred_stack_boundary; |
2260 | 3452 |
3453 gcc_assert (crtl->stack_alignment_needed | |
3454 <= crtl->stack_alignment_estimated); | |
3455 | |
2261 crtl->stack_realign_needed | 3456 crtl->stack_realign_needed |
2262 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; | 3457 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; |
2263 crtl->stack_realign_tried = crtl->stack_realign_needed; | 3458 crtl->stack_realign_tried = crtl->stack_realign_needed; |
2264 | 3459 |
2265 crtl->stack_realign_processed = true; | 3460 crtl->stack_realign_processed = true; |
2266 | 3461 |
2267 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack | 3462 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack |
2268 alignment. */ | 3463 alignment. */ |
2269 gcc_assert (targetm.calls.get_drap_rtx != NULL); | 3464 gcc_assert (targetm.calls.get_drap_rtx != NULL); |
2270 drap_rtx = targetm.calls.get_drap_rtx (); | 3465 drap_rtx = targetm.calls.get_drap_rtx (); |
2271 | 3466 |
2272 /* stack_realign_drap and drap_rtx must match. */ | 3467 /* stack_realign_drap and drap_rtx must match. */ |
2273 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); | 3468 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); |
2274 | 3469 |
2275 /* Do nothing if NULL is returned, which means DRAP is not needed. */ | 3470 /* Do nothing if NULL is returned, which means DRAP is not needed. */ |
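
The realignment decision above reduces to one comparison in bits: realign dynamically only when the estimated alignment the function needs exceeds what the incoming stack boundary already guarantees. A standalone restatement of that predicate (needs_realign is our name, not a GCC function):

#include <stdio.h>

/* Realignment is needed exactly when the alignment the function is
   estimated to need exceeds what the incoming stack guarantees;
   both boundaries are in bits, as in the stack_realign_needed
   computation above.  */
static int
needs_realign (unsigned int incoming_boundary, unsigned int estimated)
{
  return incoming_boundary < estimated;
}

int
main (void)
{
  printf ("%d\n", needs_realign (128, 256)); /* 1: must realign */
  printf ("%d\n", needs_realign (128, 64));  /* 0: stack already OK */
  return 0;
}
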
2297 { | 3492 { |
2298 basic_block bb, init_block; | 3493 basic_block bb, init_block; |
2299 sbitmap blocks; | 3494 sbitmap blocks; |
2300 edge_iterator ei; | 3495 edge_iterator ei; |
2301 edge e; | 3496 edge e; |
3497 unsigned i; | |
3498 | |
3499 rewrite_out_of_ssa (&SA); | |
3500 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, | |
3501 sizeof (rtx)); | |
2302 | 3502 |
2303 /* Some backends want to know that we are expanding to RTL. */ | 3503 /* Some backends want to know that we are expanding to RTL. */ |
2304 currently_expanding_to_rtl = 1; | 3504 currently_expanding_to_rtl = 1; |
2305 | 3505 |
2306 rtl_profile_for_bb (ENTRY_BLOCK_PTR); | 3506 rtl_profile_for_bb (ENTRY_BLOCK_PTR); |
2307 | 3507 |
2308 insn_locators_alloc (); | 3508 insn_locators_alloc (); |
2309 if (!DECL_BUILT_IN (current_function_decl)) | 3509 if (!DECL_IS_BUILTIN (current_function_decl)) |
2310 { | 3510 { |
2311 /* Eventually, all FEs should explicitly set function_start_locus. */ | 3511 /* Eventually, all FEs should explicitly set function_start_locus. */ |
2312 if (cfun->function_start_locus == UNKNOWN_LOCATION) | 3512 if (cfun->function_start_locus == UNKNOWN_LOCATION) |
2313 set_curr_insn_source_location | 3513 set_curr_insn_source_location |
2314 (DECL_SOURCE_LOCATION (current_function_decl)); | 3514 (DECL_SOURCE_LOCATION (current_function_decl)); |
2327 discover_nonconstant_array_refs (); | 3527 discover_nonconstant_array_refs (); |
2328 | 3528 |
2329 targetm.expand_to_rtl_hook (); | 3529 targetm.expand_to_rtl_hook (); |
2330 crtl->stack_alignment_needed = STACK_BOUNDARY; | 3530 crtl->stack_alignment_needed = STACK_BOUNDARY; |
2331 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; | 3531 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; |
2332 crtl->stack_alignment_estimated = STACK_BOUNDARY; | 3532 crtl->stack_alignment_estimated = 0; |
2333 crtl->preferred_stack_boundary = STACK_BOUNDARY; | 3533 crtl->preferred_stack_boundary = STACK_BOUNDARY; |
2334 cfun->cfg->max_jumptable_ents = 0; | 3534 cfun->cfg->max_jumptable_ents = 0; |
2335 | 3535 |
2336 | 3536 |
2337 /* Expand the variables recorded during gimple lowering. */ | 3537 /* Expand the variables recorded during gimple lowering. */ |
2339 | 3539 |
2340 /* Honor stack protection warnings. */ | 3540 /* Honor stack protection warnings. */ |
2341 if (warn_stack_protect) | 3541 if (warn_stack_protect) |
2342 { | 3542 { |
2343 if (cfun->calls_alloca) | 3543 if (cfun->calls_alloca) |
2344 warning (OPT_Wstack_protector, | 3544 warning (OPT_Wstack_protector, |
2345 "not protecting local variables: variable length buffer"); | 3545 "not protecting local variables: variable length buffer"); |
2346 if (has_short_buffer && !crtl->stack_protect_guard) | 3546 if (has_short_buffer && !crtl->stack_protect_guard) |
2347 warning (OPT_Wstack_protector, | 3547 warning (OPT_Wstack_protector, |
2348 "not protecting function: no buffer at least %d bytes long", | 3548 "not protecting function: no buffer at least %d bytes long", |
2349 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); | 3549 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); |
2350 } | 3550 } |
2351 | 3551 |
2352 /* Set up parameters and prepare for return, for the function. */ | 3552 /* Set up parameters and prepare for return, for the function. */ |
2353 expand_function_start (current_function_decl); | 3553 expand_function_start (current_function_decl); |
3554 | |
3555 /* Now that we also have the parameter RTXs, copy them over to our | |
3556 partitions. */ | |
3557 for (i = 0; i < SA.map->num_partitions; i++) | |
3558 { | |
3559 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i)); | |
3560 | |
3561 if (TREE_CODE (var) != VAR_DECL | |
3562 && !SA.partition_to_pseudo[i]) | |
3563 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var); | |
3564 gcc_assert (SA.partition_to_pseudo[i]); | |
3565 | |
3566 /* If this decl was marked as living in multiple places, reset | |
3567 this now to NULL. */ | |
3568 if (DECL_RTL_IF_SET (var) == pc_rtx) | |
3569 SET_DECL_RTL (var, NULL); | |
3570 | |
3571 /* Some RTL parts really want to look at DECL_RTL(x) when x | |
3572 was a decl marked in REG_ATTR or MEM_ATTR. We could use | |
3573 SET_DECL_RTL here, making this available, but that would mean | |
3574 selecting one of the potentially many RTLs for one DECL. Instead | |
3575 of doing that we simply reset the MEM_EXPR of the RTL in question, | |
3576 then nobody can get at it and hence nobody can call DECL_RTL on it. */ | |
3577 if (!DECL_RTL_SET_P (var)) | |
3578 { | |
3579 if (MEM_P (SA.partition_to_pseudo[i])) | |
3580 set_mem_expr (SA.partition_to_pseudo[i], NULL); | |
3581 } | |
3582 } | |
2354 | 3583 |
2355 /* If this function is `main', emit a call to `__main' | 3584 /* If this function is `main', emit a call to `__main' |
2356 to run global initializers, etc. */ | 3585 to run global initializers, etc. */ |
2357 if (DECL_NAME (current_function_decl) | 3586 if (DECL_NAME (current_function_decl) |
2358 && MAIN_NAME_P (DECL_NAME (current_function_decl)) | 3587 && MAIN_NAME_P (DECL_NAME (current_function_decl)) |
2362 /* Initialize the stack_protect_guard field. This must happen after the | 3591 /* Initialize the stack_protect_guard field. This must happen after the |
2363 call to __main (if any) so that the external decl is initialized. */ | 3592 call to __main (if any) so that the external decl is initialized. */ |
2364 if (crtl->stack_protect_guard) | 3593 if (crtl->stack_protect_guard) |
2365 stack_protect_prologue (); | 3594 stack_protect_prologue (); |
2366 | 3595 |
2367 /* Update stack boundary if needed. */ | 3596 expand_phi_nodes (&SA); |
2368 if (SUPPORTS_STACK_ALIGNMENT) | |
2369 { | |
2370 /* Call update_stack_boundary here to update incoming stack | |
2371 boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called. | |
2372 TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate | |
2373 incoming stack alignment to check if it is OK to perform | |
2374 sibcall optimization since sibcall optimization will only | |
2375 align the outgoing stack to incoming stack boundary. */ | |
2376 if (targetm.calls.update_stack_boundary) | |
2377 targetm.calls.update_stack_boundary (); | |
2378 | |
2379 /* The incoming stack frame has to be aligned at least at | |
2380 parm_stack_boundary. */ | |
2381 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY); | |
2382 } | |
2383 | 3597 |
2384 /* Register rtl specific functions for cfg. */ | 3598 /* Register rtl specific functions for cfg. */ |
2385 rtl_register_cfg_hooks (); | 3599 rtl_register_cfg_hooks (); |
2386 | 3600 |
2387 init_block = construct_init_block (); | 3601 init_block = construct_init_block (); |
2388 | 3602 |
2389 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the | 3603 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the |
2390 remaining edges in expand_gimple_basic_block. */ | 3604 remaining edges later. */ |
2391 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) | 3605 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) |
2392 e->flags &= ~EDGE_EXECUTABLE; | 3606 e->flags &= ~EDGE_EXECUTABLE; |
2393 | 3607 |
2394 lab_rtx_for_bb = pointer_map_create (); | 3608 lab_rtx_for_bb = pointer_map_create (); |
2395 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) | 3609 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) |
2396 bb = expand_gimple_basic_block (bb); | 3610 bb = expand_gimple_basic_block (bb); |
3611 | |
3612 if (MAY_HAVE_DEBUG_INSNS) | |
3613 expand_debug_locations (); | |
3614 | |
3615 execute_free_datastructures (); | |
3616 finish_out_of_ssa (&SA); | |
2397 | 3617 |
2398 /* Expansion is used by optimization passes too; set maybe_hot_insn_p | 3618 /* Expansion is used by optimization passes too; set maybe_hot_insn_p |
2399 conservatively to true until they are all profile aware. */ | 3619 conservatively to true until they are all profile aware. */ |
2400 pointer_map_destroy (lab_rtx_for_bb); | 3620 pointer_map_destroy (lab_rtx_for_bb); |
2401 free_histograms (); | 3621 free_histograms (); |
2402 | 3622 |
2403 construct_exit_block (); | 3623 construct_exit_block (); |
2404 set_curr_insn_block (DECL_INITIAL (current_function_decl)); | 3624 set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
2405 insn_locators_finalize (); | 3625 insn_locators_finalize (); |
2406 | 3626 |
3627 /* Zap the tree EH table. */ | |
3628 set_eh_throw_stmt_table (cfun, NULL); | |
3629 | |
3630 rebuild_jump_labels (get_insns ()); | |
3631 | |
3632 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) | |
3633 { | |
3634 edge e; | |
3635 edge_iterator ei; | |
3636 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | |
3637 { | |
3638 if (e->insns.r) | |
3639 commit_one_edge_insertion (e); | |
3640 else | |
3641 ei_next (&ei); | |
3642 } | |
3643 } | |
3644 | |
2407 /* We're done expanding trees to RTL. */ | 3645 /* We're done expanding trees to RTL. */ |
2408 currently_expanding_to_rtl = 0; | 3646 currently_expanding_to_rtl = 0; |
2409 | 3647 |
2410 /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */ | 3648 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb) |
2411 convert_from_eh_region_ranges (); | 3649 { |
2412 set_eh_throw_stmt_table (cfun, NULL); | 3650 edge e; |
2413 | 3651 edge_iterator ei; |
2414 rebuild_jump_labels (get_insns ()); | 3652 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
2415 find_exception_handler_labels (); | 3653 { |
3654 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ | |
3655 e->flags &= ~EDGE_EXECUTABLE; | |
3656 | |
3657 /* At the moment not all abnormal edges match the RTL | |
3658 representation. It is safe to remove them here as | |
3659 find_many_sub_basic_blocks will rediscover them. | |
3660 In the future we should get this fixed properly. */ | |
3661 if ((e->flags & EDGE_ABNORMAL) | |
3662 && !(e->flags & EDGE_SIBCALL)) | |
3663 remove_edge (e); | |
3664 else | |
3665 ei_next (&ei); | |
3666 } | |
3667 } | |
2416 | 3668 |
2417 blocks = sbitmap_alloc (last_basic_block); | 3669 blocks = sbitmap_alloc (last_basic_block); |
2418 sbitmap_ones (blocks); | 3670 sbitmap_ones (blocks); |
2419 find_many_sub_basic_blocks (blocks); | 3671 find_many_sub_basic_blocks (blocks); |
3672 sbitmap_free (blocks); | |
2420 purge_all_dead_edges (); | 3673 purge_all_dead_edges (); |
2421 sbitmap_free (blocks); | |
2422 | 3674 |
2423 compact_blocks (); | 3675 compact_blocks (); |
2424 | 3676 |
2425 expand_stack_alignment (); | 3677 expand_stack_alignment (); |
2426 | 3678 |
2437 generating_concat_p = 0; | 3689 generating_concat_p = 0; |
2438 | 3690 |
2439 if (dump_file) | 3691 if (dump_file) |
2440 { | 3692 { |
2441 fprintf (dump_file, | 3693 fprintf (dump_file, |
2442 "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); | 3694 "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); |
2443 /* And the pass manager will dump RTL for us. */ | 3695 /* And the pass manager will dump RTL for us. */ |
2444 } | 3696 } |
2445 | 3697 |
2446 /* If we're emitting a nested function, make sure its parent gets | 3698 /* If we're emitting a nested function, make sure its parent gets |
2447 emitted as well. Doing otherwise confuses debug info. */ | 3699 emitted as well. Doing otherwise confuses debug info. */ |
2448 { | 3700 { |
2449 tree parent; | 3701 tree parent; |
2450 for (parent = DECL_CONTEXT (current_function_decl); | 3702 for (parent = DECL_CONTEXT (current_function_decl); |
2451 parent != NULL_TREE; | 3703 parent != NULL_TREE; |
2452 parent = get_containing_scope (parent)) | 3704 parent = get_containing_scope (parent)) |
2453 if (TREE_CODE (parent) == FUNCTION_DECL) | 3705 if (TREE_CODE (parent) == FUNCTION_DECL) |
2454 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; | 3706 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; |
2455 } | 3707 } |
2456 | 3708 |
2457 /* We are now committed to emitting code for this function. Do any | 3709 /* We are now committed to emitting code for this function. Do any |
2458 preparation, such as emitting abstract debug info for the inline | 3710 preparation, such as emitting abstract debug info for the inline |
2459 before it gets mangled by optimization. */ | 3711 before it gets mangled by optimization. */ |
2474 | 3726 |
2475 struct rtl_opt_pass pass_expand = | 3727 struct rtl_opt_pass pass_expand = |
2476 { | 3728 { |
2477 { | 3729 { |
2478 RTL_PASS, | 3730 RTL_PASS, |
2479 "expand", /* name */ | 3731 "expand", /* name */ |
2480 NULL, /* gate */ | 3732 NULL, /* gate */ |
2481 gimple_expand_cfg, /* execute */ | 3733 gimple_expand_cfg, /* execute */ |
2482 NULL, /* sub */ | 3734 NULL, /* sub */ |
2483 NULL, /* next */ | 3735 NULL, /* next */ |
2484 0, /* static_pass_number */ | 3736 0, /* static_pass_number */ |
2485 TV_EXPAND, /* tv_id */ | 3737 TV_EXPAND, /* tv_id */ |
2486 /* ??? If TER is enabled, we actually receive GENERIC. */ | 3738 PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */ |
2487 PROP_gimple_leh | PROP_cfg, /* properties_required */ | |
2488 PROP_rtl, /* properties_provided */ | 3739 PROP_rtl, /* properties_provided */ |
2489 PROP_trees, /* properties_destroyed */ | 3740 PROP_ssa | PROP_trees, /* properties_destroyed */ |
2490 0, /* todo_flags_start */ | 3741 TODO_verify_ssa | TODO_verify_flow |
2491 TODO_dump_func, /* todo_flags_finish */ | 3742 | TODO_verify_stmts, /* todo_flags_start */ |
3743 TODO_dump_func | |
3744 | TODO_ggc_collect /* todo_flags_finish */ | |
2492 } | 3745 } |
2493 }; | 3746 }; |