comparison gcc/cfgexpand.c @ 111:04ced10e8804
gcc 7
author | kono |
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | ab0bcb71f44d 84e7813d76e9 |
68:561a7518be6b | 111:04ced10e8804 |
---|---|
1 /* A pass for lowering trees to RTL. | 1 /* A pass for lowering trees to RTL. |
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 | 2 Copyright (C) 2004-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 | 3 |
5 This file is part of GCC. | 4 This file is part of GCC. |
6 | 5 |
7 GCC is free software; you can redistribute it and/or modify | 6 GCC is free software; you can redistribute it and/or modify |
8 it under the terms of the GNU General Public License as published by | 7 it under the terms of the GNU General Public License as published by |
19 <http://www.gnu.org/licenses/>. */ | 18 <http://www.gnu.org/licenses/>. */ |
20 | 19 |
21 #include "config.h" | 20 #include "config.h" |
22 #include "system.h" | 21 #include "system.h" |
23 #include "coretypes.h" | 22 #include "coretypes.h" |
24 #include "tm.h" | 23 #include "backend.h" |
24 #include "target.h" | |
25 #include "rtl.h" | |
25 #include "tree.h" | 26 #include "tree.h" |
26 #include "rtl.h" | 27 #include "gimple.h" |
28 #include "cfghooks.h" | |
29 #include "tree-pass.h" | |
30 #include "memmodel.h" | |
27 #include "tm_p.h" | 31 #include "tm_p.h" |
28 #include "basic-block.h" | 32 #include "ssa.h" |
29 #include "function.h" | 33 #include "optabs.h" |
34 #include "regs.h" /* For reg_renumber. */ | |
35 #include "emit-rtl.h" | |
36 #include "recog.h" | |
37 #include "cgraph.h" | |
38 #include "diagnostic.h" | |
39 #include "fold-const.h" | |
40 #include "varasm.h" | |
41 #include "stor-layout.h" | |
42 #include "stmt.h" | |
43 #include "print-tree.h" | |
44 #include "cfgrtl.h" | |
45 #include "cfganal.h" | |
46 #include "cfgbuild.h" | |
47 #include "cfgcleanup.h" | |
48 #include "dojump.h" | |
49 #include "explow.h" | |
50 #include "calls.h" | |
30 #include "expr.h" | 51 #include "expr.h" |
31 #include "langhooks.h" | 52 #include "internal-fn.h" |
32 #include "tree-flow.h" | 53 #include "tree-eh.h" |
33 #include "timevar.h" | 54 #include "gimple-iterator.h" |
34 #include "tree-dump.h" | 55 #include "gimple-expr.h" |
35 #include "tree-pass.h" | 56 #include "gimple-walk.h" |
57 #include "tree-cfg.h" | |
58 #include "tree-dfa.h" | |
59 #include "tree-ssa.h" | |
36 #include "except.h" | 60 #include "except.h" |
37 #include "flags.h" | |
38 #include "diagnostic.h" | |
39 #include "tree-pretty-print.h" | |
40 #include "gimple-pretty-print.h" | 61 #include "gimple-pretty-print.h" |
41 #include "toplev.h" | 62 #include "toplev.h" |
42 #include "debug.h" | 63 #include "debug.h" |
43 #include "params.h" | 64 #include "params.h" |
44 #include "tree-inline.h" | 65 #include "tree-inline.h" |
45 #include "value-prof.h" | 66 #include "value-prof.h" |
46 #include "target.h" | 67 #include "tree-ssa-live.h" |
47 #include "ssaexpand.h" | 68 #include "tree-outof-ssa.h" |
48 #include "bitmap.h" | 69 #include "cfgloop.h" |
49 #include "sbitmap.h" | |
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */ | 70 #include "insn-attr.h" /* For INSN_SCHEDULING. */ |
71 #include "stringpool.h" | |
72 #include "attribs.h" | |
73 #include "asan.h" | |
74 #include "tree-ssa-address.h" | |
75 #include "output.h" | |
76 #include "builtins.h" | |
77 #include "tree-chkp.h" | |
78 #include "rtl-chkp.h" | |
79 | |
80 /* Some systems use __main in a way incompatible with its use in gcc, in these | |
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to | |
82 give the same symbol without quotes for an alternative entry point. You | |
83 must define both, or neither. */ | |
84 #ifndef NAME__MAIN | |
85 #define NAME__MAIN "__main" | |
86 #endif | |
51 | 87 |
52 /* This variable holds information helping the rewriting of SSA trees | 88 /* This variable holds information helping the rewriting of SSA trees |
53 into RTL. */ | 89 into RTL. */ |
54 struct ssaexpand SA; | 90 struct ssaexpand SA; |
55 | 91 |
56 /* This variable holds the currently expanded gimple statement for purposes | 92 /* This variable holds the currently expanded gimple statement for purposes |
57 of communicating the profile info to the builtin expanders. */ | 93 of communicating the profile info to the builtin expanders. */ |
58 gimple currently_expanding_gimple_stmt; | 94 gimple *currently_expanding_gimple_stmt; |
95 | |
96 static rtx expand_debug_expr (tree); | |
97 | |
98 static bool defer_stack_allocation (tree, bool); | |
99 | |
100 static void record_alignment_for_reg_var (unsigned int); | |
59 | 101 |
60 /* Return an expression tree corresponding to the RHS of GIMPLE | 102 /* Return an expression tree corresponding to the RHS of GIMPLE |
61 statement STMT. */ | 103 statement STMT. */ |
62 | 104 |
63 tree | 105 tree |
64 gimple_assign_rhs_to_tree (gimple stmt) | 106 gimple_assign_rhs_to_tree (gimple *stmt) |
65 { | 107 { |
66 tree t; | 108 tree t; |
67 enum gimple_rhs_class grhs_class; | 109 enum gimple_rhs_class grhs_class; |
68 | 110 |
69 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt)); | 111 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt)); |
89 /* Avoid modifying this tree in place below. */ | 131 /* Avoid modifying this tree in place below. */ |
90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) | 132 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) |
91 && gimple_location (stmt) != EXPR_LOCATION (t)) | 133 && gimple_location (stmt) != EXPR_LOCATION (t)) |
92 || (gimple_block (stmt) | 134 || (gimple_block (stmt) |
93 && currently_expanding_to_rtl | 135 && currently_expanding_to_rtl |
94 && EXPR_P (t) | 136 && EXPR_P (t))) |
95 && gimple_block (stmt) != TREE_BLOCK (t))) | |
96 t = copy_node (t); | 137 t = copy_node (t); |
97 } | 138 } |
98 else | 139 else |
99 gcc_unreachable (); | 140 gcc_unreachable (); |
100 | 141 |
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)) | 142 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)) |
102 SET_EXPR_LOCATION (t, gimple_location (stmt)); | 143 SET_EXPR_LOCATION (t, gimple_location (stmt)); |
103 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t)) | |
104 TREE_BLOCK (t) = gimple_block (stmt); | |
105 | 144 |
106 return t; | 145 return t; |
107 } | 146 } |
108 | 147 |
109 | 148 |
110 #ifndef STACK_ALIGNMENT_NEEDED | 149 #ifndef STACK_ALIGNMENT_NEEDED |
111 #define STACK_ALIGNMENT_NEEDED 1 | 150 #define STACK_ALIGNMENT_NEEDED 1 |
112 #endif | 151 #endif |
113 | 152 |
114 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x) | 153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x) |
154 | |
155 /* Choose either CUR or NEXT as the leader DECL for a partition. | |
156 Prefer ignored decls, to simplify debug dumps and reduce ambiguity | |
157 out of the same user variable being in multiple partitions (this is | |
158 less likely for compiler-introduced temps). */ | |
159 | |
160 static tree | |
161 leader_merge (tree cur, tree next) | |
162 { | |
163 if (cur == NULL || cur == next) | |
164 return next; | |
165 | |
166 if (DECL_P (cur) && DECL_IGNORED_P (cur)) | |
167 return cur; | |
168 | |
169 if (DECL_P (next) && DECL_IGNORED_P (next)) | |
170 return next; | |
171 | |
172 return cur; | |
173 } | |
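
The preference order above (an ignored decl wins; otherwise the current leader is kept) can be exercised in isolation. A minimal standalone sketch, with a hypothetical Decl struct and flag standing in for GCC's trees and DECL_IGNORED_P:

```cpp
#include <cassert>

// Hypothetical stand-ins: 'ignored' models DECL_IGNORED_P, i.e. a
// compiler-introduced temporary that debug dumps need not name.
struct Decl { bool ignored; };

static const Decl *
leader_merge_sketch (const Decl *cur, const Decl *next)
{
  if (cur == nullptr || cur == next)
    return next;
  if (cur->ignored)
    return cur;
  if (next->ignored)
    return next;
  return cur;
}

int main ()
{
  Decl user = {false}, temp = {true};
  assert (leader_merge_sketch (nullptr, &user) == &user); // first decl wins
  assert (leader_merge_sketch (&user, &temp) == &temp);   // ignored preferred
  assert (leader_merge_sketch (&temp, &user) == &temp);   // ignored kept
  return 0;
}
```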
115 | 174 |
116 /* Associate declaration T with storage space X. If T is not an | 175 /* Associate declaration T with storage space X. If T is not an |
117 SSA name this is exactly SET_DECL_RTL, otherwise make the | 176 SSA name this is exactly SET_DECL_RTL, otherwise make the |
118 partition of T associated with X. */ | 177 partition of T associated with X. */ |
119 static inline void | 178 static inline void |
120 set_rtl (tree t, rtx x) | 179 set_rtl (tree t, rtx x) |
121 { | 180 { |
181 gcc_checking_assert (!x | |
182 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t)) | |
183 || (use_register_for_decl (t) | |
184 ? (REG_P (x) | |
185 || (GET_CODE (x) == CONCAT | |
186 && (REG_P (XEXP (x, 0)) | |
187 || SUBREG_P (XEXP (x, 0))) | |
188 && (REG_P (XEXP (x, 1)) | |
189 || SUBREG_P (XEXP (x, 1)))) | |
190 /* We need to accept PARALLELs for RESULT_DECLs | |
191 because of vector types with BLKmode returned | |
192 in multiple registers, but they are supposed | |
193 to be uncoalesced. */ | |
194 || (GET_CODE (x) == PARALLEL | |
195 && SSAVAR (t) | |
196 && TREE_CODE (SSAVAR (t)) == RESULT_DECL | |
197 && (GET_MODE (x) == BLKmode | |
198 || !flag_tree_coalesce_vars))) | |
199 : (MEM_P (x) || x == pc_rtx | |
200 || (GET_CODE (x) == CONCAT | |
201 && MEM_P (XEXP (x, 0)) | |
202 && MEM_P (XEXP (x, 1)))))); | |
203 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and | |
204 RESULT_DECLs has the expected mode. For memory, we accept | |
205 unpromoted modes, since that's what we're likely to get. For | |
206 PARM_DECLs and RESULT_DECLs, we'll have been called by | |
207 set_parm_rtl, which will give us the default def, so we don't | |
208 have to compute it ourselves. For RESULT_DECLs, we accept mode | |
209 mismatches too, as long as we have BLKmode or are not coalescing | |
210 across variables, so that we don't reject BLKmode PARALLELs or | |
211 unpromoted REGs. */ | |
212 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME | |
213 || (SSAVAR (t) | |
214 && TREE_CODE (SSAVAR (t)) == RESULT_DECL | |
215 && (promote_ssa_mode (t, NULL) == BLKmode | |
216 || !flag_tree_coalesce_vars)) | |
217 || !use_register_for_decl (t) | |
218 || GET_MODE (x) == promote_ssa_mode (t, NULL)); | |
219 | |
220 if (x) | |
221 { | |
222 bool skip = false; | |
223 tree cur = NULL_TREE; | |
224 rtx xm = x; | |
225 | |
226 retry: | |
227 if (MEM_P (xm)) | |
228 cur = MEM_EXPR (xm); | |
229 else if (REG_P (xm)) | |
230 cur = REG_EXPR (xm); | |
231 else if (SUBREG_P (xm)) | |
232 { | |
233 gcc_assert (subreg_lowpart_p (xm)); | |
234 xm = SUBREG_REG (xm); | |
235 goto retry; | |
236 } | |
237 else if (GET_CODE (xm) == CONCAT) | |
238 { | |
239 xm = XEXP (xm, 0); | |
240 goto retry; | |
241 } | |
242 else if (GET_CODE (xm) == PARALLEL) | |
243 { | |
244 xm = XVECEXP (xm, 0, 0); | |
245 gcc_assert (GET_CODE (xm) == EXPR_LIST); | |
246 xm = XEXP (xm, 0); | |
247 goto retry; | |
248 } | |
249 else if (xm == pc_rtx) | |
250 skip = true; | |
251 else | |
252 gcc_unreachable (); | |
253 | |
254 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t); | |
255 | |
256 if (cur != next) | |
257 { | |
258 if (MEM_P (x)) | |
259 set_mem_attributes (x, | |
260 next && TREE_CODE (next) == SSA_NAME | |
261 ? TREE_TYPE (next) | |
262 : next, true); | |
263 else | |
264 set_reg_attrs_for_decl_rtl (next, x); | |
265 } | |
266 } | |
267 | |
122 if (TREE_CODE (t) == SSA_NAME) | 268 if (TREE_CODE (t) == SSA_NAME) |
123 { | 269 { |
124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x; | 270 int part = var_to_partition (SA.map, t); |
125 if (x && !MEM_P (x)) | 271 if (part != NO_PARTITION) |
126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x); | 272 { |
127 /* For the benefit of debug information at -O0 (where vartracking | 273 if (SA.partition_to_pseudo[part]) |
128 doesn't run) record the place also in the base DECL if it's | 274 gcc_assert (SA.partition_to_pseudo[part] == x); |
129 a normal variable (not a parameter). */ | 275 else if (x != pc_rtx) |
130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL) | 276 SA.partition_to_pseudo[part] = x; |
277 } | |
278 /* For the benefit of debug information at -O0 (where | |
279 vartracking doesn't run) record the place also in the base | |
280 DECL. For PARMs and RESULTs, do so only when setting the | |
281 default def. */ | |
282 if (x && x != pc_rtx && SSA_NAME_VAR (t) | |
283 && (VAR_P (SSA_NAME_VAR (t)) | |
284 || SSA_NAME_IS_DEFAULT_DEF (t))) | |
131 { | 285 { |
132 tree var = SSA_NAME_VAR (t); | 286 tree var = SSA_NAME_VAR (t); |
133 /* If we don't yet have something recorded, just record it now. */ | 287 /* If we don't yet have something recorded, just record it now. */ |
134 if (!DECL_RTL_SET_P (var)) | 288 if (!DECL_RTL_SET_P (var)) |
135 SET_DECL_RTL (var, x); | 289 SET_DECL_RTL (var, x); |
136 /* If we have it set alrady to "multiple places" don't | 290 /* If we have it set already to "multiple places" don't |
137 change this. */ | 291 change this. */ |
138 else if (DECL_RTL (var) == pc_rtx) | 292 else if (DECL_RTL (var) == pc_rtx) |
139 ; | 293 ; |
140 /* If we have something recorded and it's not the same place | 294 /* If we have something recorded and it's not the same place |
141 as we want to record now, we have multiple partitions for the | 295 as we want to record now, we have multiple partitions for the |
156 struct stack_var | 310 struct stack_var |
157 { | 311 { |
158 /* The Variable. */ | 312 /* The Variable. */ |
159 tree decl; | 313 tree decl; |
160 | 314 |
161 /* The offset of the variable. During partitioning, this is the | |
162 offset relative to the partition. After partitioning, this | |
163 is relative to the stack frame. */ | |
164 HOST_WIDE_INT offset; | |
165 | |
166 /* Initially, the size of the variable. Later, the size of the partition, | 315 /* Initially, the size of the variable. Later, the size of the partition, |
167 if this variable becomes its partition's representative. */ | 316 if this variable becomes its partition's representative. */ |
168 HOST_WIDE_INT size; | 317 HOST_WIDE_INT size; |
169 | 318 |
170 /* The *byte* alignment required for this variable. Or as, with the | 319 /* The *byte* alignment required for this variable. Or as, with the |
185 | 334 |
186 /* We have an array of such objects while deciding allocation. */ | 335 /* We have an array of such objects while deciding allocation. */ |
187 static struct stack_var *stack_vars; | 336 static struct stack_var *stack_vars; |
188 static size_t stack_vars_alloc; | 337 static size_t stack_vars_alloc; |
189 static size_t stack_vars_num; | 338 static size_t stack_vars_num; |
339 static hash_map<tree, size_t> *decl_to_stack_part; | |
340 | |
341 /* Conflict bitmaps go on this obstack. This allows us to destroy | |
342 all of them in one big sweep. */ | |
343 static bitmap_obstack stack_var_bitmap_obstack; | |
190 | 344 |
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size | 345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size |
192 is non-decreasing. */ | 346 is non-decreasing. */ |
193 static size_t *stack_vars_sorted; | 347 static size_t *stack_vars_sorted; |
194 | 348 |
203 | 357 |
204 /* Used during expand_used_vars. Remember if we see a character buffer | 358 /* Used during expand_used_vars. Remember if we see a character buffer |
205 smaller than our cutoff threshold. Used for -Wstack-protector. */ | 359 smaller than our cutoff threshold. Used for -Wstack-protector. */ |
206 static bool has_short_buffer; | 360 static bool has_short_buffer; |
207 | 361 |
208 /* Discover the byte alignment to use for DECL. Ignore alignment | 362 /* Compute the byte alignment to use for DECL. Ignore alignment |
209 we can't do with expected alignment of the stack boundary. */ | 363 we can't do with expected alignment of the stack boundary. */ |
210 | 364 |
211 static unsigned int | 365 static unsigned int |
212 get_decl_align_unit (tree decl) | 366 align_local_variable (tree decl) |
213 { | 367 { |
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl); | 368 unsigned int align; |
369 | |
370 if (TREE_CODE (decl) == SSA_NAME) | |
371 align = TYPE_ALIGN (TREE_TYPE (decl)); | |
372 else | |
373 { | |
374 align = LOCAL_DECL_ALIGNMENT (decl); | |
375 SET_DECL_ALIGN (decl, align); | |
376 } | |
215 return align / BITS_PER_UNIT; | 377 return align / BITS_PER_UNIT; |
378 } | |
379 | |
380 /* Align given offset BASE with ALIGN. Round up if ALIGN_UP is true, | |
381 down otherwise. Return the rounded BASE value. */ | |
382 | |
383 static inline unsigned HOST_WIDE_INT | |
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up) | |
385 { | |
386 return align_up ? (base + align - 1) & -align : base & -align; | |
216 } | 387 } |
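
The masking arithmetic above relies on ALIGN being a power of two, so that -align acts as a mask clearing the low alignment bits. A minimal standalone sketch of the same computation (hypothetical name align_base_sketch, plain int64_t in place of HOST_WIDE_INT):

```cpp
#include <cassert>
#include <cstdint>

// For power-of-two align, -align has all bits set above the alignment
// bits, so (base + align - 1) & -align rounds up and base & -align
// rounds down to a multiple of align.
static int64_t
align_base_sketch (int64_t base, int64_t align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

int main ()
{
  assert (align_base_sketch (13, 8, true) == 16);    // round up
  assert (align_base_sketch (13, 8, false) == 8);    // round down
  assert (align_base_sketch (-13, 8, false) == -16); // negative offsets too
  return 0;
}
```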
217 | 388 |
218 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. | 389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. |
219 Return the frame offset. */ | 390 Return the frame offset. */ |
220 | 391 |
221 static HOST_WIDE_INT | 392 static HOST_WIDE_INT |
222 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align) | 393 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align) |
223 { | 394 { |
224 HOST_WIDE_INT offset, new_frame_offset; | 395 HOST_WIDE_INT offset, new_frame_offset; |
225 | 396 |
226 new_frame_offset = frame_offset; | |
227 if (FRAME_GROWS_DOWNWARD) | 397 if (FRAME_GROWS_DOWNWARD) |
228 { | 398 { |
229 new_frame_offset -= size + frame_phase; | 399 new_frame_offset |
230 new_frame_offset &= -align; | 400 = align_base (frame_offset - frame_phase - size, |
231 new_frame_offset += frame_phase; | 401 align, false) + frame_phase; |
232 offset = new_frame_offset; | 402 offset = new_frame_offset; |
233 } | 403 } |
234 else | 404 else |
235 { | 405 { |
236 new_frame_offset -= frame_phase; | 406 new_frame_offset |
237 new_frame_offset += align - 1; | 407 = align_base (frame_offset - frame_phase, align, true) + frame_phase; |
238 new_frame_offset &= -align; | |
239 new_frame_offset += frame_phase; | |
240 offset = new_frame_offset; | 408 offset = new_frame_offset; |
241 new_frame_offset += size; | 409 new_frame_offset += size; |
242 } | 410 } |
243 frame_offset = new_frame_offset; | 411 frame_offset = new_frame_offset; |
244 | 412 |
262 else | 430 else |
263 stack_vars_alloc = 32; | 431 stack_vars_alloc = 32; |
264 stack_vars | 432 stack_vars |
265 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); | 433 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); |
266 } | 434 } |
435 if (!decl_to_stack_part) | |
436 decl_to_stack_part = new hash_map<tree, size_t>; | |
437 | |
267 v = &stack_vars[stack_vars_num]; | 438 v = &stack_vars[stack_vars_num]; |
439 decl_to_stack_part->put (decl, stack_vars_num); | |
268 | 440 |
269 v->decl = decl; | 441 v->decl = decl; |
270 v->offset = 0; | 442 tree size = TREE_CODE (decl) == SSA_NAME |
271 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1); | 443 ? TYPE_SIZE_UNIT (TREE_TYPE (decl)) |
444 : DECL_SIZE_UNIT (decl); | |
445 v->size = tree_to_uhwi (size); | |
272 /* Ensure that all variables have size, so that &a != &b for any two | 446 /* Ensure that all variables have size, so that &a != &b for any two |
273 variables that are simultaneously live. */ | 447 variables that are simultaneously live. */ |
274 if (v->size == 0) | 448 if (v->size == 0) |
275 v->size = 1; | 449 v->size = 1; |
276 v->alignb = get_decl_align_unit (SSAVAR (decl)); | 450 v->alignb = align_local_variable (decl); |
451 /* An alignment of zero can mightily confuse us later. */ | |
452 gcc_assert (v->alignb != 0); | |
277 | 453 |
278 /* All variables are initially in their own partition. */ | 454 /* All variables are initially in their own partition. */ |
279 v->representative = stack_vars_num; | 455 v->representative = stack_vars_num; |
280 v->next = EOC; | 456 v->next = EOC; |
281 | 457 |
294 add_stack_var_conflict (size_t x, size_t y) | 470 add_stack_var_conflict (size_t x, size_t y) |
295 { | 471 { |
296 struct stack_var *a = &stack_vars[x]; | 472 struct stack_var *a = &stack_vars[x]; |
297 struct stack_var *b = &stack_vars[y]; | 473 struct stack_var *b = &stack_vars[y]; |
298 if (!a->conflicts) | 474 if (!a->conflicts) |
299 a->conflicts = BITMAP_ALLOC (NULL); | 475 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack); |
300 if (!b->conflicts) | 476 if (!b->conflicts) |
301 b->conflicts = BITMAP_ALLOC (NULL); | 477 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack); |
302 bitmap_set_bit (a->conflicts, y); | 478 bitmap_set_bit (a->conflicts, y); |
303 bitmap_set_bit (b->conflicts, x); | 479 bitmap_set_bit (b->conflicts, x); |
304 } | 480 } |
305 | 481 |
306 /* Check whether the decls associated with luid's X and Y conflict. */ | 482 /* Check whether the decls associated with luid's X and Y conflict. */ |
308 static bool | 484 static bool |
309 stack_var_conflict_p (size_t x, size_t y) | 485 stack_var_conflict_p (size_t x, size_t y) |
310 { | 486 { |
311 struct stack_var *a = &stack_vars[x]; | 487 struct stack_var *a = &stack_vars[x]; |
312 struct stack_var *b = &stack_vars[y]; | 488 struct stack_var *b = &stack_vars[y]; |
489 if (x == y) | |
490 return false; | |
491 /* Partitions containing an SSA name result from gimple registers | |
492 with things like unsupported modes. They are top-level and | |
493 hence conflict with everything else. */ | |
494 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME) | |
495 return true; | |
496 | |
313 if (!a->conflicts || !b->conflicts) | 497 if (!a->conflicts || !b->conflicts) |
314 return false; | 498 return false; |
315 return bitmap_bit_p (a->conflicts, y); | 499 return bitmap_bit_p (a->conflicts, y); |
316 } | 500 } |
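
Setting the bit in both variables' bitmaps keeps the conflict relation symmetric, so either side can be queried. A standalone sketch of the same invariant, with std::set standing in for GCC's sparse bitmaps (hypothetical names):

```cpp
#include <cassert>
#include <cstddef>
#include <set>
#include <vector>

static std::vector<std::set<size_t> > conflicts;

// Mirror of add_stack_var_conflict: record the edge in both directions.
static void
add_conflict (size_t x, size_t y)
{
  conflicts[x].insert (y);
  conflicts[y].insert (x);
}

// Mirror of stack_var_conflict_p for the simple case: a variable never
// conflicts with itself.
static bool
conflict_p (size_t x, size_t y)
{
  return x != y && conflicts[x].count (y) != 0;
}

int main ()
{
  conflicts.resize (3);
  add_conflict (0, 2);
  assert (conflict_p (2, 0));  // symmetric: query from either side
  assert (!conflict_p (0, 1)); // no edge recorded
  return 0;
}
```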
317 | 501 |
318 /* Returns true if TYPE is or contains a union type. */ | 502 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var |
503 enter its partition number into bitmap DATA. */ | |
319 | 504 |
320 static bool | 505 static bool |
321 aggregate_contains_union_type (tree type) | 506 visit_op (gimple *, tree op, tree, void *data) |
322 { | 507 { |
323 tree field; | 508 bitmap active = (bitmap)data; |
324 | 509 op = get_base_address (op); |
325 if (TREE_CODE (type) == UNION_TYPE | 510 if (op |
326 || TREE_CODE (type) == QUAL_UNION_TYPE) | 511 && DECL_P (op) |
327 return true; | 512 && DECL_RTL_IF_SET (op) == pc_rtx) |
328 if (TREE_CODE (type) == ARRAY_TYPE) | 513 { |
329 return aggregate_contains_union_type (TREE_TYPE (type)); | 514 size_t *v = decl_to_stack_part->get (op); |
330 if (TREE_CODE (type) != RECORD_TYPE) | 515 if (v) |
331 return false; | 516 bitmap_set_bit (active, *v); |
332 | 517 } |
333 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
334 if (TREE_CODE (field) == FIELD_DECL) | |
335 if (aggregate_contains_union_type (TREE_TYPE (field))) | |
336 return true; | |
337 | |
338 return false; | 518 return false; |
339 } | 519 } |
340 | 520 |
341 /* A subroutine of expand_used_vars. If two variables X and Y have alias | 521 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var |
342 sets that do not conflict, then do add a conflict for these variables | 522 record conflicts between it and all currently active other partitions |
343 in the interference graph. We also need to make sure to add conflicts | 523 from bitmap DATA. */ |
344 for union containing structures. Else RTL alias analysis comes along | 524 |
345 and due to type based aliasing rules decides that for two overlapping | 525 static bool |
346 union temporaries { short s; int i; } accesses to the same mem through | 526 visit_conflict (gimple *, tree op, tree, void *data) |
347 different types may not alias and happily reorders stores across | 527 { |
348 life-time boundaries of the temporaries (See PR25654). | 528 bitmap active = (bitmap)data; |
349 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */ | 529 op = get_base_address (op); |
530 if (op | |
531 && DECL_P (op) | |
532 && DECL_RTL_IF_SET (op) == pc_rtx) | |
533 { | |
534 size_t *v = decl_to_stack_part->get (op); | |
535 if (v && bitmap_set_bit (active, *v)) | |
536 { | |
537 size_t num = *v; | |
538 bitmap_iterator bi; | |
539 unsigned i; | |
540 gcc_assert (num < stack_vars_num); | |
541 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi) | |
542 add_stack_var_conflict (num, i); | |
543 } | |
544 } | |
545 return false; | |
546 } | |
547 | |
548 /* Helper routine for add_scope_conflicts, calculating the active partitions | |
549 at the end of BB, leaving the result in WORK. We're called to generate | |
550 conflicts when FOR_CONFLICT is true, otherwise we're just tracking | |
551 liveness. */ | |
350 | 552 |
351 static void | 553 static void |
352 add_alias_set_conflicts (void) | 554 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict) |
353 { | 555 { |
354 size_t i, j, n = stack_vars_num; | 556 edge e; |
355 | 557 edge_iterator ei; |
356 for (i = 0; i < n; ++i) | 558 gimple_stmt_iterator gsi; |
357 { | 559 walk_stmt_load_store_addr_fn visit; |
358 tree type_i = TREE_TYPE (stack_vars[i].decl); | 560 |
359 bool aggr_i = AGGREGATE_TYPE_P (type_i); | 561 bitmap_clear (work); |
360 bool contains_union; | 562 FOR_EACH_EDGE (e, ei, bb->preds) |
361 | 563 bitmap_ior_into (work, (bitmap)e->src->aux); |
362 contains_union = aggregate_contains_union_type (type_i); | 564 |
363 for (j = 0; j < i; ++j) | 565 visit = visit_op; |
364 { | 566 |
365 tree type_j = TREE_TYPE (stack_vars[j].decl); | 567 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
366 bool aggr_j = AGGREGATE_TYPE_P (type_j); | 568 { |
367 if (aggr_i != aggr_j | 569 gimple *stmt = gsi_stmt (gsi); |
368 /* Either the objects conflict by means of type based | 570 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit); |
369 aliasing rules, or we need to add a conflict. */ | 571 } |
370 || !objects_must_conflict_p (type_i, type_j) | 572 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
371 /* In case the types do not conflict ensure that access | 573 { |
372 to elements will conflict. In case of unions we have | 574 gimple *stmt = gsi_stmt (gsi); |
373 to be careful as type based aliasing rules may say | 575 |
374 access to the same memory does not conflict. So play | 576 if (gimple_clobber_p (stmt)) |
375 safe and add a conflict in this case. */ | 577 { |
376 || contains_union) | 578 tree lhs = gimple_assign_lhs (stmt); |
377 add_stack_var_conflict (i, j); | 579 size_t *v; |
378 } | 580 /* Nested function lowering might introduce LHSs |
379 } | 581 that are COMPONENT_REFs. */ |
582 if (!VAR_P (lhs)) | |
583 continue; | |
584 if (DECL_RTL_IF_SET (lhs) == pc_rtx | |
585 && (v = decl_to_stack_part->get (lhs))) | |
586 bitmap_clear_bit (work, *v); | |
587 } | |
588 else if (!is_gimple_debug (stmt)) | |
589 { | |
590 if (for_conflict | |
591 && visit == visit_op) | |
592 { | |
593 /* If this is the first real instruction in this BB we need | |
594 to add conflicts for everything live at this point now. | |
595 Unlike classical liveness for named objects we can't | |
596 rely on seeing a def/use of the names we're interested in. | |
597 There might merely be indirect loads/stores. We'd not add any | |
598 conflicts for such partitions. */ | |
599 bitmap_iterator bi; | |
600 unsigned i; | |
601 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi) | |
602 { | |
603 struct stack_var *a = &stack_vars[i]; | |
604 if (!a->conflicts) | |
605 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack); | |
606 bitmap_ior_into (a->conflicts, work); | |
607 } | |
608 visit = visit_conflict; | |
609 } | |
610 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit); | |
611 } | |
612 } | |
613 } | |
614 | |
615 /* Generate stack partition conflicts between all partitions that are | |
616 simultaneously live. */ | |
617 | |
618 static void | |
619 add_scope_conflicts (void) | |
620 { | |
621 basic_block bb; | |
622 bool changed; | |
623 bitmap work = BITMAP_ALLOC (NULL); | |
624 int *rpo; | |
625 int n_bbs; | |
626 | |
627 /* We approximate the live range of a stack variable by taking the first | |
628 mention of its name as starting point(s), and by the end-of-scope | |
629 death clobber added by gimplify as ending point(s) of the range. | |
630 This overapproximates when, for instance, an address-taken | |
631 operation was moved upward without also moving a dereference to it. | |
632 But it's conservatively correct, as a variable can never hold values | |
633 before its name is mentioned at least once. | |
634 | |
635 We then do a mostly classical bitmap liveness algorithm. */ | |
636 | |
637 FOR_ALL_BB_FN (bb, cfun) | |
638 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack); | |
639 | |
640 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun)); | |
641 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false); | |
642 | |
643 changed = true; | |
644 while (changed) | |
645 { | |
646 int i; | |
647 changed = false; | |
648 for (i = 0; i < n_bbs; i++) | |
649 { | |
650 bitmap active; | |
651 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]); | |
652 active = (bitmap)bb->aux; | |
653 add_scope_conflicts_1 (bb, work, false); | |
654 if (bitmap_ior_into (active, work)) | |
655 changed = true; | |
656 } | |
657 } | |
658 | |
659 FOR_EACH_BB_FN (bb, cfun) | |
660 add_scope_conflicts_1 (bb, work, true); | |
661 | |
662 free (rpo); | |
663 BITMAP_FREE (work); | |
664 FOR_ALL_BB_FN (bb, cfun) | |
665 BITMAP_FREE (bb->aux); | |
380 } | 666 } |
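
The effect of the clobber-based liveness can be seen on a plain source fragment (illustrative only, not from this file): gimplification inserts an end-of-scope clobber for each array, so their approximated live ranges are disjoint, no conflict is recorded between them, and partition_stack_vars may later place both in one stack slot.

```cpp
// Illustrative C/C++: 'a' dies at its scope's clobber before 'b' is
// first mentioned, so the liveness walk above records no conflict and
// the two 256-byte arrays can share a single stack partition.
int
use_two_buffers (int n)
{
  int acc = 0;
  {
    char a[256];
    __builtin_memset (a, 0, sizeof a);
    acc += a[n & 255];
  }
  {
    char b[256]; // may reuse the frame slot that held 'a'
    __builtin_memset (b, 1, sizeof b);
    acc += b[n & 255];
  }
  return acc;
}
```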
381 | 667 |
382 /* A subroutine of partition_stack_vars. A comparison function for qsort, | 668 /* A subroutine of partition_stack_vars. A comparison function for qsort, |
383 sorting an array of indices by the properties of the object. */ | 669 sorting an array of indices by the properties of the object. */ |
384 | 670 |
401 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT); | 687 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT); |
402 if (largea != largeb) | 688 if (largea != largeb) |
403 return (int)largeb - (int)largea; | 689 return (int)largeb - (int)largea; |
404 | 690 |
405 /* Secondary compare on size, decreasing */ | 691 /* Secondary compare on size, decreasing */ |
692 if (sizea > sizeb) | |
693 return -1; | |
406 if (sizea < sizeb) | 694 if (sizea < sizeb) |
407 return -1; | |
408 if (sizea > sizeb) | |
409 return 1; | 695 return 1; |
410 | 696 |
411 /* Tertiary compare on true alignment, decreasing. */ | 697 /* Tertiary compare on true alignment, decreasing. */ |
412 if (aligna < alignb) | 698 if (aligna < alignb) |
413 return -1; | 699 return -1; |
433 if (uida > uidb) | 719 if (uida > uidb) |
434 return -1; | 720 return -1; |
435 return 0; | 721 return 0; |
436 } | 722 } |
437 | 723 |
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {}; | |
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap; | |
438 | 726 |
439 /* If the points-to solution *PI points to variables that are in a partition | 727 /* If the points-to solution *PI points to variables that are in a partition |
440 together with other variables add all partition members to the pointed-to | 728 together with other variables add all partition members to the pointed-to |
441 variables bitmap. */ | 729 variables bitmap. */ |
442 | 730 |
443 static void | 731 static void |
444 add_partitioned_vars_to_ptset (struct pt_solution *pt, | 732 add_partitioned_vars_to_ptset (struct pt_solution *pt, |
445 struct pointer_map_t *decls_to_partitions, | 733 part_hashmap *decls_to_partitions, |
446 struct pointer_set_t *visited, bitmap temp) | 734 hash_set<bitmap> *visited, bitmap temp) |
447 { | 735 { |
448 bitmap_iterator bi; | 736 bitmap_iterator bi; |
449 unsigned i; | 737 unsigned i; |
450 bitmap *part; | 738 bitmap *part; |
451 | 739 |
452 if (pt->anything | 740 if (pt->anything |
453 || pt->vars == NULL | 741 || pt->vars == NULL |
454 /* The pointed-to vars bitmap is shared, it is enough to | 742 /* The pointed-to vars bitmap is shared, it is enough to |
455 visit it once. */ | 743 visit it once. */ |
456 || pointer_set_insert(visited, pt->vars)) | 744 || visited->add (pt->vars)) |
457 return; | 745 return; |
458 | 746 |
459 bitmap_clear (temp); | 747 bitmap_clear (temp); |
460 | 748 |
461 /* By using a temporary bitmap to store all members of the partitions | 749 /* By using a temporary bitmap to store all members of the partitions |
462 we have to add we make sure to visit each of the partitions only | 750 we have to add we make sure to visit each of the partitions only |
463 once. */ | 751 once. */ |
464 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi) | 752 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi) |
465 if ((!temp | 753 if ((!temp |
466 || !bitmap_bit_p (temp, i)) | 754 || !bitmap_bit_p (temp, i)) |
467 && (part = (bitmap *) pointer_map_contains (decls_to_partitions, | 755 && (part = decls_to_partitions->get (i))) |
468 (void *)(size_t) i))) | |
469 bitmap_ior_into (temp, *part); | 756 bitmap_ior_into (temp, *part); |
470 if (!bitmap_empty_p (temp)) | 757 if (!bitmap_empty_p (temp)) |
471 bitmap_ior_into (pt->vars, temp); | 758 bitmap_ior_into (pt->vars, temp); |
472 } | 759 } |
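
The temporary bitmap above ensures each partition is merged in only once, while the visited set skips shared points-to bitmaps entirely. A standalone sketch of the widening step itself, with std::set standing in for the bitmaps (hypothetical setup):

```cpp
#include <cassert>
#include <cstddef>
#include <map>
#include <set>

int main ()
{
  // part maps a variable id to its whole partition's membership set.
  std::map<size_t, std::set<size_t> > part;
  part[3] = {3, 5, 7}; // vars 3, 5 and 7 were coalesced into one slot

  std::set<size_t> pt = {1, 3}; // a points-to set naming member 3
  std::set<size_t> temp;        // gather via a temporary, so each
  for (size_t v : pt)           // partition is visited only once
    {
      auto it = part.find (v);
      if (it != part.end ())
        temp.insert (it->second.begin (), it->second.end ());
    }
  pt.insert (temp.begin (), temp.end ());

  assert (pt.count (5) && pt.count (7)); // widened to the whole partition
  return 0;
}
```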
473 | 760 |
477 rewritten. */ | 764 rewritten. */ |
478 | 765 |
479 static void | 766 static void |
480 update_alias_info_with_stack_vars (void) | 767 update_alias_info_with_stack_vars (void) |
481 { | 768 { |
482 struct pointer_map_t *decls_to_partitions = NULL; | 769 part_hashmap *decls_to_partitions = NULL; |
483 size_t i, j; | 770 size_t i, j; |
484 tree var = NULL_TREE; | 771 tree var = NULL_TREE; |
485 | 772 |
486 for (i = 0; i < stack_vars_num; i++) | 773 for (i = 0; i < stack_vars_num; i++) |
487 { | 774 { |
494 || stack_vars[i].next == EOC) | 781 || stack_vars[i].next == EOC) |
495 continue; | 782 continue; |
496 | 783 |
497 if (!decls_to_partitions) | 784 if (!decls_to_partitions) |
498 { | 785 { |
499 decls_to_partitions = pointer_map_create (); | 786 decls_to_partitions = new part_hashmap; |
500 cfun->gimple_df->decls_to_pointers = pointer_map_create (); | 787 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>; |
501 } | 788 } |
502 | 789 |
503 /* Create an SSA_NAME that points to the partition for use | 790 /* Create an SSA_NAME that points to the partition for use |
504 as base during alias-oracle queries on RTL for bases that | 791 as base during alias-oracle queries on RTL for bases that |
505 have been partitioned. */ | 792 have been partitioned. */ |
506 if (var == NULL_TREE) | 793 if (var == NULL_TREE) |
507 var = create_tmp_var (ptr_type_node, NULL); | 794 var = create_tmp_var (ptr_type_node); |
508 name = make_ssa_name (var, NULL); | 795 name = make_ssa_name (var); |
509 | 796 |
510 /* Create bitmaps representing partitions. They will be used for | 797 /* Create bitmaps representing partitions. They will be used for |
511 points-to sets later, so use GGC alloc. */ | 798 points-to sets later, so use GGC alloc. */ |
512 part = BITMAP_GGC_ALLOC (); | 799 part = BITMAP_GGC_ALLOC (); |
513 for (j = i; j != EOC; j = stack_vars[j].next) | 800 for (j = i; j != EOC; j = stack_vars[j].next) |
514 { | 801 { |
515 tree decl = stack_vars[j].decl; | 802 tree decl = stack_vars[j].decl; |
516 unsigned int uid = DECL_PT_UID (decl); | 803 unsigned int uid = DECL_PT_UID (decl); |
517 /* We should never end up partitioning SSA names (though they | |
518 may end up on the stack). Neither should we allocate stack | |
519 space to something that is unused and thus unreferenced, except | |
520 for -O0 where we are preserving even unreferenced variables. */ | |
521 gcc_assert (DECL_P (decl) | |
522 && (!optimize | |
523 || referenced_var_lookup (cfun, DECL_UID (decl)))); | |
524 bitmap_set_bit (part, uid); | 804 bitmap_set_bit (part, uid); |
525 *((bitmap *) pointer_map_insert (decls_to_partitions, | 805 decls_to_partitions->put (uid, part); |
526 (void *)(size_t) uid)) = part; | 806 cfun->gimple_df->decls_to_pointers->put (decl, name); |
527 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers, | 807 if (TREE_ADDRESSABLE (decl)) |
528 decl)) = name; | 808 TREE_ADDRESSABLE (name) = 1; |
529 } | 809 } |
530 | 810 |
531 /* Make the SSA name point to all partition members. */ | 811 /* Make the SSA name point to all partition members. */ |
532 pi = get_ptr_info (name); | 812 pi = get_ptr_info (name); |
533 pt_solution_set (&pi->pt, part, false, false); | 813 pt_solution_set (&pi->pt, part, false); |
534 } | 814 } |
535 | 815 |
536 /* Make all points-to sets that contain one member of a partition | 816 /* Make all points-to sets that contain one member of a partition |
537 contain all members of the partition. */ | 817 contain all members of the partition. */ |
538 if (decls_to_partitions) | 818 if (decls_to_partitions) |
539 { | 819 { |
540 unsigned i; | 820 unsigned i; |
541 struct pointer_set_t *visited = pointer_set_create (); | 821 tree name; |
542 bitmap temp = BITMAP_ALLOC (NULL); | 822 hash_set<bitmap> visited; |
543 | 823 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack); |
544 for (i = 1; i < num_ssa_names; i++) | 824 |
545 { | 825 FOR_EACH_SSA_NAME (i, name, cfun) |
546 tree name = ssa_name (i); | 826 { |
547 struct ptr_info_def *pi; | 827 struct ptr_info_def *pi; |
548 | 828 |
549 if (name | 829 if (POINTER_TYPE_P (TREE_TYPE (name)) |
550 && POINTER_TYPE_P (TREE_TYPE (name)) | |
551 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL)) | 830 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL)) |
552 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, | 831 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, |
553 visited, temp); | 832 &visited, temp); |
554 } | 833 } |
555 | 834 |
556 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, | 835 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, |
557 decls_to_partitions, visited, temp); | 836 decls_to_partitions, &visited, temp); |
558 | 837 |
559 pointer_set_destroy (visited); | 838 delete decls_to_partitions; |
560 pointer_map_destroy (decls_to_partitions); | |
561 BITMAP_FREE (temp); | 839 BITMAP_FREE (temp); |
562 } | 840 } |
563 } | 841 } |
564 | 842 |
565 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND | 843 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND |
566 partitioning algorithm. Partitions A and B are known to be non-conflicting. | 844 partitioning algorithm. Partitions A and B are known to be non-conflicting. |
567 Merge them into a single partition A. | 845 Merge them into a single partition A. */ |
568 | |
569 At the same time, add OFFSET to all variables in partition B. At the end | |
570 of the partitioning process we've have a nice block easy to lay out within | |
571 the stack frame. */ | |
572 | 846 |
573 static void | 847 static void |
574 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset) | 848 union_stack_vars (size_t a, size_t b) |
575 { | 849 { |
576 size_t i, last; | |
577 struct stack_var *vb = &stack_vars[b]; | 850 struct stack_var *vb = &stack_vars[b]; |
578 bitmap_iterator bi; | 851 bitmap_iterator bi; |
579 unsigned u; | 852 unsigned u; |
580 | 853 |
581 /* Update each element of partition B with the given offset, | 854 gcc_assert (stack_vars[b].next == EOC); |
582 and merge them into partition A. */ | 855 /* Add B to A's partition. */ |
583 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next) | 856 stack_vars[b].next = stack_vars[a].next; |
584 { | 857 stack_vars[b].representative = a; |
585 stack_vars[i].offset += offset; | |
586 stack_vars[i].representative = a; | |
587 } | |
588 stack_vars[last].next = stack_vars[a].next; | |
589 stack_vars[a].next = b; | 858 stack_vars[a].next = b; |
590 | 859 |
591 /* Update the required alignment of partition A to account for B. */ | 860 /* Update the required alignment of partition A to account for B. */ |
592 if (stack_vars[a].alignb < stack_vars[b].alignb) | 861 if (stack_vars[a].alignb < stack_vars[b].alignb) |
593 stack_vars[a].alignb = stack_vars[b].alignb; | 862 stack_vars[a].alignb = stack_vars[b].alignb; |
603 | 872 |
604 /* A subroutine of expand_used_vars. Binpack the variables into | 873 /* A subroutine of expand_used_vars. Binpack the variables into |
605 partitions constrained by the interference graph. The overall | 874 partitions constrained by the interference graph. The overall |
606 algorithm used is as follows: | 875 algorithm used is as follows: |
607 | 876 |
608 Sort the objects by size. | 877 Sort the objects by size in descending order. |
609 For each object A { | 878 For each object A { |
610 S = size(A) | 879 S = size(A) |
611 O = 0 | 880 O = 0 |
612 loop { | 881 loop { |
613 Look for the largest non-conflicting object B with size <= S. | 882 Look for the largest non-conflicting object B with size <= S. |
614 UNION (A, B) | 883 UNION (A, B) |
615 offset(B) = O | |
616 O += size(B) | |
617 S -= size(B) | |
618 } | 884 } |
619 } | 885 } |
620 */ | 886 */ |
621 | 887 |
622 static void | 888 static void |
634 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp); | 900 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp); |
635 | 901 |
636 for (si = 0; si < n; ++si) | 902 for (si = 0; si < n; ++si) |
637 { | 903 { |
638 size_t i = stack_vars_sorted[si]; | 904 size_t i = stack_vars_sorted[si]; |
905 unsigned int ialign = stack_vars[i].alignb; | |
639 HOST_WIDE_INT isize = stack_vars[i].size; | 906 HOST_WIDE_INT isize = stack_vars[i].size; |
640 unsigned int ialign = stack_vars[i].alignb; | 907 |
641 HOST_WIDE_INT offset = 0; | 908 /* Ignore objects that aren't partition representatives. If we |
642 | 909 see a var that is not a partition representative, it must |
643 for (sj = si; sj-- > 0; ) | 910 have been merged earlier. */ |
911 if (stack_vars[i].representative != i) | |
912 continue; | |
913 | |
914 for (sj = si + 1; sj < n; ++sj) | |
644 { | 915 { |
645 size_t j = stack_vars_sorted[sj]; | 916 size_t j = stack_vars_sorted[sj]; |
917 unsigned int jalign = stack_vars[j].alignb; | |
646 HOST_WIDE_INT jsize = stack_vars[j].size; | 918 HOST_WIDE_INT jsize = stack_vars[j].size; |
647 unsigned int jalign = stack_vars[j].alignb; | |
648 | 919 |
649 /* Ignore objects that aren't partition representatives. */ | 920 /* Ignore objects that aren't partition representatives. */ |
650 if (stack_vars[j].representative != j) | 921 if (stack_vars[j].representative != j) |
651 continue; | |
652 | |
653 /* Ignore objects too large for the remaining space. */ | |
654 if (isize < jsize) | |
655 continue; | |
656 | |
657 /* Ignore conflicting objects. */ | |
658 if (stack_var_conflict_p (i, j)) | |
659 continue; | 922 continue; |
660 | 923 |
661 /* Do not mix objects of "small" (supported) alignment | 924 /* Do not mix objects of "small" (supported) alignment |
662 and "large" (unsupported) alignment. */ | 925 and "large" (unsupported) alignment. */ |
663 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) | 926 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) |
664 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)) | 927 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)) |
928 break; | |
929 | |
930 /* For Address Sanitizer do not mix objects with different | |
931 sizes, as the shorter vars wouldn't be adequately protected. | |
932 Don't do that for "large" (unsupported) alignment objects, | |
933 those aren't protected anyway. */ | |
934 if ((asan_sanitize_stack_p ()) | |
935 && isize != jsize | |
936 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) | |
937 break; | |
938 | |
939 /* Ignore conflicting objects. */ | |
940 if (stack_var_conflict_p (i, j)) | |
665 continue; | 941 continue; |
666 | 942 |
667 /* Refine the remaining space check to include alignment. */ | |
668 if (offset & (jalign - 1)) | |
669 { | |
670 HOST_WIDE_INT toff = offset; | |
671 toff += jalign - 1; | |
672 toff &= -(HOST_WIDE_INT)jalign; | |
673 if (isize - (toff - offset) < jsize) | |
674 continue; | |
675 | |
676 isize -= toff - offset; | |
677 offset = toff; | |
678 } | |
679 | |
680 /* UNION the objects, placing J at OFFSET. */ | 943 /* UNION the objects, placing J at OFFSET. */ |
681 union_stack_vars (i, j, offset); | 944 union_stack_vars (i, j); |
682 | |
683 isize -= jsize; | |
684 if (isize == 0) | |
685 break; | |
686 } | 945 } |
687 } | 946 } |
688 | 947 |
689 update_alias_info_with_stack_vars (); | 948 update_alias_info_with_stack_vars (); |
690 } | 949 } |
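
A worked instance of the greedy loop from the comment above, as a standalone sketch (hypothetical names; conflict() stands in for stack_var_conflict_p and rep[] for the representative links): objects are pre-sorted by decreasing size, and every later representative that does not conflict is merged into the current one.

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Only objects 0 and 1 are simultaneously live in this toy instance.
static bool
conflict (size_t a, size_t b)
{
  return (a == 0 && b == 1) || (a == 1 && b == 0);
}

int main ()
{
  std::vector<size_t> rep = {0, 1, 2, 3}; // each starts as its own leader
  for (size_t i = 0; i < rep.size (); ++i)
    {
      if (rep[i] != i)
        continue; // already merged into an earlier partition
      for (size_t j = i + 1; j < rep.size (); ++j)
        if (rep[j] == j && !conflict (i, j))
          rep[j] = i; // UNION (i, j)
    }
  assert (rep[2] == 0 && rep[3] == 0); // 2 and 3 joined partition 0
  assert (rep[1] == 1);                // 1 conflicts with 0, stays alone
  return 0;
}
```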
710 | 969 |
711 for (j = i; j != EOC; j = stack_vars[j].next) | 970 for (j = i; j != EOC; j = stack_vars[j].next) |
712 { | 971 { |
713 fputc ('\t', dump_file); | 972 fputc ('\t', dump_file); |
714 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); | 973 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); |
715 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", | 974 } |
716 stack_vars[j].offset); | 975 fputc ('\n', dump_file); |
717 } | |
718 } | 976 } |
719 } | 977 } |
720 | 978 |
721 /* Assign rtl to DECL at BASE + OFFSET. */ | 979 /* Assign rtl to DECL at BASE + OFFSET. */ |
722 | 980 |
728 rtx x; | 986 rtx x; |
729 | 987 |
730 /* If this fails, we've overflowed the stack frame. Error nicely? */ | 988 /* If this fails, we've overflowed the stack frame. Error nicely? */ |
731 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); | 989 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); |
732 | 990 |
733 x = plus_constant (base, offset); | 991 x = plus_constant (Pmode, base, offset); |
734 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x); | 992 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME |
993 ? TYPE_MODE (TREE_TYPE (decl)) | |
994 : DECL_MODE (SSAVAR (decl)), x); | |
735 | 995 |
736 if (TREE_CODE (decl) != SSA_NAME) | 996 if (TREE_CODE (decl) != SSA_NAME) |
737 { | 997 { |
738 /* Set alignment we actually gave this decl if it isn't an SSA name. | 998 /* Set alignment we actually gave this decl if it isn't an SSA name. |
739 If it is we generate stack slots only accidentally so it isn't as | 999 If it is we generate stack slots only accidentally so it isn't as |
740 important, we'll simply use the alignment that is already set. */ | 1000 important, we'll simply use the alignment that is already set. */ |
741 if (base == virtual_stack_vars_rtx) | 1001 if (base == virtual_stack_vars_rtx) |
742 offset -= frame_phase; | 1002 offset -= frame_phase; |
743 align = offset & -offset; | 1003 align = least_bit_hwi (offset); |
744 align *= BITS_PER_UNIT; | 1004 align *= BITS_PER_UNIT; |
745 if (align == 0 || align > base_align) | 1005 if (align == 0 || align > base_align) |
746 align = base_align; | 1006 align = base_align; |
747 | 1007 |
748 /* One would think that we could assert that we're not decreasing | 1008 /* One would think that we could assert that we're not decreasing |
749 alignment here, but (at least) the i386 port does exactly this | 1009 alignment here, but (at least) the i386 port does exactly this |
750 via the MINIMUM_ALIGNMENT hook. */ | 1010 via the MINIMUM_ALIGNMENT hook. */ |
751 | 1011 |
752 DECL_ALIGN (decl) = align; | 1012 SET_DECL_ALIGN (decl, align); |
753 DECL_USER_ALIGN (decl) = 0; | 1013 DECL_USER_ALIGN (decl) = 0; |
754 } | 1014 } |
755 | 1015 |
756 set_mem_attributes (x, SSAVAR (decl), true); | |
757 set_rtl (decl, x); | 1016 set_rtl (decl, x); |
758 } | 1017 } |
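
The alignment recovery above works because offset & -offset isolates the lowest set bit, i.e. the largest power of two dividing the offset, which is what GCC's least_bit_hwi computes. A standalone sketch of that arithmetic (hypothetical name):

```cpp
#include <cassert>
#include <cstdint>

// Lowest set bit of x: the largest power of two that divides x.
static uint64_t
least_bit_sketch (uint64_t x)
{
  return x & -x;
}

int main ()
{
  assert (least_bit_sketch (24) == 8);  // a slot at offset 24 is 8-aligned
  assert (least_bit_sketch (48) == 16); // a slot at offset 48 is 16-aligned
  return 0;
}
```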
1018 | |
1019 struct stack_vars_data | |
1020 { | |
1021 /* Vector of offset pairs, always end of some padding followed | |
1022 by start of the padding that needs Address Sanitizer protection. | |
1023 The vector is in reversed order: highest offset pairs come first. */ | |
1024 auto_vec<HOST_WIDE_INT> asan_vec; | |
1025 | |
1026 /* Vector of partition representative decls in between the paddings. */ | |
1027 auto_vec<tree> asan_decl_vec; | |
1028 | |
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */ | |
1030 rtx asan_base; | |
1031 | |
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */ | |
1033 unsigned int asan_alignb; | |
1034 }; | |
759 | 1035 |
760 /* A subroutine of expand_used_vars. Give each partition representative | 1036 /* A subroutine of expand_used_vars. Give each partition representative |
761 a unique location within the stack frame. Update each partition member | 1037 a unique location within the stack frame. Update each partition member |
762 with that location. */ | 1038 with that location. */ |
763 | 1039 |
764 static void | 1040 static void |
765 expand_stack_vars (bool (*pred) (tree)) | 1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data) |
766 { | 1042 { |
767 size_t si, i, j, n = stack_vars_num; | 1043 size_t si, i, j, n = stack_vars_num; |
768 HOST_WIDE_INT large_size = 0, large_alloc = 0; | 1044 HOST_WIDE_INT large_size = 0, large_alloc = 0; |
769 rtx large_base = NULL; | 1045 rtx large_base = NULL; |
770 unsigned large_align = 0; | 1046 unsigned large_align = 0; |
1047 bool large_allocation_done = false; | |
771 tree decl; | 1048 tree decl; |
772 | 1049 |
773 /* Determine if there are any variables requiring "large" alignment. | 1050 /* Determine if there are any variables requiring "large" alignment. |
774 Since these are dynamically allocated, we only process these if | 1051 Since these are dynamically allocated, we only process these if |
775 no predicate involved. */ | 1052 no predicate involved. */ |
782 unsigned alignb; | 1059 unsigned alignb; |
783 | 1060 |
784 i = stack_vars_sorted[si]; | 1061 i = stack_vars_sorted[si]; |
785 alignb = stack_vars[i].alignb; | 1062 alignb = stack_vars[i].alignb; |
786 | 1063 |
1064 /* All "large" alignment decls come before all "small" alignment | |
1065 decls, but "large" alignment decls are not sorted based on | |
1066 their alignment. Increase large_align to track the largest | |
1067 required alignment. */ | |
1068 if ((alignb * BITS_PER_UNIT) > large_align) | |
1069 large_align = alignb * BITS_PER_UNIT; | |
1070 | |
787 /* Stop when we get to the first decl with "small" alignment. */ | 1071 /* Stop when we get to the first decl with "small" alignment. */ |
788 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) | 1072 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) |
789 break; | 1073 break; |
790 | 1074 |
791 /* Skip variables that aren't partition representatives. */ | 1075 /* Skip variables that aren't partition representatives. */ |
793 continue; | 1077 continue; |
794 | 1078 |
795 /* Skip variables that have already had rtl assigned. See also | 1079 /* Skip variables that have already had rtl assigned. See also |
796 add_stack_var where we perpetrate this pc_rtx hack. */ | 1080 add_stack_var where we perpetrate this pc_rtx hack. */ |
797 decl = stack_vars[i].decl; | 1081 decl = stack_vars[i].decl; |
798 if ((TREE_CODE (decl) == SSA_NAME | 1082 if (TREE_CODE (decl) == SSA_NAME |
799 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] | 1083 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX |
800 : DECL_RTL (decl)) != pc_rtx) | 1084 : DECL_RTL (decl) != pc_rtx) |
801 continue; | 1085 continue; |
802 | 1086 |
803 large_size += alignb - 1; | 1087 large_size += alignb - 1; |
804 large_size &= -(HOST_WIDE_INT)alignb; | 1088 large_size &= -(HOST_WIDE_INT)alignb; |
805 large_size += stack_vars[i].size; | 1089 large_size += stack_vars[i].size; |
806 } | 1090 } |
807 | |
808 /* If there were any, allocate space. */ | |
809 if (large_size > 0) | |
810 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0, | |
811 large_align, true); | |
812 } | 1091 } |
813 | 1092 |
814 for (si = 0; si < n; ++si) | 1093 for (si = 0; si < n; ++si) |
815 { | 1094 { |
816 rtx base; | 1095 rtx base; |
824 continue; | 1103 continue; |
825 | 1104 |
826 /* Skip variables that have already had rtl assigned. See also | 1105 /* Skip variables that have already had rtl assigned. See also |
827 add_stack_var where we perpetrate this pc_rtx hack. */ | 1106 add_stack_var where we perpetrate this pc_rtx hack. */ |
828 decl = stack_vars[i].decl; | 1107 decl = stack_vars[i].decl; |
829 if ((TREE_CODE (decl) == SSA_NAME | 1108 if (TREE_CODE (decl) == SSA_NAME |
830 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] | 1109 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX |
831 : DECL_RTL (decl)) != pc_rtx) | 1110 : DECL_RTL (decl) != pc_rtx) |
832 continue; | 1111 continue; |
833 | 1112 |
834 /* Check the predicate to see whether this variable should be | 1113 /* Check the predicate to see whether this variable should be |
835 allocated in this pass. */ | 1114 allocated in this pass. */ |
836 if (pred && !pred (decl)) | 1115 if (pred && !pred (i)) |
837 continue; | 1116 continue; |
838 | 1117 |
839 alignb = stack_vars[i].alignb; | 1118 alignb = stack_vars[i].alignb; |
840 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) | 1119 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) |
841 { | 1120 { |
842 offset = alloc_stack_frame_space (stack_vars[i].size, alignb); | |
843 base = virtual_stack_vars_rtx; | 1121 base = virtual_stack_vars_rtx; |
844 base_align = crtl->max_used_stack_slot_alignment; | 1122 if ((asan_sanitize_stack_p ()) |
1123 && pred) | |
1124 { | |
1125 HOST_WIDE_INT prev_offset | |
1126 = align_base (frame_offset, | |
1127 MAX (alignb, ASAN_RED_ZONE_SIZE), | |
1128 !FRAME_GROWS_DOWNWARD); | |
1129 tree repr_decl = NULL_TREE; | |
1130 offset | |
1131 = alloc_stack_frame_space (stack_vars[i].size | |
1132 + ASAN_RED_ZONE_SIZE, | |
1133 MAX (alignb, ASAN_RED_ZONE_SIZE)); | |
1134 | |
1135 data->asan_vec.safe_push (prev_offset); | |
1136 data->asan_vec.safe_push (offset + stack_vars[i].size); | |
1137 /* Find best representative of the partition. | |
1138 Prefer those with DECL_NAME, even better | |
1139 satisfying asan_protect_stack_decl predicate. */ | |
1140 for (j = i; j != EOC; j = stack_vars[j].next) | |
1141 if (asan_protect_stack_decl (stack_vars[j].decl) | |
1142 && DECL_NAME (stack_vars[j].decl)) | |
1143 { | |
1144 repr_decl = stack_vars[j].decl; | |
1145 break; | |
1146 } | |
1147 else if (repr_decl == NULL_TREE | |
1148 && DECL_P (stack_vars[j].decl) | |
1149 && DECL_NAME (stack_vars[j].decl)) | |
1150 repr_decl = stack_vars[j].decl; | |
1151 if (repr_decl == NULL_TREE) | |
1152 repr_decl = stack_vars[i].decl; | |
1153 data->asan_decl_vec.safe_push (repr_decl); | |
1154 data->asan_alignb = MAX (data->asan_alignb, alignb); | |
1155 if (data->asan_base == NULL) | |
1156 data->asan_base = gen_reg_rtx (Pmode); | |
1157 base = data->asan_base; | |
1158 | |
1159 if (!STRICT_ALIGNMENT) | |
1160 base_align = crtl->max_used_stack_slot_alignment; | |
1161 else | |
1162 base_align = MAX (crtl->max_used_stack_slot_alignment, | |
1163 GET_MODE_ALIGNMENT (SImode) | |
1164 << ASAN_SHADOW_SHIFT); | |
1165 } | |
1166 else | |
1167 { | |
1168 offset = alloc_stack_frame_space (stack_vars[i].size, alignb); | |
1169 base_align = crtl->max_used_stack_slot_alignment; | |
1170 } | |
845 } | 1171 } |
846 else | 1172 else |
847 { | 1173 { |
848 /* Large alignment is only processed in the last pass. */ | 1174 /* Large alignment is only processed in the last pass. */ |
849 if (pred) | 1175 if (pred) |
850 continue; | 1176 continue; |
1177 | |
1178 /* If there were any variables requiring "large" alignment, allocate | |
1179 space. */ | |
1180 if (large_size > 0 && ! large_allocation_done) | |
1181 { | |
1182 HOST_WIDE_INT loffset; | |
1183 rtx large_allocsize; | |
1184 | |
1185 large_allocsize = GEN_INT (large_size); | |
1186 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL); | |
1187 loffset = alloc_stack_frame_space | |
1188 (INTVAL (large_allocsize), | |
1189 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT); | |
1190 large_base = get_dynamic_stack_base (loffset, large_align); | |
1191 large_allocation_done = true; | |
1192 } | |
851 gcc_assert (large_base != NULL); | 1193 gcc_assert (large_base != NULL); |
852 | 1194 |
853 large_alloc += alignb - 1; | 1195 large_alloc += alignb - 1; |
854 large_alloc &= -(HOST_WIDE_INT)alignb; | 1196 large_alloc &= -(HOST_WIDE_INT)alignb; |
855 offset = large_alloc; | 1197 offset = large_alloc; |
861 | 1203 |
862 /* Create rtl for each variable based on their location within the | 1204 /* Create rtl for each variable based on their location within the |
863 partition. */ | 1205 partition. */ |
864 for (j = i; j != EOC; j = stack_vars[j].next) | 1206 for (j = i; j != EOC; j = stack_vars[j].next) |
865 { | 1207 { |
866 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); | |
867 expand_one_stack_var_at (stack_vars[j].decl, | 1208 expand_one_stack_var_at (stack_vars[j].decl, |
868 base, base_align, | 1209 base, base_align, |
869 stack_vars[j].offset + offset); | 1210 offset); |
870 } | 1211 } |
871 } | 1212 } |
872 | 1213 |
873 gcc_assert (large_alloc == large_size); | 1214 gcc_assert (large_alloc == large_size); |
874 } | 1215 } |
893 set_rtl (stack_vars[j].decl, NULL); | 1234 set_rtl (stack_vars[j].decl, NULL); |
894 } | 1235 } |
895 return size; | 1236 return size; |
896 } | 1237 } |
897 | 1238 |
1239 /* Record the RTL assignment X for the default def of PARM. */ | |
1240 | |
1241 extern void | |
1242 set_parm_rtl (tree parm, rtx x) | |
1243 { | |
1244 gcc_assert (TREE_CODE (parm) == PARM_DECL | |
1245 || TREE_CODE (parm) == RESULT_DECL); | |
1246 | |
1247 if (x && !MEM_P (x)) | |
1248 { | |
1249 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm), | |
1250 TYPE_MODE (TREE_TYPE (parm)), | |
1251 TYPE_ALIGN (TREE_TYPE (parm))); | |
1252 | |
1253 /* If the variable alignment is very large we'll dynamically | 
1254 allocate it, which means that the in-frame portion is just a | 
1255 pointer. ??? We've got a pseudo for sure here, do we | |
1256 actually dynamically allocate its spilling area if needed? | |
1257 ??? Isn't it a problem when POINTER_SIZE also exceeds | |
1258 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */ | |
1259 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) | |
1260 align = POINTER_SIZE; | |
1261 | |
1262 record_alignment_for_reg_var (align); | |
1263 } | |
1264 | |
1265 tree ssa = ssa_default_def (cfun, parm); | |
1266 if (!ssa) | |
1267 return set_rtl (parm, x); | |
1268 | |
1269 int part = var_to_partition (SA.map, ssa); | |
1270 gcc_assert (part != NO_PARTITION); | |
1271 | |
1272 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part); | |
1273 gcc_assert (changed); | |
1274 | |
1275 set_rtl (ssa, x); | |
1276 gcc_assert (DECL_RTL (parm) == x); | |
1277 } | |
1278 | |
898 /* A subroutine of expand_one_var. Called to immediately assign rtl | 1279 /* A subroutine of expand_one_var. Called to immediately assign rtl |
899 to a variable to be allocated in the stack frame. */ | 1280 to a variable to be allocated in the stack frame. */ |
900 | 1281 |
901 static void | 1282 static void |
902 expand_one_stack_var (tree var) | 1283 expand_one_stack_var_1 (tree var) |
903 { | 1284 { |
904 HOST_WIDE_INT size, offset; | 1285 HOST_WIDE_INT size, offset; |
905 unsigned byte_align; | 1286 unsigned byte_align; |
906 | 1287 |
907 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1); | 1288 if (TREE_CODE (var) == SSA_NAME) |
908 byte_align = get_decl_align_unit (SSAVAR (var)); | 1289 { |
1290 tree type = TREE_TYPE (var); | |
1291 size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); | |
1292 byte_align = TYPE_ALIGN_UNIT (type); | |
1293 } | |
1294 else | |
1295 { | |
1296 size = tree_to_uhwi (DECL_SIZE_UNIT (var)); | |
1297 byte_align = align_local_variable (var); | |
1298 } | |
909 | 1299 |
910 /* We handle highly aligned variables in expand_stack_vars. */ | 1300 /* We handle highly aligned variables in expand_stack_vars. */ |
911 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT); | 1301 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT); |
912 | 1302 |
913 offset = alloc_stack_frame_space (size, byte_align); | 1303 offset = alloc_stack_frame_space (size, byte_align); |
914 | 1304 |
915 expand_one_stack_var_at (var, virtual_stack_vars_rtx, | 1305 expand_one_stack_var_at (var, virtual_stack_vars_rtx, |
916 crtl->max_used_stack_slot_alignment, offset); | 1306 crtl->max_used_stack_slot_alignment, offset); |
917 } | 1307 } |
918 | 1308 |
1309 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are | |
1310 already assigned some MEM. */ | |
1311 | |
1312 static void | |
1313 expand_one_stack_var (tree var) | |
1314 { | |
1315 if (TREE_CODE (var) == SSA_NAME) | |
1316 { | |
1317 int part = var_to_partition (SA.map, var); | |
1318 if (part != NO_PARTITION) | |
1319 { | |
1320 rtx x = SA.partition_to_pseudo[part]; | |
1321 gcc_assert (x); | |
1322 gcc_assert (MEM_P (x)); | |
1323 return; | |
1324 } | |
1325 } | |
1326 | |
1327 return expand_one_stack_var_1 (var); | |
1328 } | |
1329 | |
919 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL | 1330 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL |
920 that will reside in a hard register. */ | 1331 that will reside in a hard register. */ |
921 | 1332 |
922 static void | 1333 static void |
923 expand_one_hard_reg_var (tree var) | 1334 expand_one_hard_reg_var (tree var) |
924 { | 1335 { |
925 rest_of_decl_compilation (var, 0, 0); | 1336 rest_of_decl_compilation (var, 0, 0); |
926 } | 1337 } |
927 | 1338 |
1339 /* Record the alignment requirements of some variable assigned to a | |
1340 pseudo. */ | |
1341 | |
1342 static void | |
1343 record_alignment_for_reg_var (unsigned int align) | |
1344 { | |
1345 if (SUPPORTS_STACK_ALIGNMENT | |
1346 && crtl->stack_alignment_estimated < align) | |
1347 { | |
1348 /* stack_alignment_estimated shouldn't change after stack | |
1349 realign decision made */ | |
1350 gcc_assert (!crtl->stack_realign_processed); | |
1351 crtl->stack_alignment_estimated = align; | |
1352 } | |
1353 | |
1354 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. | |
1355 So here we only make sure stack_alignment_needed >= align. */ | |
1356 if (crtl->stack_alignment_needed < align) | |
1357 crtl->stack_alignment_needed = align; | |
1358 if (crtl->max_used_stack_slot_alignment < align) | |
1359 crtl->max_used_stack_slot_alignment = align; | |
1360 } | |
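[editor's note] A concrete trigger for this bookkeeping is an over-aligned local. A hedged source-level example (illustrative; the realignment itself happens later, and only on SUPPORTS_STACK_ALIGNMENT targets such as x86):

    void consume (void *);

    /* A 64-byte-aligned local exceeds the usual 16-byte ABI stack
       alignment, so the bookkeeping above bumps
       crtl->stack_alignment_estimated and the prologue later realigns
       the frame.  */
    void
    demo (void)
    {
      char buf[128] __attribute__ ((aligned (64)));
      consume (buf);
    }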
1361 | |
1362 /* Create RTL for an SSA partition. */ | |
1363 | |
1364 static void | |
1365 expand_one_ssa_partition (tree var) | |
1366 { | |
1367 int part = var_to_partition (SA.map, var); | |
1368 gcc_assert (part != NO_PARTITION); | |
1369 | |
1370 if (SA.partition_to_pseudo[part]) | |
1371 return; | |
1372 | |
1373 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var), | |
1374 TYPE_MODE (TREE_TYPE (var)), | |
1375 TYPE_ALIGN (TREE_TYPE (var))); | |
1376 | |
1377 /* If the variable alignment is very large we'll dynamically allocate | 
1378 it, which means that the in-frame portion is just a pointer. | 
1379 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) | |
1380 align = POINTER_SIZE; | |
1381 | |
1382 record_alignment_for_reg_var (align); | |
1383 | |
1384 if (!use_register_for_decl (var)) | |
1385 { | |
1386 if (defer_stack_allocation (var, true)) | |
1387 add_stack_var (var); | |
1388 else | |
1389 expand_one_stack_var_1 (var); | |
1390 return; | |
1391 } | |
1392 | |
1393 machine_mode reg_mode = promote_ssa_mode (var, NULL); | |
1394 rtx x = gen_reg_rtx (reg_mode); | |
1395 | |
1396 set_rtl (var, x); | |
1397 | |
1398 /* For a promoted variable, X will not be used directly but wrapped in a | |
1399 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land | |
1400 will assume that its upper bits can be inferred from its lower bits. | |
1401 Therefore, if X isn't initialized on every path from the entry, then | |
1402 we must do it manually in order to fulfill the above assumption. */ | |
1403 if (reg_mode != TYPE_MODE (TREE_TYPE (var)) | |
1404 && bitmap_bit_p (SA.partitions_for_undefined_values, part)) | |
1405 emit_move_insn (x, CONST0_RTX (reg_mode)); | |
1406 } | |
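[editor's note] The trailing CONST0_RTX store handles SSA names that are undefined on some path into their use. A hedged illustration (target-dependent: it only matters where PROMOTE_MODE widens the declared mode, e.g. sub-word types on some RISC targets):

    /* 'x' is undefined when p == 0.  If 'short' is promoted to a wider
       register mode, the pseudo sits under a SUBREG_PROMOTED_VAR_P
       subreg whose upper bits are assumed valid, so the expander zeroes
       the pseudo once on entry for partitions flagged in
       SA.partitions_for_undefined_values.  */
    short
    demo (int p, short a)
    {
      short x;
      if (p)
        x = a;
      return x;
    }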
1407 | |
1408 /* Record the association between the RTL generated for partition PART | |
1409 and the underlying variable of the SSA_NAME VAR. */ | |
1410 | |
1411 static void | |
1412 adjust_one_expanded_partition_var (tree var) | |
1413 { | |
1414 if (!var) | |
1415 return; | |
1416 | |
1417 tree decl = SSA_NAME_VAR (var); | |
1418 | |
1419 int part = var_to_partition (SA.map, var); | |
1420 if (part == NO_PARTITION) | |
1421 return; | |
1422 | |
1423 rtx x = SA.partition_to_pseudo[part]; | |
1424 | |
1425 gcc_assert (x); | |
1426 | |
1427 set_rtl (var, x); | |
1428 | |
1429 if (!REG_P (x)) | |
1430 return; | |
1431 | |
1432 /* Note if the object is a user variable. */ | |
1433 if (decl && !DECL_ARTIFICIAL (decl)) | |
1434 mark_user_reg (x); | |
1435 | |
1436 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var))) | |
1437 mark_reg_pointer (x, get_pointer_alignment (var)); | |
1438 } | |
1439 | |
928 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL | 1440 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL |
929 that will reside in a pseudo register. */ | 1441 that will reside in a pseudo register. */ |
930 | 1442 |
931 static void | 1443 static void |
932 expand_one_register_var (tree var) | 1444 expand_one_register_var (tree var) |
933 { | 1445 { |
934 tree decl = SSAVAR (var); | 1446 if (TREE_CODE (var) == SSA_NAME) |
1447 { | |
1448 int part = var_to_partition (SA.map, var); | |
1449 if (part != NO_PARTITION) | |
1450 { | |
1451 rtx x = SA.partition_to_pseudo[part]; | |
1452 gcc_assert (x); | |
1453 gcc_assert (REG_P (x)); | |
1454 return; | |
1455 } | |
1456 gcc_unreachable (); | |
1457 } | |
1458 | |
1459 tree decl = var; | |
935 tree type = TREE_TYPE (decl); | 1460 tree type = TREE_TYPE (decl); |
936 enum machine_mode reg_mode = promote_decl_mode (decl, NULL); | 1461 machine_mode reg_mode = promote_decl_mode (decl, NULL); |
937 rtx x = gen_reg_rtx (reg_mode); | 1462 rtx x = gen_reg_rtx (reg_mode); |
938 | 1463 |
939 set_rtl (var, x); | 1464 set_rtl (var, x); |
940 | 1465 |
941 /* Note if the object is a user variable. */ | 1466 /* Note if the object is a user variable. */ |
942 if (!DECL_ARTIFICIAL (decl)) | 1467 if (!DECL_ARTIFICIAL (decl)) |
943 mark_user_reg (x); | 1468 mark_user_reg (x); |
944 | 1469 |
945 if (POINTER_TYPE_P (type)) | 1470 if (POINTER_TYPE_P (type)) |
946 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type))); | 1471 mark_reg_pointer (x, get_pointer_alignment (var)); |
947 } | 1472 } |
948 | 1473 |
949 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that | 1474 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that |
950 has some associated error, e.g. its type is error-mark. We just need | 1475 has some associated error, e.g. its type is error-mark. We just need |
951 to pick something that won't crash the rest of the compiler. */ | 1476 to pick something that won't crash the rest of the compiler. */ |
952 | 1477 |
953 static void | 1478 static void |
954 expand_one_error_var (tree var) | 1479 expand_one_error_var (tree var) |
955 { | 1480 { |
956 enum machine_mode mode = DECL_MODE (var); | 1481 machine_mode mode = DECL_MODE (var); |
957 rtx x; | 1482 rtx x; |
958 | 1483 |
959 if (mode == BLKmode) | 1484 if (mode == BLKmode) |
960 x = gen_rtx_MEM (BLKmode, const0_rtx); | 1485 x = gen_rtx_MEM (BLKmode, const0_rtx); |
961 else if (mode == VOIDmode) | 1486 else if (mode == VOIDmode) |
975 for coalescing, which reduces the size of the quadratic problem. */ | 1500 for coalescing, which reduces the size of the quadratic problem. */ |
976 | 1501 |
977 static bool | 1502 static bool |
978 defer_stack_allocation (tree var, bool toplevel) | 1503 defer_stack_allocation (tree var, bool toplevel) |
979 { | 1504 { |
1505 tree size_unit = TREE_CODE (var) == SSA_NAME | |
1506 ? TYPE_SIZE_UNIT (TREE_TYPE (var)) | |
1507 : DECL_SIZE_UNIT (var); | |
1508 | |
1509 /* Whether the variable is small enough for immediate allocation not to be | |
1510 a problem with regard to the frame size. */ | |
1511 bool smallish | |
1512 = ((HOST_WIDE_INT) tree_to_uhwi (size_unit) | |
1513 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)); | |
1514 | |
980 /* If stack protection is enabled, *all* stack variables must be deferred, | 1515 /* If stack protection is enabled, *all* stack variables must be deferred, |
981 so that we can re-order the strings to the top of the frame. */ | 1516 so that we can re-order the strings to the top of the frame. |
982 if (flag_stack_protect) | 1517 Similarly for Address Sanitizer. */ |
1518 if (flag_stack_protect || asan_sanitize_stack_p ()) | |
983 return true; | 1519 return true; |
1520 | |
1521 unsigned int align = TREE_CODE (var) == SSA_NAME | |
1522 ? TYPE_ALIGN (TREE_TYPE (var)) | |
1523 : DECL_ALIGN (var); | |
984 | 1524 |
985 /* We handle "large" alignment via dynamic allocation. We want to handle | 1525 /* We handle "large" alignment via dynamic allocation. We want to handle |
986 this extra complication in only one place, so defer them. */ | 1526 this extra complication in only one place, so defer them. */ |
987 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT) | 1527 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) |
988 return true; | 1528 return true; |
989 | 1529 |
990 /* Variables in the outermost scope automatically conflict with | 1530 bool ignored = TREE_CODE (var) == SSA_NAME |
991 every other variable. The only reason to want to defer them | 1531 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var)) |
1532 : DECL_IGNORED_P (var); | |
1533 | |
1534 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped | |
1535 might be detached from their block and appear at toplevel when we reach | |
1536 here. We want to coalesce them with variables from other blocks when | |
1537 the immediate contribution to the frame size would be noticeable. */ | |
1538 if (toplevel && optimize > 0 && ignored && !smallish) | |
1539 return true; | |
1540 | |
1541 /* Variables declared in the outermost scope automatically conflict | |
1542 with every other variable. The only reason to want to defer them | |
992 at all is that, after sorting, we can more efficiently pack | 1543 at all is that, after sorting, we can more efficiently pack |
993 small variables in the stack frame. Continue to defer at -O2. */ | 1544 small variables in the stack frame. Continue to defer at -O2. */ |
994 if (toplevel && optimize < 2) | 1545 if (toplevel && optimize < 2) |
995 return false; | 1546 return false; |
996 | 1547 |
998 stack, which makes the quadratic problem large exactly when we | 1549 stack, which makes the quadratic problem large exactly when we |
999 want compilation to proceed as quickly as possible. On the | 1550 want compilation to proceed as quickly as possible. On the |
1000 other hand, we don't want the function's stack frame size to | 1551 other hand, we don't want the function's stack frame size to |
1001 get completely out of hand. So we avoid adding scalars and | 1552 get completely out of hand. So we avoid adding scalars and |
1002 "small" aggregates to the list at all. */ | 1553 "small" aggregates to the list at all. */ |
1003 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32) | 1554 if (optimize == 0 && smallish) |
1004 return false; | 1555 return false; |
1005 | 1556 |
1006 return true; | 1557 return true; |
1007 } | 1558 } |
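[editor's note] A hedged example of what the "smallish" test changes. The threshold is now the --param min-size-for-stack-sharing value, replacing the hard-coded 32-byte check visible on the left:

    void use (void *);

    /* At -O0, 'a' (16 bytes) is below the sharing threshold and gets an
       immediate frame slot; 'big' is deferred so partitioning can
       overlap it with other large locals.  With -fstack-protector or
       -fsanitize=address, both are deferred unconditionally.  */
    void
    demo (void)
    {
      char a[16];
      char big[256];
      use (a);
      use (big);
    }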
1008 | 1559 |
1019 unsigned int align = BITS_PER_UNIT; | 1570 unsigned int align = BITS_PER_UNIT; |
1020 tree origvar = var; | 1571 tree origvar = var; |
1021 | 1572 |
1022 var = SSAVAR (var); | 1573 var = SSAVAR (var); |
1023 | 1574 |
1024 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL) | 1575 if (TREE_TYPE (var) != error_mark_node && VAR_P (var)) |
1025 { | 1576 { |
1577 if (is_global_var (var)) | |
1578 return 0; | |
1579 | |
1026 /* Because we don't know if VAR will be in register or on stack, | 1580 /* Because we don't know if VAR will be in register or on stack, |
1027 we conservatively assume it will be on stack even if VAR is | 1581 we conservatively assume it will be on stack even if VAR is |
1028 eventually put into register after RA pass. For non-automatic | 1582 eventually put into register after RA pass. For non-automatic |
1029 variables, which won't be on stack, we collect alignment of | 1583 variables, which won't be on stack, we collect alignment of |
1030 type and ignore user specified alignment. */ | 1584 type and ignore user specified alignment. Similarly for |
1031 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 1585 SSA_NAMEs for which use_register_for_decl returns true. */ |
1586 if (TREE_STATIC (var) | |
1587 || DECL_EXTERNAL (var) | |
1588 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var))) | |
1032 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), | 1589 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), |
1033 TYPE_MODE (TREE_TYPE (var)), | 1590 TYPE_MODE (TREE_TYPE (var)), |
1034 TYPE_ALIGN (TREE_TYPE (var))); | 1591 TYPE_ALIGN (TREE_TYPE (var))); |
1035 else if (DECL_HAS_VALUE_EXPR_P (var) | 1592 else if (DECL_HAS_VALUE_EXPR_P (var) |
1036 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var)))) | 1593 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var)))) |
1046 it, which means that in-frame portion is just a pointer. */ | 1603 it, which means that in-frame portion is just a pointer. */ |
1047 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) | 1604 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) |
1048 align = POINTER_SIZE; | 1605 align = POINTER_SIZE; |
1049 } | 1606 } |
1050 | 1607 |
1051 if (SUPPORTS_STACK_ALIGNMENT | 1608 record_alignment_for_reg_var (align); |
1052 && crtl->stack_alignment_estimated < align) | |
1053 { | |
1054 /* stack_alignment_estimated shouldn't change after stack | |
1055 realign decision made */ | |
1056 gcc_assert(!crtl->stack_realign_processed); | |
1057 crtl->stack_alignment_estimated = align; | |
1058 } | |
1059 | |
1060 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. | |
1061 So here we only make sure stack_alignment_needed >= align. */ | |
1062 if (crtl->stack_alignment_needed < align) | |
1063 crtl->stack_alignment_needed = align; | |
1064 if (crtl->max_used_stack_slot_alignment < align) | |
1065 crtl->max_used_stack_slot_alignment = align; | |
1066 | 1609 |
1067 if (TREE_CODE (origvar) == SSA_NAME) | 1610 if (TREE_CODE (origvar) == SSA_NAME) |
1068 { | 1611 { |
1069 gcc_assert (TREE_CODE (var) != VAR_DECL | 1612 gcc_assert (!VAR_P (var) |
1070 || (!DECL_EXTERNAL (var) | 1613 || (!DECL_EXTERNAL (var) |
1071 && !DECL_HAS_VALUE_EXPR_P (var) | 1614 && !DECL_HAS_VALUE_EXPR_P (var) |
1072 && !TREE_STATIC (var) | 1615 && !TREE_STATIC (var) |
1073 && TREE_TYPE (var) != error_mark_node | 1616 && TREE_TYPE (var) != error_mark_node |
1074 && !DECL_HARD_REGISTER (var) | 1617 && !DECL_HARD_REGISTER (var) |
1075 && really_expand)); | 1618 && really_expand)); |
1076 } | 1619 } |
1077 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME) | 1620 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME) |
1078 ; | 1621 ; |
1079 else if (DECL_EXTERNAL (var)) | 1622 else if (DECL_EXTERNAL (var)) |
1080 ; | 1623 ; |
1081 else if (DECL_HAS_VALUE_EXPR_P (var)) | 1624 else if (DECL_HAS_VALUE_EXPR_P (var)) |
1082 ; | 1625 ; |
1087 else if (TREE_TYPE (var) == error_mark_node) | 1630 else if (TREE_TYPE (var) == error_mark_node) |
1088 { | 1631 { |
1089 if (really_expand) | 1632 if (really_expand) |
1090 expand_one_error_var (var); | 1633 expand_one_error_var (var); |
1091 } | 1634 } |
1092 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var)) | 1635 else if (VAR_P (var) && DECL_HARD_REGISTER (var)) |
1093 { | 1636 { |
1094 if (really_expand) | 1637 if (really_expand) |
1095 expand_one_hard_reg_var (var); | 1638 { |
1639 expand_one_hard_reg_var (var); | |
1640 if (!DECL_HARD_REGISTER (var)) | |
1641 /* Invalid register specification. */ | |
1642 expand_one_error_var (var); | |
1643 } | |
1096 } | 1644 } |
1097 else if (use_register_for_decl (var)) | 1645 else if (use_register_for_decl (var)) |
1098 { | 1646 { |
1099 if (really_expand) | 1647 if (really_expand) |
1100 expand_one_register_var (origvar); | 1648 expand_one_register_var (origvar); |
1101 } | 1649 } |
1102 else if (!host_integerp (DECL_SIZE_UNIT (var), 1)) | 1650 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var))) |
1103 { | 1651 { |
1652 /* Reject variables which cover more than half of the address-space. */ | |
1104 if (really_expand) | 1653 if (really_expand) |
1105 { | 1654 { |
1106 error ("size of variable %q+D is too large", var); | 1655 error ("size of variable %q+D is too large", var); |
1107 expand_one_error_var (var); | 1656 expand_one_error_var (var); |
1108 } | 1657 } |
1110 else if (defer_stack_allocation (var, toplevel)) | 1659 else if (defer_stack_allocation (var, toplevel)) |
1111 add_stack_var (origvar); | 1660 add_stack_var (origvar); |
1112 else | 1661 else |
1113 { | 1662 { |
1114 if (really_expand) | 1663 if (really_expand) |
1115 expand_one_stack_var (origvar); | 1664 { |
1116 return tree_low_cst (DECL_SIZE_UNIT (var), 1); | 1665 if (lookup_attribute ("naked", |
1666 DECL_ATTRIBUTES (current_function_decl))) | |
1667 error ("cannot allocate stack for variable %q+D, naked function.", | |
1668 var); | |
1669 | |
1670 expand_one_stack_var (origvar); | |
1671 } | |
1672 | |
1673 | |
1674 return tree_to_uhwi (DECL_SIZE_UNIT (var)); | |
1117 } | 1675 } |
1118 return 0; | 1676 return 0; |
1119 } | 1677 } |
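[editor's note] The naked-function check above rejects locals that would need frame space. A hedged example for a target that supports the attribute (ARM, AVR, MSP430, ...):

    /* A naked function has no prologue, so there is no frame to put
       'buf' in; expansion reaches the diagnostic above:
       "cannot allocate stack for variable ..., naked function."  */
    void __attribute__ ((naked))
    isr_stub (void)
    {
      volatile char buf[8];
      (void) buf;
    }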
1120 | 1678 |
1121 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree | 1679 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree |
1125 TOPLEVEL is true if this is the outermost BLOCK. */ | 1683 TOPLEVEL is true if this is the outermost BLOCK. */ |
1126 | 1684 |
1127 static void | 1685 static void |
1128 expand_used_vars_for_block (tree block, bool toplevel) | 1686 expand_used_vars_for_block (tree block, bool toplevel) |
1129 { | 1687 { |
1130 size_t i, j, old_sv_num, this_sv_num, new_sv_num; | |
1131 tree t; | 1688 tree t; |
1132 | |
1133 old_sv_num = toplevel ? 0 : stack_vars_num; | |
1134 | 1689 |
1135 /* Expand all variables at this level. */ | 1690 /* Expand all variables at this level. */ |
1136 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t)) | 1691 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t)) |
1137 if (TREE_USED (t)) | 1692 if (TREE_USED (t) |
1693 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL) | |
1694 || !DECL_NONSHAREABLE (t))) | |
1138 expand_one_var (t, toplevel, true); | 1695 expand_one_var (t, toplevel, true); |
1139 | |
1140 this_sv_num = stack_vars_num; | |
1141 | 1696 |
1142 /* Expand all variables at containing levels. */ | 1697 /* Expand all variables at containing levels. */ |
1143 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) | 1698 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) |
1144 expand_used_vars_for_block (t, false); | 1699 expand_used_vars_for_block (t, false); |
1145 | |
1146 /* Since we do not track exact variable lifetimes (which is not even | |
1147 possible for variables whose address escapes), we mirror the block | |
1148 tree in the interference graph. Here we cause all variables at this | |
1149 level, and all sublevels, to conflict. */ | |
1150 if (old_sv_num < this_sv_num) | |
1151 { | |
1152 new_sv_num = stack_vars_num; | |
1153 | |
1154 for (i = old_sv_num; i < new_sv_num; ++i) | |
1155 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;) | |
1156 add_stack_var_conflict (i, j); | |
1157 } | |
1158 } | 1700 } |
1159 | 1701 |
1160 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree | 1702 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree |
1161 and clear TREE_USED on all local variables. */ | 1703 and clear TREE_USED on all local variables. */ |
1162 | 1704 |
1165 { | 1707 { |
1166 tree t; | 1708 tree t; |
1167 | 1709 |
1168 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t)) | 1710 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t)) |
1169 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */ | 1711 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */ |
1712 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL) | |
1713 || !DECL_NONSHAREABLE (t)) | |
1170 TREE_USED (t) = 0; | 1714 TREE_USED (t) = 0; |
1171 | 1715 |
1172 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) | 1716 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) |
1173 clear_tree_used (t); | 1717 clear_tree_used (t); |
1174 } | 1718 } |
1719 | |
1720 enum { | |
1721 SPCT_FLAG_DEFAULT = 1, | |
1722 SPCT_FLAG_ALL = 2, | |
1723 SPCT_FLAG_STRONG = 3, | |
1724 SPCT_FLAG_EXPLICIT = 4 | |
1725 }; | |
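[editor's note] These values mirror the -fstack-protector family: DEFAULT for -fstack-protector, ALL for -fstack-protector-all, STRONG for -fstack-protector-strong, and EXPLICIT for -fstack-protector-explicit, where only attributed functions are instrumented. A brief illustration of the EXPLICIT mode (compile with -fstack-protector-explicit):

    int sink (char *);

    /* Only 'guarded' carries the "stack_protect" attribute this pass
       looks up, so only it gets a canary under SPCT_FLAG_EXPLICIT.  */
    int __attribute__ ((stack_protect))
    guarded (void)
    {
      char buf[64];
      return sink (buf);
    }

    int
    plain (void)
    {
      char buf[64];
      return sink (buf);
    }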
1175 | 1726 |
1176 /* Examine TYPE and determine a bit mask of the following features. */ | 1727 /* Examine TYPE and determine a bit mask of the following features. */ |
1177 | 1728 |
1178 #define SPCT_HAS_LARGE_CHAR_ARRAY 1 | 1729 #define SPCT_HAS_LARGE_CHAR_ARRAY 1 |
1179 #define SPCT_HAS_SMALL_CHAR_ARRAY 2 | 1730 #define SPCT_HAS_SMALL_CHAR_ARRAY 2 |
1196 { | 1747 { |
1197 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); | 1748 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); |
1198 unsigned HOST_WIDE_INT len; | 1749 unsigned HOST_WIDE_INT len; |
1199 | 1750 |
1200 if (!TYPE_SIZE_UNIT (type) | 1751 if (!TYPE_SIZE_UNIT (type) |
1201 || !host_integerp (TYPE_SIZE_UNIT (type), 1)) | 1752 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))) |
1202 len = max; | 1753 len = max; |
1203 else | 1754 else |
1204 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1); | 1755 len = tree_to_uhwi (TYPE_SIZE_UNIT (type)); |
1205 | 1756 |
1206 if (len < max) | 1757 if (len < max) |
1207 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; | 1758 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; |
1208 else | 1759 else |
1209 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; | 1760 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; |
1240 int ret = 0; | 1791 int ret = 0; |
1241 | 1792 |
1242 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY) | 1793 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY) |
1243 has_short_buffer = true; | 1794 has_short_buffer = true; |
1244 | 1795 |
1245 if (flag_stack_protect == 2) | 1796 if (flag_stack_protect == SPCT_FLAG_ALL |
1797 || flag_stack_protect == SPCT_FLAG_STRONG | |
1798 || (flag_stack_protect == SPCT_FLAG_EXPLICIT | |
1799 && lookup_attribute ("stack_protect", | |
1800 DECL_ATTRIBUTES (current_function_decl)))) | |
1246 { | 1801 { |
1247 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) | 1802 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) |
1248 && !(bits & SPCT_HAS_AGGREGATE)) | 1803 && !(bits & SPCT_HAS_AGGREGATE)) |
1249 ret = 1; | 1804 ret = 1; |
1250 else if (bits & SPCT_HAS_ARRAY) | 1805 else if (bits & SPCT_HAS_ARRAY) |
1261 | 1816 |
1262 /* Two helper routines that check for phase 1 and phase 2. These are used | 1817 /* Two helper routines that check for phase 1 and phase 2. These are used |
1263 as callbacks for expand_stack_vars. */ | 1818 as callbacks for expand_stack_vars. */ |
1264 | 1819 |
1265 static bool | 1820 static bool |
1266 stack_protect_decl_phase_1 (tree decl) | 1821 stack_protect_decl_phase_1 (size_t i) |
1267 { | 1822 { |
1268 return stack_protect_decl_phase (decl) == 1; | 1823 return stack_protect_decl_phase (stack_vars[i].decl) == 1; |
1269 } | 1824 } |
1270 | 1825 |
1271 static bool | 1826 static bool |
1272 stack_protect_decl_phase_2 (tree decl) | 1827 stack_protect_decl_phase_2 (size_t i) |
1273 { | 1828 { |
1274 return stack_protect_decl_phase (decl) == 2; | 1829 return stack_protect_decl_phase (stack_vars[i].decl) == 2; |
1830 } | |
1831 | |
1832 /* A helper function that checks for the asan phase (with stack protector | 
1833 it is phase 3). This is used as a callback for expand_stack_vars. | 
1834 Returns true if any of the vars in the partition need to be protected. */ | |
1835 | |
1836 static bool | |
1837 asan_decl_phase_3 (size_t i) | |
1838 { | |
1839 while (i != EOC) | |
1840 { | |
1841 if (asan_protect_stack_decl (stack_vars[i].decl)) | |
1842 return true; | |
1843 i = stack_vars[i].next; | |
1844 } | |
1845 return false; | |
1275 } | 1846 } |
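[editor's note] Putting the phases together: with -fsanitize=address every local is deferred, partitioned, and any partition holding a protectable decl is laid out in phase 3 with red zones. A minimal example of a decl asan_protect_stack_decl would typically accept (illustrative):

    int reader (char *);

    /* 'buf' is addressable and array-typed, so under -fsanitize=address
       it goes through the phase-3 path and is bracketed by poisoned red
       zones in shadow memory.  */
    int
    demo (void)
    {
      char buf[32];
      return reader (buf);
    }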
1276 | 1847 |
1277 /* Ensure that variables in different stack protection phases conflict | 1848 /* Ensure that variables in different stack protection phases conflict |
1278 so that they are not merged and share the same stack slot. */ | 1849 so that they are not merged and share the same stack slot. */ |
1279 | 1850 |
1288 phase[i] = stack_protect_decl_phase (stack_vars[i].decl); | 1859 phase[i] = stack_protect_decl_phase (stack_vars[i].decl); |
1289 | 1860 |
1290 for (i = 0; i < n; ++i) | 1861 for (i = 0; i < n; ++i) |
1291 { | 1862 { |
1292 unsigned char ph_i = phase[i]; | 1863 unsigned char ph_i = phase[i]; |
1293 for (j = 0; j < i; ++j) | 1864 for (j = i + 1; j < n; ++j) |
1294 if (ph_i != phase[j]) | 1865 if (ph_i != phase[j]) |
1295 add_stack_var_conflict (i, j); | 1866 add_stack_var_conflict (i, j); |
1296 } | 1867 } |
1297 | 1868 |
1298 XDELETEVEC (phase); | 1869 XDELETEVEC (phase); |
1313 | 1884 |
1314 /* Prepare for expanding variables. */ | 1885 /* Prepare for expanding variables. */ |
1315 static void | 1886 static void |
1316 init_vars_expansion (void) | 1887 init_vars_expansion (void) |
1317 { | 1888 { |
1318 tree t; | 1889 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */ |
1319 unsigned ix; | 1890 bitmap_obstack_initialize (&stack_var_bitmap_obstack); |
1320 /* Set TREE_USED on all variables in the local_decls. */ | 1891 |
1321 FOR_EACH_LOCAL_DECL (cfun, ix, t) | 1892 /* A map from decl to stack partition. */ |
1322 TREE_USED (t) = 1; | 1893 decl_to_stack_part = new hash_map<tree, size_t>; |
1323 | |
1324 /* Clear TREE_USED on all variables associated with a block scope. */ | |
1325 clear_tree_used (DECL_INITIAL (current_function_decl)); | |
1326 | 1894 |
1327 /* Initialize local stack smashing state. */ | 1895 /* Initialize local stack smashing state. */ |
1328 has_protected_decls = false; | 1896 has_protected_decls = false; |
1329 has_short_buffer = false; | 1897 has_short_buffer = false; |
1330 } | 1898 } |
1331 | 1899 |
1332 /* Free up stack variable graph data. */ | 1900 /* Free up stack variable graph data. */ |
1333 static void | 1901 static void |
1334 fini_vars_expansion (void) | 1902 fini_vars_expansion (void) |
1335 { | 1903 { |
1336 size_t i, n = stack_vars_num; | 1904 bitmap_obstack_release (&stack_var_bitmap_obstack); |
1337 for (i = 0; i < n; i++) | 1905 if (stack_vars) |
1338 BITMAP_FREE (stack_vars[i].conflicts); | 1906 XDELETEVEC (stack_vars); |
1339 XDELETEVEC (stack_vars); | 1907 if (stack_vars_sorted) |
1340 XDELETEVEC (stack_vars_sorted); | 1908 XDELETEVEC (stack_vars_sorted); |
1341 stack_vars = NULL; | 1909 stack_vars = NULL; |
1910 stack_vars_sorted = NULL; | |
1342 stack_vars_alloc = stack_vars_num = 0; | 1911 stack_vars_alloc = stack_vars_num = 0; |
1912 delete decl_to_stack_part; | |
1913 decl_to_stack_part = NULL; | |
1343 } | 1914 } |
1344 | 1915 |
1345 /* Make a fair guess for the size of the stack frame of the function | 1916 /* Make a fair guess for the size of the stack frame of the function |
1346 in NODE. This doesn't have to be exact, the result is only used in | 1917 in NODE. This doesn't have to be exact, the result is only used in |
1347 the inline heuristics. So we don't want to run the full stack var | 1918 the inline heuristics. So we don't want to run the full stack var |
1354 estimated_stack_frame_size (struct cgraph_node *node) | 1925 estimated_stack_frame_size (struct cgraph_node *node) |
1355 { | 1926 { |
1356 HOST_WIDE_INT size = 0; | 1927 HOST_WIDE_INT size = 0; |
1357 size_t i; | 1928 size_t i; |
1358 tree var; | 1929 tree var; |
1359 tree old_cur_fun_decl = current_function_decl; | |
1360 referenced_var_iterator rvi; | |
1361 struct function *fn = DECL_STRUCT_FUNCTION (node->decl); | 1930 struct function *fn = DECL_STRUCT_FUNCTION (node->decl); |
1362 | 1931 |
1363 current_function_decl = node->decl; | |
1364 push_cfun (fn); | 1932 push_cfun (fn); |
1365 | 1933 |
1366 gcc_checking_assert (gimple_referenced_vars (fn)); | 1934 init_vars_expansion (); |
1367 FOR_EACH_REFERENCED_VAR (fn, var, rvi) | 1935 |
1368 size += expand_one_var (var, true, false); | 1936 FOR_EACH_LOCAL_DECL (fn, i, var) |
1937 if (auto_var_in_fn_p (var, fn->decl)) | |
1938 size += expand_one_var (var, true, false); | |
1369 | 1939 |
1370 if (stack_vars_num > 0) | 1940 if (stack_vars_num > 0) |
1371 { | 1941 { |
1372 /* Fake sorting the stack vars for account_stack_vars (). */ | 1942 /* Fake sorting the stack vars for account_stack_vars (). */ |
1373 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); | 1943 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); |
1374 for (i = 0; i < stack_vars_num; ++i) | 1944 for (i = 0; i < stack_vars_num; ++i) |
1375 stack_vars_sorted[i] = i; | 1945 stack_vars_sorted[i] = i; |
1376 size += account_stack_vars (); | 1946 size += account_stack_vars (); |
1377 fini_vars_expansion (); | 1947 } |
1378 } | 1948 |
1949 fini_vars_expansion (); | |
1379 pop_cfun (); | 1950 pop_cfun (); |
1380 current_function_decl = old_cur_fun_decl; | |
1381 return size; | 1951 return size; |
1382 } | 1952 } |
1383 | 1953 |
1954 /* Helper routine to check if a record or union contains an array field. */ | |
1955 | |
1956 static int | |
1957 record_or_union_type_has_array_p (const_tree tree_type) | |
1958 { | |
1959 tree fields = TYPE_FIELDS (tree_type); | |
1960 tree f; | |
1961 | |
1962 for (f = fields; f; f = DECL_CHAIN (f)) | |
1963 if (TREE_CODE (f) == FIELD_DECL) | |
1964 { | |
1965 tree field_type = TREE_TYPE (f); | |
1966 if (RECORD_OR_UNION_TYPE_P (field_type) | |
1967 && record_or_union_type_has_array_p (field_type)) | |
1968 return 1; | |
1969 if (TREE_CODE (field_type) == ARRAY_TYPE) | |
1970 return 1; | |
1971 } | |
1972 return 0; | |
1973 } | |
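[editor's note] A hedged example of the recursion: a struct is flagged even when the array sits inside a nested member.

    struct inner { char name[8]; };
    struct outer { int id; struct inner in; };

    int probe (struct outer *);

    /* Under -fstack-protector-strong, 's' makes this function
       protected: its type transitively contains an array (and taking
       its address would independently satisfy the TREE_ADDRESSABLE
       test above).  */
    int
    demo (void)
    {
      struct outer s;
      s.id = 0;
      return probe (&s);
    }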
1974 | |
1975 /* Check if the current function has local referenced variables that | |
1976 have their addresses taken, contain an array, or are arrays. */ | |
1977 | |
1978 static bool | |
1979 stack_protect_decl_p () | |
1980 { | |
1981 unsigned i; | |
1982 tree var; | |
1983 | |
1984 FOR_EACH_LOCAL_DECL (cfun, i, var) | |
1985 if (!is_global_var (var)) | |
1986 { | |
1987 tree var_type = TREE_TYPE (var); | |
1988 if (VAR_P (var) | |
1989 && (TREE_CODE (var_type) == ARRAY_TYPE | |
1990 || TREE_ADDRESSABLE (var) | |
1991 || (RECORD_OR_UNION_TYPE_P (var_type) | |
1992 && record_or_union_type_has_array_p (var_type)))) | |
1993 return true; | |
1994 } | |
1995 return false; | |
1996 } | |
1997 | |
1998 /* Check if the current function has calls that use a return slot. */ | |
1999 | |
2000 static bool | |
2001 stack_protect_return_slot_p () | |
2002 { | |
2003 basic_block bb; | |
2004 | |
2005 FOR_ALL_BB_FN (bb, cfun) | |
2006 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
2007 !gsi_end_p (gsi); gsi_next (&gsi)) | |
2008 { | |
2009 gimple *stmt = gsi_stmt (gsi); | |
2010 /* This assumes that calls to internal-only functions never | |
2011 use a return slot. */ | |
2012 if (is_gimple_call (stmt) | |
2013 && !gimple_call_internal_p (stmt) | |
2014 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)), | |
2015 gimple_call_fndecl (stmt))) | |
2016 return true; | |
2017 } | |
2018 return false; | |
2019 } | |
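[editor's note] A hedged example of a return-slot use: a by-value aggregate return that most ABIs pass back through a hidden memory slot.

    struct big { char data[64]; };

    struct big get (void);

    /* The call to 'get' stores through a caller-provided return slot on
       typical ABIs, so -fstack-protector-strong emits a canary here
       even though no local array exists.  */
    char
    demo (void)
    {
      struct big b = get ();
      return b.data[0];
    }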
2020 | |
1384 /* Expand all variables used in the function. */ | 2021 /* Expand all variables used in the function. */ |
1385 | 2022 |
1386 static void | 2023 static rtx_insn * |
1387 expand_used_vars (void) | 2024 expand_used_vars (void) |
1388 { | 2025 { |
1389 tree var, outer_block = DECL_INITIAL (current_function_decl); | 2026 tree var, outer_block = DECL_INITIAL (current_function_decl); |
1390 VEC(tree,heap) *maybe_local_decls = NULL; | 2027 auto_vec<tree> maybe_local_decls; |
2028 rtx_insn *var_end_seq = NULL; | |
1391 unsigned i; | 2029 unsigned i; |
1392 unsigned len; | 2030 unsigned len; |
2031 bool gen_stack_protect_signal = false; | |
1393 | 2032 |
1394 /* Compute the phase of the stack frame for this function. */ | 2033 /* Compute the phase of the stack frame for this function. */ |
1395 { | 2034 { |
1396 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 2035 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1397 int off = STARTING_FRAME_OFFSET % align; | 2036 int off = targetm.starting_frame_offset () % align; |
1398 frame_phase = off ? align - off : 0; | 2037 frame_phase = off ? align - off : 0; |
1399 } | 2038 } |
1400 | 2039 |
2040 /* Set TREE_USED on all variables in the local_decls. */ | |
2041 FOR_EACH_LOCAL_DECL (cfun, i, var) | |
2042 TREE_USED (var) = 1; | |
2043 /* Clear TREE_USED on all variables associated with a block scope. */ | |
2044 clear_tree_used (DECL_INITIAL (current_function_decl)); | |
2045 | |
1401 init_vars_expansion (); | 2046 init_vars_expansion (); |
1402 | 2047 |
2048 if (targetm.use_pseudo_pic_reg ()) | |
2049 pic_offset_table_rtx = gen_reg_rtx (Pmode); | |
2050 | |
1403 for (i = 0; i < SA.map->num_partitions; i++) | 2051 for (i = 0; i < SA.map->num_partitions; i++) |
1404 { | 2052 { |
2053 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i)) | |
2054 continue; | |
2055 | |
1405 tree var = partition_to_var (SA.map, i); | 2056 tree var = partition_to_var (SA.map, i); |
1406 | 2057 |
1407 gcc_assert (is_gimple_reg (var)); | 2058 gcc_assert (!virtual_operand_p (var)); |
1408 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL) | 2059 |
1409 expand_one_var (var, true, true); | 2060 expand_one_ssa_partition (var); |
1410 else | 2061 } |
1411 { | 2062 |
1412 /* This is a PARM_DECL or RESULT_DECL. For those partitions that | 2063 if (flag_stack_protect == SPCT_FLAG_STRONG) |
1413 contain the default def (representing the parm or result itself) | 2064 gen_stack_protect_signal |
1414 we don't do anything here. But those which don't contain the | 2065 = stack_protect_decl_p () || stack_protect_return_slot_p (); |
1415 default def (representing a temporary based on the parm/result) | |
1416 we need to allocate space just like for normal VAR_DECLs. */ | |
1417 if (!bitmap_bit_p (SA.partition_has_default_def, i)) | |
1418 { | |
1419 expand_one_var (var, true, true); | |
1420 gcc_assert (SA.partition_to_pseudo[i]); | |
1421 } | |
1422 } | |
1423 } | |
1424 | 2066 |
1425 /* At this point all variables on the local_decls with TREE_USED | 2067 /* At this point all variables on the local_decls with TREE_USED |
1426 set are not associated with any block scope. Lay them out. */ | 2068 set are not associated with any block scope. Lay them out. */ |
1427 | 2069 |
1428 len = VEC_length (tree, cfun->local_decls); | 2070 len = vec_safe_length (cfun->local_decls); |
1429 FOR_EACH_LOCAL_DECL (cfun, i, var) | 2071 FOR_EACH_LOCAL_DECL (cfun, i, var) |
1430 { | 2072 { |
1431 bool expand_now = false; | 2073 bool expand_now = false; |
1432 | 2074 |
1433 /* Expanded above already. */ | 2075 /* Expanded above already. */ |
1442 begin with. And it doesn't really matter much, since we're | 2084 begin with. And it doesn't really matter much, since we're |
1443 not giving them stack space. Expand them now. */ | 2085 not giving them stack space. Expand them now. */ |
1444 else if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 2086 else if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
1445 expand_now = true; | 2087 expand_now = true; |
1446 | 2088 |
1447 /* If the variable is not associated with any block, then it | 2089 /* Expand variables not associated with any block now. Those created by |
1448 was created by the optimizers, and could be live anywhere | 2090 the optimizers could be live anywhere in the function. Those that |
1449 in the function. */ | 2091 could possibly have been scoped originally and detached from their |
2092 block will have their allocation deferred so we coalesce them with | |
2093 others when optimization is enabled. */ | |
1450 else if (TREE_USED (var)) | 2094 else if (TREE_USED (var)) |
1451 expand_now = true; | 2095 expand_now = true; |
1452 | 2096 |
1453 /* Finally, mark all variables on the list as used. We'll use | 2097 /* Finally, mark all variables on the list as used. We'll use |
1454 this in a moment when we expand those associated with scopes. */ | 2098 this in a moment when we expand those associated with scopes. */ |
1468 add_local_decl (cfun, var); | 2112 add_local_decl (cfun, var); |
1469 else if (rtl == NULL_RTX) | 2113 else if (rtl == NULL_RTX) |
1470 /* If rtl isn't set yet, which can happen e.g. with | 2114 /* If rtl isn't set yet, which can happen e.g. with |
1471 -fstack-protector, retry before returning from this | 2115 -fstack-protector, retry before returning from this |
1472 function. */ | 2116 function. */ |
1473 VEC_safe_push (tree, heap, maybe_local_decls, var); | 2117 maybe_local_decls.safe_push (var); |
1474 } | 2118 } |
1475 } | 2119 } |
1476 | 2120 |
1477 /* We duplicated some of the decls in CFUN->LOCAL_DECLS. | 2121 /* We duplicated some of the decls in CFUN->LOCAL_DECLS. |
1478 | 2122 |
1483 +-- LEN points here. | 2127 +-- LEN points here. |
1484 | 2128 |
1485 We just want the duplicates, as those are the artificial | 2129 We just want the duplicates, as those are the artificial |
1486 non-ignored vars that we want to keep until instantiate_decls. | 2130 non-ignored vars that we want to keep until instantiate_decls. |
1487 Move them down and truncate the array. */ | 2131 Move them down and truncate the array. */ |
1488 if (!VEC_empty (tree, cfun->local_decls)) | 2132 if (!vec_safe_is_empty (cfun->local_decls)) |
1489 VEC_block_remove (tree, cfun->local_decls, 0, len); | 2133 cfun->local_decls->block_remove (0, len); |
1490 | 2134 |
1491 /* At this point, all variables within the block tree with TREE_USED | 2135 /* At this point, all variables within the block tree with TREE_USED |
1492 set are actually used by the optimized function. Lay them out. */ | 2136 set are actually used by the optimized function. Lay them out. */ |
1493 expand_used_vars_for_block (outer_block, true); | 2137 expand_used_vars_for_block (outer_block, true); |
1494 | 2138 |
1495 if (stack_vars_num > 0) | 2139 if (stack_vars_num > 0) |
1496 { | 2140 { |
1497 /* Due to the way alias sets work, no variables with non-conflicting | 2141 add_scope_conflicts (); |
1498 alias sets may be assigned the same address. Add conflicts to | |
1499 reflect this. */ | |
1500 add_alias_set_conflicts (); | |
1501 | 2142 |
1502 /* If stack protection is enabled, we don't share space between | 2143 /* If stack protection is enabled, we don't share space between |
1503 vulnerable data and non-vulnerable data. */ | 2144 vulnerable data and non-vulnerable data. */ |
1504 if (flag_stack_protect) | 2145 if (flag_stack_protect != 0 |
2146 && (flag_stack_protect != SPCT_FLAG_EXPLICIT | |
2147 || (flag_stack_protect == SPCT_FLAG_EXPLICIT | |
2148 && lookup_attribute ("stack_protect", | |
2149 DECL_ATTRIBUTES (current_function_decl))))) | |
1505 add_stack_protection_conflicts (); | 2150 add_stack_protection_conflicts (); |
1506 | 2151 |
1507 /* Now that we have collected all stack variables, and have computed a | 2152 /* Now that we have collected all stack variables, and have computed a |
1508 minimal interference graph, attempt to save some stack space. */ | 2153 minimal interference graph, attempt to save some stack space. */ |
1509 partition_stack_vars (); | 2154 partition_stack_vars (); |
1510 if (dump_file) | 2155 if (dump_file) |
1511 dump_stack_var_partition (); | 2156 dump_stack_var_partition (); |
1512 } | 2157 } |
1513 | 2158 |
1514 /* There are several conditions under which we should create a | 2159 switch (flag_stack_protect) |
1515 stack guard: protect-all, alloca used, protected decls present. */ | 2160 { |
1516 if (flag_stack_protect == 2 | 2161 case SPCT_FLAG_ALL: |
1517 || (flag_stack_protect | 2162 create_stack_guard (); |
1518 && (cfun->calls_alloca || has_protected_decls))) | 2163 break; |
1519 create_stack_guard (); | 2164 |
2165 case SPCT_FLAG_STRONG: | |
2166 if (gen_stack_protect_signal | |
2167 || cfun->calls_alloca || has_protected_decls | |
2168 || lookup_attribute ("stack_protect", | |
2169 DECL_ATTRIBUTES (current_function_decl))) | |
2170 create_stack_guard (); | |
2171 break; | |
2172 | |
2173 case SPCT_FLAG_DEFAULT: | |
2174 if (cfun->calls_alloca || has_protected_decls | |
2175 || lookup_attribute ("stack_protect", | |
2176 DECL_ATTRIBUTES (current_function_decl))) | |
2177 create_stack_guard (); | |
2178 break; | |
2179 | |
2180 case SPCT_FLAG_EXPLICIT: | |
2181 if (lookup_attribute ("stack_protect", | |
2182 DECL_ATTRIBUTES (current_function_decl))) | |
2183 create_stack_guard (); | |
2184 break; | |
2185 default: | |
2186 ; | |
2187 } | |
1520 | 2188 |
1521 /* Assign rtl to each variable based on these partitions. */ | 2189 /* Assign rtl to each variable based on these partitions. */ |
1522 if (stack_vars_num > 0) | 2190 if (stack_vars_num > 0) |
1523 { | 2191 { |
2192 struct stack_vars_data data; | |
2193 | |
2194 data.asan_base = NULL_RTX; | |
2195 data.asan_alignb = 0; | |
2196 | |
1524 /* Reorder decls to be protected by iterating over the variables | 2197 /* Reorder decls to be protected by iterating over the variables |
1525 array multiple times, and allocating out of each phase in turn. */ | 2198 array multiple times, and allocating out of each phase in turn. */ |
1526 /* ??? We could probably integrate this into the qsort we did | 2199 /* ??? We could probably integrate this into the qsort we did |
1527 earlier, such that we naturally see these variables first, | 2200 earlier, such that we naturally see these variables first, |
1528 and thus naturally allocate things in the right order. */ | 2201 and thus naturally allocate things in the right order. */ |
1529 if (has_protected_decls) | 2202 if (has_protected_decls) |
1530 { | 2203 { |
1531 /* Phase 1 contains only character arrays. */ | 2204 /* Phase 1 contains only character arrays. */ |
1532 expand_stack_vars (stack_protect_decl_phase_1); | 2205 expand_stack_vars (stack_protect_decl_phase_1, &data); |
1533 | 2206 |
1534 /* Phase 2 contains other kinds of arrays. */ | 2207 /* Phase 2 contains other kinds of arrays. */ |
1535 if (flag_stack_protect == 2) | 2208 if (flag_stack_protect == SPCT_FLAG_ALL |
1536 expand_stack_vars (stack_protect_decl_phase_2); | 2209 || flag_stack_protect == SPCT_FLAG_STRONG |
1537 } | 2210 || (flag_stack_protect == SPCT_FLAG_EXPLICIT |
1538 | 2211 && lookup_attribute ("stack_protect", |
1539 expand_stack_vars (NULL); | 2212 DECL_ATTRIBUTES (current_function_decl)))) |
1540 | 2213 expand_stack_vars (stack_protect_decl_phase_2, &data); |
1541 fini_vars_expansion (); | 2214 } |
1542 } | 2215 |
2216 if (asan_sanitize_stack_p ()) | |
2217 /* Phase 3, any partitions that need asan protection | |
2218 in addition to phase 1 and 2. */ | |
2219 expand_stack_vars (asan_decl_phase_3, &data); | |
2220 | |
2221 if (!data.asan_vec.is_empty ()) | |
2222 { | |
2223 HOST_WIDE_INT prev_offset = frame_offset; | |
2224 HOST_WIDE_INT offset, sz, redzonesz; | |
2225 redzonesz = ASAN_RED_ZONE_SIZE; | |
2226 sz = data.asan_vec[0] - prev_offset; | |
2227 if (data.asan_alignb > ASAN_RED_ZONE_SIZE | |
2228 && data.asan_alignb <= 4096 | |
2229 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb) | |
2230 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1) | |
2231 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz; | |
2232 offset | |
2233 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE); | |
2234 data.asan_vec.safe_push (prev_offset); | |
2235 data.asan_vec.safe_push (offset); | |
2236 /* Leave space for alignment if STRICT_ALIGNMENT. */ | |
2237 if (STRICT_ALIGNMENT) | |
2238 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode) | |
2239 << ASAN_SHADOW_SHIFT) | |
2240 / BITS_PER_UNIT, 1); | |
2241 | |
2242 var_end_seq | |
2243 = asan_emit_stack_protection (virtual_stack_vars_rtx, | |
2244 data.asan_base, | |
2245 data.asan_alignb, | |
2246 data.asan_vec.address (), | |
2247 data.asan_decl_vec.address (), | |
2248 data.asan_vec.length ()); | |
2249 } | |
2250 | |
2251 expand_stack_vars (NULL, &data); | |
2252 } | |
2253 | |
2254 if (asan_sanitize_allocas_p () && cfun->calls_alloca) | |
2255 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx, | |
2256 virtual_stack_vars_rtx, | |
2257 var_end_seq); | |
2258 | |
2259 fini_vars_expansion (); | |
1543 | 2260 |
1544 /* If there were any artificial non-ignored vars without rtl | 2261 /* If there were any artificial non-ignored vars without rtl |
1545 found earlier, see if deferred stack allocation hasn't assigned | 2262 found earlier, see if deferred stack allocation hasn't assigned |
1546 rtl to them. */ | 2263 rtl to them. */ |
1547 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var) | 2264 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var) |
1548 { | 2265 { |
1549 rtx rtl = DECL_RTL_IF_SET (var); | 2266 rtx rtl = DECL_RTL_IF_SET (var); |
1550 | 2267 |
1551 /* Keep artificial non-ignored vars in cfun->local_decls | 2268 /* Keep artificial non-ignored vars in cfun->local_decls |
1552 chain until instantiate_decls. */ | 2269 chain until instantiate_decls. */ |
1553 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) | 2270 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) |
1554 add_local_decl (cfun, var); | 2271 add_local_decl (cfun, var); |
1555 } | 2272 } |
1556 VEC_free (tree, heap, maybe_local_decls); | |
1557 | 2273 |
1558 /* If the target requires that FRAME_OFFSET be aligned, do it. */ | 2274 /* If the target requires that FRAME_OFFSET be aligned, do it. */ |
1559 if (STACK_ALIGNMENT_NEEDED) | 2275 if (STACK_ALIGNMENT_NEEDED) |
1560 { | 2276 { |
1561 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 2277 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1562 if (!FRAME_GROWS_DOWNWARD) | 2278 if (!FRAME_GROWS_DOWNWARD) |
1563 frame_offset += align - 1; | 2279 frame_offset += align - 1; |
1564 frame_offset &= -align; | 2280 frame_offset &= -align; |
1565 } | 2281 } |
2282 | |
2283 return var_end_seq; | |
1566 } | 2284 } |
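[editor's note] The red-zone sizing in the asan block above grows the trailing red zone so the protected area ends on the largest protected alignment. A standalone sketch of that arithmetic (illustrative; assumes ASAN_RED_ZONE_SIZE is 32, its value in this revision's asan.h):

    #include <stdio.h>

    #define RZ 32 /* stand-in for ASAN_RED_ZONE_SIZE */

    /* Mirror of the redzonesz computation: pad so sz + result is a
       multiple of alignb when alignb is in (RZ, 4096] and reachable.  */
    static long
    redzone_size (long sz, long alignb)
    {
      long redzonesz = RZ;
      if (alignb > RZ && alignb <= 4096 && sz + RZ >= alignb)
        redzonesz = ((sz + RZ + alignb - 1) & ~(alignb - 1)) - sz;
      return redzonesz;
    }

    int
    main (void)
    {
      printf ("%ld\n", redzone_size (40, 64));  /* 88: pads 40 up to 128 */
      printf ("%ld\n", redzone_size (40, 16));  /* 32: default red zone */
      return 0;
    }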
1567 | 2285 |
1568 | 2286 |
1569 /* If we need to produce a detailed dump, print the tree representation | 2287 /* If we need to produce a detailed dump, print the tree representation |
1570 for STMT to the dump file. SINCE is the last RTX after which the RTL | 2288 for STMT to the dump file. SINCE is the last RTX after which the RTL |
1571 generated for STMT should have been appended. */ | 2289 generated for STMT should have been appended. */ |
1572 | 2290 |
1573 static void | 2291 static void |
1574 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since) | 2292 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since) |
1575 { | 2293 { |
1576 if (dump_file && (dump_flags & TDF_DETAILS)) | 2294 if (dump_file && (dump_flags & TDF_DETAILS)) |
1577 { | 2295 { |
1578 fprintf (dump_file, "\n;; "); | 2296 fprintf (dump_file, "\n;; "); |
1579 print_gimple_stmt (dump_file, stmt, 0, | 2297 print_gimple_stmt (dump_file, stmt, 0, |
1584 } | 2302 } |
1585 } | 2303 } |
1586 | 2304 |
1587 /* Maps the blocks that do not contain tree labels to rtx labels. */ | 2305 /* Maps the blocks that do not contain tree labels to rtx labels. */ |
1588 | 2306 |
1589 static struct pointer_map_t *lab_rtx_for_bb; | 2307 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb; |
1590 | 2308 |
1591 /* Returns the label_rtx expression for a label starting basic block BB. */ | 2309 /* Returns the label_rtx expression for a label starting basic block BB. */ |
1592 | 2310 |
1593 static rtx | 2311 static rtx_code_label * |
1594 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED) | 2312 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED) |
1595 { | 2313 { |
1596 gimple_stmt_iterator gsi; | 2314 gimple_stmt_iterator gsi; |
1597 tree lab; | 2315 tree lab; |
1598 gimple lab_stmt; | |
1599 void **elt; | |
1600 | 2316 |
1601 if (bb->flags & BB_RTL) | 2317 if (bb->flags & BB_RTL) |
1602 return block_label (bb); | 2318 return block_label (bb); |
1603 | 2319 |
1604 elt = pointer_map_contains (lab_rtx_for_bb, bb); | 2320 rtx_code_label **elt = lab_rtx_for_bb->get (bb); |
1605 if (elt) | 2321 if (elt) |
1606 return (rtx) *elt; | 2322 return *elt; |
1607 | 2323 |
1608 /* Find the tree label if it is present. */ | 2324 /* Find the tree label if it is present. */ |
1609 | 2325 |
1610 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | 2326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
1611 { | 2327 { |
1612 lab_stmt = gsi_stmt (gsi); | 2328 glabel *lab_stmt; |
1613 if (gimple_code (lab_stmt) != GIMPLE_LABEL) | 2329 |
2330 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)); | |
2331 if (!lab_stmt) | |
1614 break; | 2332 break; |
1615 | 2333 |
1616 lab = gimple_label_label (lab_stmt); | 2334 lab = gimple_label_label (lab_stmt); |
1617 if (DECL_NONLOCAL (lab)) | 2335 if (DECL_NONLOCAL (lab)) |
1618 break; | 2336 break; |
1619 | 2337 |
1620 return label_rtx (lab); | 2338 return jump_target_rtx (lab); |
1621 } | 2339 } |
1622 | 2340 |
1623 elt = pointer_map_insert (lab_rtx_for_bb, bb); | 2341 rtx_code_label *l = gen_label_rtx (); |
1624 *elt = gen_label_rtx (); | 2342 lab_rtx_for_bb->put (bb, l); |
1625 return (rtx) *elt; | 2343 return l; |
1626 } | 2344 } |
1627 | 2345 |
1628 | 2346 |
1629 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge | 2347 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge |
1630 of a basic block where we just expanded the conditional at the end, | 2348 of a basic block where we just expanded the conditional at the end, |
1631 possibly clean up the CFG and instruction sequence. LAST is the | 2349 possibly clean up the CFG and instruction sequence. LAST is the |
1632 last instruction before the just emitted jump sequence. */ | 2350 last instruction before the just emitted jump sequence. */ |
1633 | 2351 |
1634 static void | 2352 static void |
1635 maybe_cleanup_end_of_block (edge e, rtx last) | 2353 maybe_cleanup_end_of_block (edge e, rtx_insn *last) |
1636 { | 2354 { |
1637 /* Special case: when jumpif decides that the condition is | 2355 /* Special case: when jumpif decides that the condition is |
1638 trivial it emits an unconditional jump (and the necessary | 2356 trivial it emits an unconditional jump (and the necessary |
1639 barrier). But we still have two edges, the fallthru one is | 2357 barrier). But we still have two edges, the fallthru one is |
1640 wrong. purge_dead_edges would clean this up later. Unfortunately | 2358 wrong. purge_dead_edges would clean this up later. Unfortunately |
1645 barrier would get lost and verify_flow_info would ICE. Instead | 2363 barrier would get lost and verify_flow_info would ICE. Instead |
1646 of auditing all edge splitters to care for the barrier (which | 2364 of auditing all edge splitters to care for the barrier (which |
1647 normally isn't there in a cleaned CFG), fix it here. */ | 2365 normally isn't there in a cleaned CFG), fix it here. */ |
1648 if (BARRIER_P (get_last_insn ())) | 2366 if (BARRIER_P (get_last_insn ())) |
1649 { | 2367 { |
1650 rtx insn; | 2368 rtx_insn *insn; |
1651 remove_edge (e); | 2369 remove_edge (e); |
1652 /* Now, we have a single successor block, if we have insns to | 2370 /* Now, we have a single successor block, if we have insns to |
1653 insert on the remaining edge we potentially will insert | 2371 insert on the remaining edge we potentially will insert |
1654 it at the end of this block (if the dest block isn't feasible) | 2372 it at the end of this block (if the dest block isn't feasible) |
1655 in order to avoid splitting the edge. This insertion will take | 2373 in order to avoid splitting the edge. This insertion will take |
1681 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. | 2399 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. |
1682 Returns a new basic block if we've terminated the current basic | 2400 Returns a new basic block if we've terminated the current basic |
1683 block and created a new one. */ | 2401 block and created a new one. */ |
1684 | 2402 |
1685 static basic_block | 2403 static basic_block |
1686 expand_gimple_cond (basic_block bb, gimple stmt) | 2404 expand_gimple_cond (basic_block bb, gcond *stmt) |
1687 { | 2405 { |
1688 basic_block new_bb, dest; | 2406 basic_block new_bb, dest; |
1689 edge new_edge; | |
1690 edge true_edge; | 2407 edge true_edge; |
1691 edge false_edge; | 2408 edge false_edge; |
1692 rtx last2, last; | 2409 rtx_insn *last2, *last; |
1693 enum tree_code code; | 2410 enum tree_code code; |
1694 tree op0, op1; | 2411 tree op0, op1; |
1695 | 2412 |
1696 code = gimple_cond_code (stmt); | 2413 code = gimple_cond_code (stmt); |
1697 op0 = gimple_cond_lhs (stmt); | 2414 op0 = gimple_cond_lhs (stmt); |
1702 ... | 2419 ... |
1703 This would expand to two comparisons which then later might | 2420 This would expand to two comparisons which then later might |
1704 be cleaned up by combine. But some pattern matchers like if-conversion | 2421 be cleaned up by combine. But some pattern matchers like if-conversion |
1705 work better when there's only one compare, so make up for this | 2422 work better when there's only one compare, so make up for this |
1706 here as special exception if TER would have made the same change. */ | 2423 here as special exception if TER would have made the same change. */ |
1707 if (gimple_cond_single_var_p (stmt) | 2424 if (SA.values |
1708 && SA.values | |
1709 && TREE_CODE (op0) == SSA_NAME | 2425 && TREE_CODE (op0) == SSA_NAME |
2426 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE | |
2427 && TREE_CODE (op1) == INTEGER_CST | |
2428 && ((gimple_cond_code (stmt) == NE_EXPR | |
2429 && integer_zerop (op1)) | |
2430 || (gimple_cond_code (stmt) == EQ_EXPR | |
2431 && integer_onep (op1))) | |
1710 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) | 2432 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) |
1711 { | 2433 { |
1712 gimple second = SSA_NAME_DEF_STMT (op0); | 2434 gimple *second = SSA_NAME_DEF_STMT (op0); |
1713 if (gimple_code (second) == GIMPLE_ASSIGN) | 2435 if (gimple_code (second) == GIMPLE_ASSIGN) |
1714 { | 2436 { |
1715 enum tree_code code2 = gimple_assign_rhs_code (second); | 2437 enum tree_code code2 = gimple_assign_rhs_code (second); |
1716 if (TREE_CODE_CLASS (code2) == tcc_comparison) | 2438 if (TREE_CODE_CLASS (code2) == tcc_comparison) |
1717 { | 2439 { |
1718 code = code2; | 2440 code = code2; |
1719 op0 = gimple_assign_rhs1 (second); | 2441 op0 = gimple_assign_rhs1 (second); |
1720 op1 = gimple_assign_rhs2 (second); | 2442 op1 = gimple_assign_rhs2 (second); |
1721 } | 2443 } |
1722 /* If jumps are cheap turn some more codes into | 2444 /* If jumps are cheap and the target does not support conditional |
1723 jumpy sequences. */ | 2445 compare, turn some more codes into jumpy sequences. */ |
1724 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) | 2446 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4 |
2447 && targetm.gen_ccmp_first == NULL) | |
1725 { | 2448 { |
1726 if ((code2 == BIT_AND_EXPR | 2449 if ((code2 == BIT_AND_EXPR |
1727 && TYPE_PRECISION (TREE_TYPE (op0)) == 1 | 2450 && TYPE_PRECISION (TREE_TYPE (op0)) == 1 |
1728 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) | 2451 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) |
1729 || code2 == TRUTH_AND_EXPR) | 2452 || code2 == TRUTH_AND_EXPR) |
1743 } | 2466 } |
1744 | 2467 |
1745 last2 = last = get_last_insn (); | 2468 last2 = last = get_last_insn (); |
1746 | 2469 |
1747 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | 2470 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); |
1748 if (gimple_has_location (stmt)) | 2471 set_curr_insn_location (gimple_location (stmt)); |
1749 { | |
1750 set_curr_insn_source_location (gimple_location (stmt)); | |
1751 set_curr_insn_block (gimple_block (stmt)); | |
1752 } | |
1753 | 2472 |
1754 /* These flags have no purpose in RTL land. */ | 2473 /* These flags have no purpose in RTL land. */ |
1755 true_edge->flags &= ~EDGE_TRUE_VALUE; | 2474 true_edge->flags &= ~EDGE_TRUE_VALUE; |
1756 false_edge->flags &= ~EDGE_FALSE_VALUE; | 2475 false_edge->flags &= ~EDGE_FALSE_VALUE; |
1757 | 2476 |
1760 if (false_edge->dest == bb->next_bb) | 2479 if (false_edge->dest == bb->next_bb) |
1761 { | 2480 { |
1762 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), | 2481 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1763 true_edge->probability); | 2482 true_edge->probability); |
1764 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 2483 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1765 if (true_edge->goto_locus) | 2484 if (true_edge->goto_locus != UNKNOWN_LOCATION) |
1766 { | 2485 set_curr_insn_location (true_edge->goto_locus); |
1767 set_curr_insn_source_location (true_edge->goto_locus); | |
1768 set_curr_insn_block (true_edge->goto_block); | |
1769 true_edge->goto_locus = curr_insn_locator (); | |
1770 } | |
1771 true_edge->goto_block = NULL; | |
1772 false_edge->flags |= EDGE_FALLTHRU; | 2486 false_edge->flags |= EDGE_FALLTHRU; |
1773 maybe_cleanup_end_of_block (false_edge, last); | 2487 maybe_cleanup_end_of_block (false_edge, last); |
1774 return NULL; | 2488 return NULL; |
1775 } | 2489 } |
1776 if (true_edge->dest == bb->next_bb) | 2490 if (true_edge->dest == bb->next_bb) |
1777 { | 2491 { |
1778 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest), | 2492 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest), |
1779 false_edge->probability); | 2493 false_edge->probability); |
1780 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 2494 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1781 if (false_edge->goto_locus) | 2495 if (false_edge->goto_locus != UNKNOWN_LOCATION) |
1782 { | 2496 set_curr_insn_location (false_edge->goto_locus); |
1783 set_curr_insn_source_location (false_edge->goto_locus); | |
1784 set_curr_insn_block (false_edge->goto_block); | |
1785 false_edge->goto_locus = curr_insn_locator (); | |
1786 } | |
1787 false_edge->goto_block = NULL; | |
1788 true_edge->flags |= EDGE_FALLTHRU; | 2497 true_edge->flags |= EDGE_FALLTHRU; |
1789 maybe_cleanup_end_of_block (true_edge, last); | 2498 maybe_cleanup_end_of_block (true_edge, last); |
1790 return NULL; | 2499 return NULL; |
1791 } | 2500 } |
1792 | 2501 |
1793 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), | 2502 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1794 true_edge->probability); | 2503 true_edge->probability); |
1795 last = get_last_insn (); | 2504 last = get_last_insn (); |
1796 if (false_edge->goto_locus) | 2505 if (false_edge->goto_locus != UNKNOWN_LOCATION) |
1797 { | 2506 set_curr_insn_location (false_edge->goto_locus); |
1798 set_curr_insn_source_location (false_edge->goto_locus); | |
1799 set_curr_insn_block (false_edge->goto_block); | |
1800 false_edge->goto_locus = curr_insn_locator (); | |
1801 } | |
1802 false_edge->goto_block = NULL; | |
1803 emit_jump (label_rtx_for_bb (false_edge->dest)); | 2507 emit_jump (label_rtx_for_bb (false_edge->dest)); |
1804 | 2508 |
1805 BB_END (bb) = last; | 2509 BB_END (bb) = last; |
1806 if (BARRIER_P (BB_END (bb))) | 2510 if (BARRIER_P (BB_END (bb))) |
1807 BB_END (bb) = PREV_INSN (BB_END (bb)); | 2511 BB_END (bb) = PREV_INSN (BB_END (bb)); |
1809 | 2513 |
1810 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | 2514 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); |
1811 dest = false_edge->dest; | 2515 dest = false_edge->dest; |
1812 redirect_edge_succ (false_edge, new_bb); | 2516 redirect_edge_succ (false_edge, new_bb); |
1813 false_edge->flags |= EDGE_FALLTHRU; | 2517 false_edge->flags |= EDGE_FALLTHRU; |
1814 new_bb->count = false_edge->count; | 2518 new_bb->count = false_edge->count (); |
1815 new_bb->frequency = EDGE_FREQUENCY (false_edge); | 2519 new_bb->frequency = EDGE_FREQUENCY (false_edge); |
1816 new_edge = make_edge (new_bb, dest, 0); | 2520 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father); |
1817 new_edge->probability = REG_BR_PROB_BASE; | 2521 add_bb_to_loop (new_bb, loop); |
1818 new_edge->count = new_bb->count; | 2522 if (loop->latch == bb |
2523 && loop->header == dest) | |
2524 loop->latch = new_bb; | |
2525 make_single_succ_edge (new_bb, dest, 0); | |
1819 if (BARRIER_P (BB_END (new_bb))) | 2526 if (BARRIER_P (BB_END (new_bb))) |
1820 BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); | 2527 BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); |
1821 update_bb_for_insn (new_bb); | 2528 update_bb_for_insn (new_bb); |
1822 | 2529 |
1823 maybe_dump_rtl_for_gimple_stmt (stmt, last2); | 2530 maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
1824 | 2531 |
1825 if (true_edge->goto_locus) | 2532 if (true_edge->goto_locus != UNKNOWN_LOCATION) |
1826 { | 2533 { |
1827 set_curr_insn_source_location (true_edge->goto_locus); | 2534 set_curr_insn_location (true_edge->goto_locus); |
1828 set_curr_insn_block (true_edge->goto_block); | 2535 true_edge->goto_locus = curr_insn_location (); |
1829 true_edge->goto_locus = curr_insn_locator (); | 2536 } |
1830 } | |
1831 true_edge->goto_block = NULL; | |
1832 | 2537 |
1833 return new_bb; | 2538 return new_bb; |
2539 } | |
2540 | |
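For reference, a minimal GNU C sketch (an illustration, not code from this file) of the single-compare special case above: the front end gimplifies the comparison into a boolean temporary tested against zero, and expand_gimple_cond folds it back into one compare-and-branch when TER would have made the same change.

    /* "t = a < b; if (t != 0)" at the gimple level expands to a single
       compare-and-branch via the NE_EXPR/integer_zerop case above.  */
    int pick (int a, int b, int x, int y)
    {
      _Bool t = a < b;   /* defining statement found via SSA_NAME_DEF_STMT */
      if (t)
        return x;
      return y;
    }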
2541 /* Mark all calls that can have a transaction restart. */ | |
2542 | |
2543 static void | |
2544 mark_transaction_restart_calls (gimple *stmt) | |
2545 { | |
2546 struct tm_restart_node dummy; | |
2547 tm_restart_node **slot; | |
2548 | |
2549 if (!cfun->gimple_df->tm_restart) | |
2550 return; | |
2551 | |
2552 dummy.stmt = stmt; | |
2553 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT); | |
2554 if (slot) | |
2555 { | |
2556 struct tm_restart_node *n = *slot; | |
2557 tree list = n->label_or_list; | |
2558 rtx_insn *insn; | |
2559 | |
2560 for (insn = next_real_insn (get_last_insn ()); | |
2561 !CALL_P (insn); | |
2562 insn = next_real_insn (insn)) | |
2563 continue; | |
2564 | |
2565 if (TREE_CODE (list) == LABEL_DECL) | |
2566 add_reg_note (insn, REG_TM, label_rtx (list)); | |
2567 else | |
2568 for (; list ; list = TREE_CHAIN (list)) | |
2569 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list))); | |
2570 } | |
1834 } | 2571 } |
1835 | 2572 |
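A hedged sketch of where the REG_TM notes matter (assumes -fgnu-tm and that the call was recorded in cfun->gimple_df->tm_restart by the transactional-memory passes; the helper name is illustrative):

    /* Compile with -fgnu-tm.  The call inside the transaction may cause
       a restart, so its expanded CALL insn is given a REG_TM note.  */
    extern void bump (void) __attribute__ ((transaction_safe));

    void tick (void)
    {
      __transaction_atomic { bump (); }
    }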
1836 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL | 2573 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL |
1837 statement STMT. */ | 2574 statement STMT. */ |
1838 | 2575 |
1839 static void | 2576 static void |
1840 expand_call_stmt (gimple stmt) | 2577 expand_call_stmt (gcall *stmt) |
1841 { | 2578 { |
1842 tree exp; | 2579 tree exp, decl, lhs; |
1843 tree lhs = gimple_call_lhs (stmt); | 2580 bool builtin_p; |
1844 size_t i; | 2581 size_t i; |
1845 bool builtin_p; | 2582 |
1846 tree decl; | 2583 if (gimple_call_internal_p (stmt)) |
2584 { | |
2585 expand_internal_call (stmt); | |
2586 return; | |
2587 } | |
2588 | |
2589 /* If this is a call to a built-in function and it has no effect other | |
2590 than setting the lhs, try to implement it using an internal function | |
2591 instead. */ | |
2592 decl = gimple_call_fndecl (stmt); | |
2593 if (gimple_call_lhs (stmt) | |
2594 && !gimple_has_side_effects (stmt) | |
2595 && (optimize || (decl && called_as_built_in (decl)))) | |
2596 { | |
2597 internal_fn ifn = replacement_internal_fn (stmt); | |
2598 if (ifn != IFN_LAST) | |
2599 { | |
2600 expand_internal_call (ifn, stmt); | |
2601 return; | |
2602 } | |
2603 } | |
1847 | 2604 |
1848 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | 2605 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); |
1849 | 2606 |
1850 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); | 2607 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); |
1851 decl = gimple_call_fndecl (stmt); | |
1852 builtin_p = decl && DECL_BUILT_IN (decl); | 2608 builtin_p = decl && DECL_BUILT_IN (decl); |
2609 | |
2610 /* If this is not a builtin function, the function type through which the | |
2611 call is made may be different from the type of the function. */ | |
2612 if (!builtin_p) | |
2613 CALL_EXPR_FN (exp) | |
2614 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)), | |
2615 CALL_EXPR_FN (exp)); | |
1853 | 2616 |
1854 TREE_TYPE (exp) = gimple_call_return_type (stmt); | 2617 TREE_TYPE (exp) = gimple_call_return_type (stmt); |
1855 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); | 2618 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); |
1856 | 2619 |
1857 for (i = 0; i < gimple_call_num_args (stmt); i++) | 2620 for (i = 0; i < gimple_call_num_args (stmt); i++) |
1858 { | 2621 { |
1859 tree arg = gimple_call_arg (stmt, i); | 2622 tree arg = gimple_call_arg (stmt, i); |
1860 gimple def; | 2623 gimple *def; |
1861 /* TER substitutes addresses into arguments of builtin functions so we | 2624 /* TER substitutes addresses into arguments of builtin functions so we
1862 have a chance to infer more correct alignment information. See PR39954. */ | 2625 have a chance to infer more correct alignment information. See PR39954. */
1863 if (builtin_p | 2626 if (builtin_p |
1864 && TREE_CODE (arg) == SSA_NAME | 2627 && TREE_CODE (arg) == SSA_NAME |
1865 && (def = get_gimple_for_ssa_name (arg)) | 2628 && (def = get_gimple_for_ssa_name (arg)) |
1873 | 2636 |
1874 if (gimple_call_nothrow_p (stmt)) | 2637 if (gimple_call_nothrow_p (stmt)) |
1875 TREE_NOTHROW (exp) = 1; | 2638 TREE_NOTHROW (exp) = 1; |
1876 | 2639 |
1877 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); | 2640 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); |
2641 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt); | |
1878 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); | 2642 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); |
1879 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); | 2643 if (decl |
1880 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt); | 2644 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL |
2645 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl))) | |
2646 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt); | |
2647 else | |
2648 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); | |
1881 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); | 2649 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); |
2650 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt); | |
1882 SET_EXPR_LOCATION (exp, gimple_location (stmt)); | 2651 SET_EXPR_LOCATION (exp, gimple_location (stmt)); |
1883 TREE_BLOCK (exp) = gimple_block (stmt); | 2652 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt); |
1884 | 2653 |
2654 /* Ensure RTL is created for debug args. */ | |
2655 if (decl && DECL_HAS_DEBUG_ARGS_P (decl)) | |
2656 { | |
2657 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl); | |
2658 unsigned int ix; | |
2659 tree dtemp; | |
2660 | |
2661 if (debug_args) | |
2662 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2) | |
2663 { | |
2664 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL); | |
2665 expand_debug_expr (dtemp); | |
2666 } | |
2667 } | |
2668 | |
2669 rtx_insn *before_call = get_last_insn (); | |
2670 lhs = gimple_call_lhs (stmt); | |
1885 if (lhs) | 2671 if (lhs) |
1886 expand_assignment (lhs, exp, false); | 2672 expand_assignment (lhs, exp, false); |
1887 else | 2673 else |
1888 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL); | 2674 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); |
2675 | |
2676 /* If the gimple call is an indirect call and has 'nocf_check' | |
2677 attribute find a generated CALL insn to mark it as no | |
2678 control-flow verification is needed. */ | |
2679 if (gimple_call_nocf_check_p (stmt) | |
2680 && !gimple_call_fndecl (stmt)) | |
2681 { | |
2682 rtx_insn *last = get_last_insn (); | |
2683 while (!CALL_P (last) | |
2684 && last != before_call) | |
2685 last = PREV_INSN (last); | |
2686 | |
2687 if (last != before_call) | |
2688 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx); | |
2689 } | |
2690 | |
2691 mark_transaction_restart_calls (stmt); | |
2692 } | |
2693 | |
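As an illustration of the replacement_internal_fn path above (a sketch; whether it fires depends on the target having a direct optab for the operation):

    /* With -fno-math-errno the builtin has no effect beyond its lhs, so
       on targets with a hardware sqrt it may expand via an internal
       function rather than a library call.  */
    double root (double x)
    {
      return __builtin_sqrt (x);
    }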
2694 | |
2695 /* Generate RTL for an asm statement (explicit assembler code). | |
2696 STRING is a STRING_CST node containing the assembler code text, | |
2697 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the | |
2698 insn is volatile; don't optimize it. */ | |
2699 | |
2700 static void | |
2701 expand_asm_loc (tree string, int vol, location_t locus) | |
2702 { | |
2703 rtx body; | |
2704 | |
2705 body = gen_rtx_ASM_INPUT_loc (VOIDmode, | |
2706 ggc_strdup (TREE_STRING_POINTER (string)), | |
2707 locus); | |
2708 | |
2709 MEM_VOLATILE_P (body) = vol; | |
2710 | |
2711 /* Non-empty basic ASM implicitly clobbers memory. */ | |
2712 if (TREE_STRING_LENGTH (string) != 0) | |
2713 { | |
2714 rtx asm_op, clob; | |
2715 unsigned i, nclobbers; | |
2716 auto_vec<rtx> input_rvec, output_rvec; | |
2717 auto_vec<const char *> constraints; | |
2718 auto_vec<rtx> clobber_rvec; | |
2719 HARD_REG_SET clobbered_regs; | |
2720 CLEAR_HARD_REG_SET (clobbered_regs); | |
2721 | |
2722 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)); | |
2723 clobber_rvec.safe_push (clob); | |
2724 | |
2725 if (targetm.md_asm_adjust) | |
2726 targetm.md_asm_adjust (output_rvec, input_rvec, | |
2727 constraints, clobber_rvec, | |
2728 clobbered_regs); | |
2729 | |
2730 asm_op = body; | |
2731 nclobbers = clobber_rvec.length (); | |
2732 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers)); | |
2733 | |
2734 XVECEXP (body, 0, 0) = asm_op; | |
2735 for (i = 0; i < nclobbers; i++) | |
2736 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]); | |
2737 } | |
2738 | |
2739 emit_insn (body); | |
2740 } | |
2741 | |
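A small sketch of the rule encoded above: a non-empty basic asm receives an implicit memory clobber, while an empty template gets none (the "pause" mnemonic assumes an x86 target):

    /* Non-empty basic asm: the expansion above adds a (mem (scratch))
       clobber, so memory values are not cached across it.  */
    void cpu_relax (void)
    {
      __asm__ volatile ("pause");
    }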
2742 /* Return the number of times character C occurs in string S. */ | |
2743 static int | |
2744 n_occurrences (int c, const char *s) | |
2745 { | |
2746 int n = 0; | |
2747 while (*s) | |
2748 n += (*s++ == c); | |
2749 return n; | |
2750 } | |
2751 | |
2752 /* A subroutine of expand_asm_operands. Check that all operands have | |
2753 the same number of alternatives. Return true if so. */ | |
2754 | |
2755 static bool | |
2756 check_operand_nalternatives (const vec<const char *> &constraints) | |
2757 { | |
2758 unsigned len = constraints.length(); | |
2759 if (len > 0) | |
2760 { | |
2761 int nalternatives = n_occurrences (',', constraints[0]); | |
2762 | |
2763 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES) | |
2764 { | |
2765 error ("too many alternatives in %<asm%>"); | |
2766 return false; | |
2767 } | |
2768 | |
2769 for (unsigned i = 1; i < len; ++i) | |
2770 if (n_occurrences (',', constraints[i]) != nalternatives) | |
2771 { | |
2772 error ("operand constraints for %<asm%> differ " | |
2773 "in number of alternatives"); | |
2774 return false; | |
2775 } | |
2776 } | |
2777 return true; | |
2778 } | |
2779 | |
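A hedged sketch of multi-alternative constraints (x86 AT&T syntax assumed): alternatives are comma-separated, and check_operand_nalternatives insists that every operand list the same number of them.

    /* Two alternatives per operand (reg->reg or reg->mem), so the
       check above passes; "=r" paired with "r,r" would be rejected.  */
    int store (int x)
    {
      int y;
      __asm__ ("mov %1, %0" : "=r,m" (y) : "r,r" (x));
      return y;
    }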
2780 /* Check for overlap between registers marked in CLOBBERED_REGS and | 
2781 anything inappropriate in T. Emit an error and return true for | 
2782 error, false for ok. */ | 
2783 | |
2784 static bool | |
2785 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs) | |
2786 { | |
2787 /* Conflicts between asm-declared register variables and the clobber | |
2788 list are not allowed. */ | |
2789 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs); | |
2790 | |
2791 if (overlap) | |
2792 { | |
2793 error ("asm-specifier for variable %qE conflicts with asm clobber list", | |
2794 DECL_NAME (overlap)); | |
2795 | |
2796 /* Reset registerness to stop multiple errors emitted for a single | |
2797 variable. */ | |
2798 DECL_REGISTER (overlap) = 0; | |
2799 return true; | |
2800 } | |
2801 | |
2802 return false; | |
2803 } | |
2804 | |
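A sketch of what this check rejects (assumes an x86 target; the variable name is illustrative): an operand declared with a register asm specifier whose register also appears in the clobber list.

    /* The output is pinned to %eax by declaration, but %eax is also
       clobbered; tree_conflicts_with_clobbers_p reports the conflict.  */
    int bad (void)
    {
      register int v __asm__ ("eax") = 0;
      __asm__ ("" : "+r" (v) : : "eax");
      return v;
    }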
2805 /* Generate RTL for the asm statement STMT, which has operands. | 
2806 The instruction template is the statement's string. | 
2807 Its operand lists give the output arguments (lvalues) and the inputs. | 
2808 Each output or input has an expression in the TREE_VALUE and | 
2809 a tree list in TREE_PURPOSE which in turn contains a constraint | 
2810 name in TREE_VALUE (or NULL_TREE) and a constraint string | 
2811 in TREE_PURPOSE. | 
2812 The clobbers are a list of STRING_CST nodes each naming a hard register | 
2813 that is clobbered by this insn. | 
2814 | 
2815 If the statement carries labels (asm goto), the fallthru basic block | 
2816 used below is the fallthru successor of the asm's block. | 
2817 | 
2818 Not all kinds of lvalue that may appear in the outputs can be stored | 
2819 directly. Some elements of the outputs may be replaced with trees | 
2820 representing temporary values. The caller should copy those temporary | 
2821 values to the originally specified lvalues. | 
2822 | 
2823 The insn is volatile if gimple_asm_volatile_p; don't optimize it. */ | 
2824 | |
2825 static void | |
2826 expand_asm_stmt (gasm *stmt) | |
2827 { | |
2828 class save_input_location | |
2829 { | |
2830 location_t old; | |
2831 | |
2832 public: | |
2833 explicit save_input_location(location_t where) | |
2834 { | |
2835 old = input_location; | |
2836 input_location = where; | |
2837 } | |
2838 | |
2839 ~save_input_location() | |
2840 { | |
2841 input_location = old; | |
2842 } | |
2843 }; | |
2844 | |
2845 location_t locus = gimple_location (stmt); | |
2846 | |
2847 if (gimple_asm_input_p (stmt)) | |
2848 { | |
2849 const char *s = gimple_asm_string (stmt); | |
2850 tree string = build_string (strlen (s), s); | |
2851 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus); | |
2852 return; | |
2853 } | |
2854 | |
2855 /* There are some legacy diagnostics in here, and this also avoids a | 
2856 sixth parameter to targetm.md_asm_adjust. */ | 
2857 save_input_location s_i_l(locus); | |
2858 | |
2859 unsigned noutputs = gimple_asm_noutputs (stmt); | |
2860 unsigned ninputs = gimple_asm_ninputs (stmt); | |
2861 unsigned nlabels = gimple_asm_nlabels (stmt); | |
2862 unsigned i; | |
2863 | |
2864 /* ??? Diagnose during gimplification? */ | |
2865 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS) | |
2866 { | |
2867 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS); | |
2868 return; | |
2869 } | |
2870 | |
2871 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec; | |
2872 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec; | |
2873 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints; | |
2874 | |
2875 /* Copy the gimple vectors into new vectors that we can manipulate. */ | |
2876 | |
2877 output_tvec.safe_grow (noutputs); | |
2878 input_tvec.safe_grow (ninputs); | |
2879 constraints.safe_grow (noutputs + ninputs); | |
2880 | |
2881 for (i = 0; i < noutputs; ++i) | |
2882 { | |
2883 tree t = gimple_asm_output_op (stmt, i); | |
2884 output_tvec[i] = TREE_VALUE (t); | |
2885 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); | |
2886 } | |
2887 for (i = 0; i < ninputs; i++) | |
2888 { | |
2889 tree t = gimple_asm_input_op (stmt, i); | |
2890 input_tvec[i] = TREE_VALUE (t); | |
2891 constraints[i + noutputs] | |
2892 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); | |
2893 } | |
2894 | |
2895 /* ??? Diagnose during gimplification? */ | |
2896 if (! check_operand_nalternatives (constraints)) | |
2897 return; | |
2898 | |
2899 /* Count the number of meaningful clobbered registers, ignoring what | |
2900 we would ignore later. */ | |
2901 auto_vec<rtx> clobber_rvec; | |
2902 HARD_REG_SET clobbered_regs; | |
2903 CLEAR_HARD_REG_SET (clobbered_regs); | |
2904 | |
2905 if (unsigned n = gimple_asm_nclobbers (stmt)) | |
2906 { | |
2907 clobber_rvec.reserve (n); | |
2908 for (i = 0; i < n; i++) | |
2909 { | |
2910 tree t = gimple_asm_clobber_op (stmt, i); | |
2911 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t)); | |
2912 int nregs, j; | |
2913 | |
2914 j = decode_reg_name_and_count (regname, &nregs); | |
2915 if (j < 0) | |
2916 { | |
2917 if (j == -2) | |
2918 { | |
2919 /* ??? Diagnose during gimplification? */ | |
2920 error ("unknown register name %qs in %<asm%>", regname); | |
2921 } | |
2922 else if (j == -4) | |
2923 { | |
2924 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)); | |
2925 clobber_rvec.safe_push (x); | |
2926 } | |
2927 else | |
2928 { | |
2929 /* Otherwise we should have -1 == empty string | |
2930 or -3 == cc, which is not a register. */ | |
2931 gcc_assert (j == -1 || j == -3); | |
2932 } | |
2933 } | |
2934 else | |
2935 for (int reg = j; reg < j + nregs; reg++) | |
2936 { | |
2937 /* Clobbering the PIC register is an error. */ | |
2938 if (reg == (int) PIC_OFFSET_TABLE_REGNUM) | |
2939 { | |
2940 /* ??? Diagnose during gimplification? */ | |
2941 error ("PIC register clobbered by %qs in %<asm%>", | |
2942 regname); | |
2943 return; | |
2944 } | |
2945 | |
2946 SET_HARD_REG_BIT (clobbered_regs, reg); | |
2947 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg); | |
2948 clobber_rvec.safe_push (x); | |
2949 } | |
2950 } | |
2951 } | |
2952 unsigned nclobbers = clobber_rvec.length(); | |
2953 | |
2954 /* First pass over inputs and outputs checks validity and sets | |
2955 mark_addressable if needed. */ | |
2956 /* ??? Diagnose during gimplification? */ | |
2957 | |
2958 for (i = 0; i < noutputs; ++i) | |
2959 { | |
2960 tree val = output_tvec[i]; | |
2961 tree type = TREE_TYPE (val); | |
2962 const char *constraint; | |
2963 bool is_inout; | |
2964 bool allows_reg; | |
2965 bool allows_mem; | |
2966 | |
2967 /* Try to parse the output constraint. If that fails, there's | |
2968 no point in going further. */ | |
2969 constraint = constraints[i]; | |
2970 if (!parse_output_constraint (&constraint, i, ninputs, noutputs, | |
2971 &allows_mem, &allows_reg, &is_inout)) | |
2972 return; | |
2973 | |
2974 if (! allows_reg | |
2975 && (allows_mem | |
2976 || is_inout | |
2977 || (DECL_P (val) | |
2978 && REG_P (DECL_RTL (val)) | |
2979 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))) | |
2980 mark_addressable (val); | |
2981 } | |
2982 | |
2983 for (i = 0; i < ninputs; ++i) | |
2984 { | |
2985 bool allows_reg, allows_mem; | |
2986 const char *constraint; | |
2987 | |
2988 constraint = constraints[i + noutputs]; | |
2989 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0, | |
2990 constraints.address (), | |
2991 &allows_mem, &allows_reg)) | |
2992 return; | |
2993 | |
2994 if (! allows_reg && allows_mem) | |
2995 mark_addressable (input_tvec[i]); | |
2996 } | |
2997 | |
2998 /* Second pass evaluates arguments. */ | |
2999 | |
3000 /* Make sure stack is consistent for asm goto. */ | |
3001 if (nlabels > 0) | |
3002 do_pending_stack_adjust (); | |
3003 int old_generating_concat_p = generating_concat_p; | |
3004 | |
3005 /* Vector of RTX's of evaluated output operands. */ | |
3006 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec; | |
3007 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum; | |
3008 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL; | |
3009 | |
3010 output_rvec.safe_grow (noutputs); | |
3011 | |
3012 for (i = 0; i < noutputs; ++i) | |
3013 { | |
3014 tree val = output_tvec[i]; | |
3015 tree type = TREE_TYPE (val); | |
3016 bool is_inout, allows_reg, allows_mem, ok; | |
3017 rtx op; | |
3018 | |
3019 ok = parse_output_constraint (&constraints[i], i, ninputs, | |
3020 noutputs, &allows_mem, &allows_reg, | |
3021 &is_inout); | |
3022 gcc_assert (ok); | |
3023 | |
3024 /* If an output operand is not a decl or indirect ref and our constraint | |
3025 allows a register, make a temporary to act as an intermediate. | |
3026 Make the asm insn write into that, then we will copy it to | |
3027 the real output operand. Likewise for promoted variables. */ | |
3028 | |
3029 generating_concat_p = 0; | |
3030 | |
3031 if ((TREE_CODE (val) == INDIRECT_REF | |
3032 && allows_mem) | |
3033 || (DECL_P (val) | |
3034 && (allows_mem || REG_P (DECL_RTL (val))) | |
3035 && ! (REG_P (DECL_RTL (val)) | |
3036 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))) | |
3037 || ! allows_reg | |
3038 || is_inout) | |
3039 { | |
3040 op = expand_expr (val, NULL_RTX, VOIDmode, | |
3041 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE); | |
3042 if (MEM_P (op)) | |
3043 op = validize_mem (op); | |
3044 | |
3045 if (! allows_reg && !MEM_P (op)) | |
3046 error ("output number %d not directly addressable", i); | |
3047 if ((! allows_mem && MEM_P (op)) | |
3048 || GET_CODE (op) == CONCAT) | |
3049 { | |
3050 rtx old_op = op; | |
3051 op = gen_reg_rtx (GET_MODE (op)); | |
3052 | |
3053 generating_concat_p = old_generating_concat_p; | |
3054 | |
3055 if (is_inout) | |
3056 emit_move_insn (op, old_op); | |
3057 | |
3058 push_to_sequence2 (after_rtl_seq, after_rtl_end); | |
3059 emit_move_insn (old_op, op); | |
3060 after_rtl_seq = get_insns (); | |
3061 after_rtl_end = get_last_insn (); | |
3062 end_sequence (); | |
3063 } | |
3064 } | |
3065 else | |
3066 { | |
3067 op = assign_temp (type, 0, 1); | |
3068 op = validize_mem (op); | |
3069 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME) | |
3070 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op); | |
3071 | |
3072 generating_concat_p = old_generating_concat_p; | |
3073 | |
3074 push_to_sequence2 (after_rtl_seq, after_rtl_end); | |
3075 expand_assignment (val, make_tree (type, op), false); | |
3076 after_rtl_seq = get_insns (); | |
3077 after_rtl_end = get_last_insn (); | |
3078 end_sequence (); | |
3079 } | |
3080 output_rvec[i] = op; | |
3081 | |
3082 if (is_inout) | |
3083 inout_opnum.safe_push (i); | |
3084 } | |
3085 | |
3086 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec; | |
3087 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode; | |
3088 | |
3089 input_rvec.safe_grow (ninputs); | |
3090 input_mode.safe_grow (ninputs); | |
3091 | |
3092 generating_concat_p = 0; | |
3093 | |
3094 for (i = 0; i < ninputs; ++i) | |
3095 { | |
3096 tree val = input_tvec[i]; | |
3097 tree type = TREE_TYPE (val); | |
3098 bool allows_reg, allows_mem, ok; | |
3099 const char *constraint; | |
3100 rtx op; | |
3101 | |
3102 constraint = constraints[i + noutputs]; | |
3103 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0, | |
3104 constraints.address (), | |
3105 &allows_mem, &allows_reg); | |
3106 gcc_assert (ok); | |
3107 | |
3108 /* EXPAND_INITIALIZER will not generate code for valid initializer | |
3109 constants, but will still generate code for other types of operand. | |
3110 This is the behavior we want for constant constraints. */ | |
3111 op = expand_expr (val, NULL_RTX, VOIDmode, | |
3112 allows_reg ? EXPAND_NORMAL | |
3113 : allows_mem ? EXPAND_MEMORY | |
3114 : EXPAND_INITIALIZER); | |
3115 | |
3116 /* Never pass a CONCAT to an ASM. */ | |
3117 if (GET_CODE (op) == CONCAT) | |
3118 op = force_reg (GET_MODE (op), op); | |
3119 else if (MEM_P (op)) | |
3120 op = validize_mem (op); | |
3121 | |
3122 if (asm_operand_ok (op, constraint, NULL) <= 0) | |
3123 { | |
3124 if (allows_reg && TYPE_MODE (type) != BLKmode) | |
3125 op = force_reg (TYPE_MODE (type), op); | |
3126 else if (!allows_mem) | |
3127 warning (0, "asm operand %d probably doesn%'t match constraints", | |
3128 i + noutputs); | |
3129 else if (MEM_P (op)) | |
3130 { | |
3131 /* We won't recognize either volatile memory or memory | 
3132 with a queued address as a valid memory_operand | 
3133 at this point. Ignore it: clearly this *is* a memory. */ | 
3134 } | |
3135 else | |
3136 gcc_unreachable (); | |
3137 } | |
3138 input_rvec[i] = op; | |
3139 input_mode[i] = TYPE_MODE (type); | |
3140 } | |
3141 | |
3142 /* For in-out operands, copy output rtx to input rtx. */ | |
3143 unsigned ninout = inout_opnum.length(); | |
3144 for (i = 0; i < ninout; i++) | |
3145 { | |
3146 int j = inout_opnum[i]; | |
3147 rtx o = output_rvec[j]; | |
3148 | |
3149 input_rvec.safe_push (o); | |
3150 input_mode.safe_push (GET_MODE (o)); | |
3151 | |
3152 char buffer[16]; | |
3153 sprintf (buffer, "%d", j); | |
3154 constraints.safe_push (ggc_strdup (buffer)); | |
3155 } | |
3156 ninputs += ninout; | |
3157 | |
3158 /* Sometimes we wish to automatically clobber registers across an asm. | |
3159 Case in point is when the i386 backend moved from cc0 to a hard reg -- | |
3160 maintaining source-level compatibility means automatically clobbering | |
3161 the flags register. */ | |
3162 rtx_insn *after_md_seq = NULL; | |
3163 if (targetm.md_asm_adjust) | |
3164 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec, | |
3165 constraints, clobber_rvec, | |
3166 clobbered_regs); | |
3167 | |
3168 /* Do not allow the hook to change the output and input count, | |
3169 lest it mess up the operand numbering. */ | |
3170 gcc_assert (output_rvec.length() == noutputs); | |
3171 gcc_assert (input_rvec.length() == ninputs); | |
3172 gcc_assert (constraints.length() == noutputs + ninputs); | |
3173 | |
3174 /* But it certainly can adjust the clobbers. */ | |
3175 nclobbers = clobber_rvec.length(); | |
3176 | |
3177 /* Third pass checks for easy conflicts. */ | |
3178 /* ??? Why are we doing this on trees instead of rtx? */ | 
3179 | |
3180 bool clobber_conflict_found = 0; | |
3181 for (i = 0; i < noutputs; ++i) | |
3182 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs)) | |
3183 clobber_conflict_found = 1; | |
3184 for (i = 0; i < ninputs - ninout; ++i) | |
3185 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs)) | |
3186 clobber_conflict_found = 1; | |
3187 | |
3188 /* Make vectors for the expression-rtx, constraint strings, | |
3189 and named operands. */ | |
3190 | |
3191 rtvec argvec = rtvec_alloc (ninputs); | |
3192 rtvec constraintvec = rtvec_alloc (ninputs); | |
3193 rtvec labelvec = rtvec_alloc (nlabels); | |
3194 | |
3195 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode | |
3196 : GET_MODE (output_rvec[0])), | |
3197 ggc_strdup (gimple_asm_string (stmt)), | |
3198 "", 0, argvec, constraintvec, | |
3199 labelvec, locus); | |
3200 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt); | |
3201 | |
3202 for (i = 0; i < ninputs; ++i) | |
3203 { | |
3204 ASM_OPERANDS_INPUT (body, i) = input_rvec[i]; | |
3205 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i) | |
3206 = gen_rtx_ASM_INPUT_loc (input_mode[i], | |
3207 constraints[i + noutputs], | |
3208 locus); | |
3209 } | |
3210 | |
3211 /* Copy labels to the vector. */ | |
3212 rtx_code_label *fallthru_label = NULL; | |
3213 if (nlabels > 0) | |
3214 { | |
3215 basic_block fallthru_bb = NULL; | |
3216 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs); | |
3217 if (fallthru) | |
3218 fallthru_bb = fallthru->dest; | |
3219 | |
3220 for (i = 0; i < nlabels; ++i) | |
3221 { | |
3222 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i)); | |
3223 rtx_insn *r; | |
3224 /* If asm goto has any labels in the fallthru basic block, use | |
3225 a label that we emit immediately after the asm goto. Expansion | |
3226 may insert further instructions into the same basic block after | |
3227 the asm goto, and if we don't do this, insertion of instructions on | 
3228 the fallthru edge might misbehave. See PR58670. */ | |
3229 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb) | |
3230 { | |
3231 if (fallthru_label == NULL_RTX) | |
3232 fallthru_label = gen_label_rtx (); | |
3233 r = fallthru_label; | |
3234 } | |
3235 else | |
3236 r = label_rtx (label); | |
3237 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r); | |
3238 } | |
3239 } | |
3240 | |
3241 /* Now, for each output, construct an rtx | |
3242 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER | |
3243 ARGVEC CONSTRAINTS OPNAMES)) | |
3244 If there is more than one, put them inside a PARALLEL. */ | |
3245 | |
3246 if (nlabels > 0 && nclobbers == 0) | |
3247 { | |
3248 gcc_assert (noutputs == 0); | |
3249 emit_jump_insn (body); | |
3250 } | |
3251 else if (noutputs == 0 && nclobbers == 0) | |
3252 { | |
3253 /* No output operands: put in a raw ASM_OPERANDS rtx. */ | |
3254 emit_insn (body); | |
3255 } | |
3256 else if (noutputs == 1 && nclobbers == 0) | |
3257 { | |
3258 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0]; | |
3259 emit_insn (gen_rtx_SET (output_rvec[0], body)); | |
3260 } | |
3261 else | |
3262 { | |
3263 rtx obody = body; | |
3264 int num = noutputs; | |
3265 | |
3266 if (num == 0) | |
3267 num = 1; | |
3268 | |
3269 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers)); | |
3270 | |
3271 /* For each output operand, store a SET. */ | |
3272 for (i = 0; i < noutputs; ++i) | |
3273 { | |
3274 rtx src, o = output_rvec[i]; | |
3275 if (i == 0) | |
3276 { | |
3277 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0]; | |
3278 src = obody; | |
3279 } | |
3280 else | |
3281 { | |
3282 src = gen_rtx_ASM_OPERANDS (GET_MODE (o), | |
3283 ASM_OPERANDS_TEMPLATE (obody), | |
3284 constraints[i], i, argvec, | |
3285 constraintvec, labelvec, locus); | |
3286 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt); | |
3287 } | |
3288 XVECEXP (body, 0, i) = gen_rtx_SET (o, src); | |
3289 } | |
3290 | |
3291 /* If there are no outputs (but there are some clobbers) | |
3292 store the bare ASM_OPERANDS into the PARALLEL. */ | |
3293 if (i == 0) | |
3294 XVECEXP (body, 0, i++) = obody; | |
3295 | |
3296 /* Store (clobber REG) for each clobbered register specified. */ | |
3297 for (unsigned j = 0; j < nclobbers; ++j) | |
3298 { | |
3299 rtx clobbered_reg = clobber_rvec[j]; | |
3300 | |
3301 /* Do a sanity check for any overlap between the clobbers and the | 
3302 inputs and outputs that has not been handled. Such overlap | 
3303 should have been detected and reported above. */ | 
3304 if (!clobber_conflict_found && REG_P (clobbered_reg)) | |
3305 { | |
3306 /* We test the old body (obody) contents to avoid | |
3307 tripping over the under-construction body. */ | |
3308 for (unsigned k = 0; k < noutputs; ++k) | |
3309 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k])) | |
3310 internal_error ("asm clobber conflict with output operand"); | |
3311 | |
3312 for (unsigned k = 0; k < ninputs - ninout; ++k) | |
3313 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k])) | |
3314 internal_error ("asm clobber conflict with input operand"); | |
3315 } | |
3316 | |
3317 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg); | |
3318 } | |
3319 | |
3320 if (nlabels > 0) | |
3321 emit_jump_insn (body); | |
3322 else | |
3323 emit_insn (body); | |
3324 } | |
3325 | |
3326 generating_concat_p = old_generating_concat_p; | |
3327 | |
3328 if (fallthru_label) | |
3329 emit_label (fallthru_label); | |
3330 | |
3331 if (after_md_seq) | |
3332 emit_insn (after_md_seq); | |
3333 if (after_rtl_seq) | |
3334 emit_insn (after_rtl_seq); | |
3335 | |
3336 free_temp_slots (); | |
3337 crtl->has_asm_statement = 1; | |
3338 } | |
3339 | |
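Two hedged sketches in plain GNU C (x86 AT&T mnemonics assumed, not taken from this file) of cases handled above: an in-out "+r" operand, which the matching-constraint loop rewrites as an output plus a hidden input constrained "0", and an asm goto, whose labels fill the labelvec and whose fallthru-block labels get the freshly emitted fallthru_label (the PR58670 fix).

    /* "+r" becomes output 0 plus an extra input with constraint "0".  */
    int twice (int x)
    {
      __asm__ ("add %0, %0" : "+r" (x));
      return x;
    }

    /* asm goto: no output operands are allowed here, matching the
       noutputs == 0 assertion on the jump-insn path above.  */
    int nonzero_p (int x)
    {
      __asm__ goto ("test %0, %0\n\tjnz %l[yes]"
                    : /* no outputs */ : "r" (x) : "cc" : yes);
      return 0;
    yes:
      return 1;
    }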
3340 /* Emit code to jump to the address | |
3341 specified by the pointer expression EXP. */ | |
3342 | |
3343 static void | |
3344 expand_computed_goto (tree exp) | |
3345 { | |
3346 rtx x = expand_normal (exp); | |
3347 | |
3348 do_pending_stack_adjust (); | |
3349 emit_indirect_jump (x); | |
3350 } | |
3351 | |
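A minimal example of what feeds expand_computed_goto (standard GNU C labels-as-values; EXP is the pointer expression behind the dereferencing goto):

    /* "goto *p" expands to expand_normal (p) plus an indirect jump.  */
    int dispatch (int op, int v)
    {
      static void *tab[] = { &&inc, &&dec };
      goto *tab[op & 1];
    inc:
      return v + 1;
    dec:
      return v - 1;
    }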
3352 /* Generate RTL code for a `goto' statement with target label LABEL. | |
3353 LABEL should be a LABEL_DECL tree node that was or will later be | |
3354 defined with `expand_label'. */ | |
3355 | |
3356 static void | |
3357 expand_goto (tree label) | |
3358 { | |
3359 if (flag_checking) | |
3360 { | |
3361 /* Check for a nonlocal goto to a containing function. It should | 
3362 have been translated to __builtin_nonlocal_goto by now. */ | 
3363 tree context = decl_function_context (label); | |
3364 gcc_assert (!context || context == current_function_decl); | |
3365 } | |
3366 | |
3367 emit_jump (jump_target_rtx (label)); | |
3368 } | |
3369 | |
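A sketch of the case the assertion above excludes (GNU C nested functions; illustrative only): a jump from a nested function to a label of its containing function is a nonlocal goto, rewritten during gimplification so that expand_goto only ever sees local labels.

    int outer (int x)
    {
      __label__ out;   /* makes "out" visible to the nested function */
      void inner (void) { if (x) goto out; }
      inner ();
      return 0;
    out:
      return 1;
    }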
3370 /* Output a return with no value. */ | |
3371 | |
3372 static void | |
3373 expand_null_return_1 (void) | |
3374 { | |
3375 clear_pending_stack_adjust (); | |
3376 do_pending_stack_adjust (); | |
3377 emit_jump (return_label); | |
3378 } | |
3379 | |
3380 /* Generate RTL to return from the current function, with no value. | |
3381 (That is, we do not do anything about returning any value.) */ | |
3382 | |
3383 void | |
3384 expand_null_return (void) | |
3385 { | |
3386 /* If this function was declared to return a value, but we | 
3387 didn't return one, clobber the return registers so that they are | 
3388 not propagated live to the rest of the function. */ | 
3389 clobber_return_register (); | |
3390 | |
3391 expand_null_return_1 (); | |
3392 } | |
3393 | |
3394 /* Generate RTL to return from the current function, with value VAL. */ | |
3395 | |
3396 static void | |
3397 expand_value_return (rtx val) | |
3398 { | |
3399 /* Copy the value to the return location unless it's already there. */ | |
3400 | |
3401 tree decl = DECL_RESULT (current_function_decl); | |
3402 rtx return_reg = DECL_RTL (decl); | |
3403 if (return_reg != val) | |
3404 { | |
3405 tree funtype = TREE_TYPE (current_function_decl); | |
3406 tree type = TREE_TYPE (decl); | |
3407 int unsignedp = TYPE_UNSIGNED (type); | |
3408 machine_mode old_mode = DECL_MODE (decl); | |
3409 machine_mode mode; | |
3410 if (DECL_BY_REFERENCE (decl)) | |
3411 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2); | |
3412 else | |
3413 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1); | |
3414 | |
3415 if (mode != old_mode) | |
3416 val = convert_modes (mode, old_mode, val, unsignedp); | |
3417 | |
3418 if (GET_CODE (return_reg) == PARALLEL) | |
3419 emit_group_load (return_reg, val, type, int_size_in_bytes (type)); | |
3420 else | |
3421 emit_move_insn (return_reg, val); | |
3422 } | |
3423 | |
3424 expand_null_return_1 (); | |
3425 } | |
3426 | |
3427 /* Generate RTL to evaluate the expression RETVAL and return it | |
3428 from the current function. */ | |
3429 | |
3430 static void | |
3431 expand_return (tree retval, tree bounds) | |
3432 { | |
3433 rtx result_rtl; | |
3434 rtx val = 0; | |
3435 tree retval_rhs; | |
3436 rtx bounds_rtl; | |
3437 | |
3438 /* If function wants no value, give it none. */ | |
3439 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE) | |
3440 { | |
3441 expand_normal (retval); | |
3442 expand_null_return (); | |
3443 return; | |
3444 } | |
3445 | |
3446 if (retval == error_mark_node) | |
3447 { | |
3448 /* Treat this like a return of no value from a function that | |
3449 returns a value. */ | |
3450 expand_null_return (); | |
3451 return; | |
3452 } | |
3453 else if ((TREE_CODE (retval) == MODIFY_EXPR | |
3454 || TREE_CODE (retval) == INIT_EXPR) | |
3455 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL) | |
3456 retval_rhs = TREE_OPERAND (retval, 1); | |
3457 else | |
3458 retval_rhs = retval; | |
3459 | |
3460 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl)); | |
3461 | |
3462 /* Put returned bounds to the right place. */ | |
3463 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl)); | |
3464 if (bounds_rtl) | |
3465 { | |
3466 rtx addr = NULL; | |
3467 rtx bnd = NULL; | |
3468 | |
3469 if (bounds && bounds != error_mark_node) | |
3470 { | |
3471 bnd = expand_normal (bounds); | |
3472 targetm.calls.store_returned_bounds (bounds_rtl, bnd); | |
3473 } | |
3474 else if (REG_P (bounds_rtl)) | |
3475 { | |
3476 if (bounds) | |
3477 bnd = chkp_expand_zero_bounds (); | |
3478 else | |
3479 { | |
3480 addr = expand_normal (build_fold_addr_expr (retval_rhs)); | |
3481 addr = gen_rtx_MEM (Pmode, addr); | |
3482 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL); | |
3483 } | |
3484 | |
3485 targetm.calls.store_returned_bounds (bounds_rtl, bnd); | |
3486 } | |
3487 else | |
3488 { | |
3489 int n; | |
3490 | |
3491 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL); | |
3492 | |
3493 if (bounds) | |
3494 bnd = chkp_expand_zero_bounds (); | |
3495 else | |
3496 { | |
3497 addr = expand_normal (build_fold_addr_expr (retval_rhs)); | |
3498 addr = gen_rtx_MEM (Pmode, addr); | |
3499 } | |
3500 | |
3501 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++) | |
3502 { | |
3503 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0); | |
3504 if (!bounds) | |
3505 { | |
3506 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1); | |
3507 rtx from = adjust_address (addr, Pmode, INTVAL (offs)); | |
3508 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL); | |
3509 } | |
3510 targetm.calls.store_returned_bounds (slot, bnd); | |
3511 } | |
3512 } | |
3513 } | |
3514 else if (chkp_function_instrumented_p (current_function_decl) | |
3515 && !BOUNDED_P (retval_rhs) | |
3516 && chkp_type_has_pointer (TREE_TYPE (retval_rhs)) | |
3517 && TREE_CODE (retval_rhs) != RESULT_DECL) | |
3518 { | |
3519 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs)); | |
3520 addr = gen_rtx_MEM (Pmode, addr); | |
3521 | |
3522 gcc_assert (MEM_P (result_rtl)); | |
3523 | |
3524 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs)); | |
3525 } | |
3526 | |
3527 /* If we are returning the RESULT_DECL, then the value has already | |
3528 been stored into it, so we don't have to do anything special. */ | |
3529 if (TREE_CODE (retval_rhs) == RESULT_DECL) | |
3530 expand_value_return (result_rtl); | |
3531 | |
3532 /* If the result is an aggregate that is being returned in one (or more) | |
3533 registers, load the registers here. */ | |
3534 | |
3535 else if (retval_rhs != 0 | |
3536 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode | |
3537 && REG_P (result_rtl)) | |
3538 { | |
3539 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs); | |
3540 if (val) | |
3541 { | |
3542 /* Use the mode of the result value on the return register. */ | |
3543 PUT_MODE (result_rtl, GET_MODE (val)); | |
3544 expand_value_return (val); | |
3545 } | |
3546 else | |
3547 expand_null_return (); | |
3548 } | |
3549 else if (retval_rhs != 0 | |
3550 && !VOID_TYPE_P (TREE_TYPE (retval_rhs)) | |
3551 && (REG_P (result_rtl) | |
3552 || (GET_CODE (result_rtl) == PARALLEL))) | |
3553 { | |
3554 /* Compute the return value into a temporary (usually a pseudo reg). */ | |
3555 val | |
3556 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1); | |
3557 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL); | |
3558 val = force_not_mem (val); | |
3559 expand_value_return (val); | |
3560 } | |
3561 else | |
3562 { | |
3563 /* No hard reg used; calculate value into hard return reg. */ | |
3564 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
3565 expand_value_return (result_rtl); | |
3566 } | |
1889 } | 3567 } |
1890 | 3568 |
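A hedged sketch of the copy_blkmode_to_reg branch above (ABI-dependent; on many targets a three-byte struct has BLKmode yet is still returned in a register):

    /* No integer mode has size 3, so the struct is BLKmode; ABIs such
       as x86-64 nevertheless return it in a register.  */
    struct triple { char a, b, c; };

    struct triple make (char x)
    {
      struct triple t = { x, x, x };
      return t;
    }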
1891 /* A subroutine of expand_gimple_stmt, expanding one gimple statement | 3569 /* A subroutine of expand_gimple_stmt, expanding one gimple statement |
1892 STMT that doesn't require special handling for outgoing edges. That | 3570 STMT that doesn't require special handling for outgoing edges. That |
1893 is no tailcalls and no GIMPLE_COND. */ | 3571 is no tailcalls and no GIMPLE_COND. */ |
1894 | 3572 |
1895 static void | 3573 static void |
1896 expand_gimple_stmt_1 (gimple stmt) | 3574 expand_gimple_stmt_1 (gimple *stmt) |
1897 { | 3575 { |
1898 tree op0; | 3576 tree op0; |
3577 | |
3578 set_curr_insn_location (gimple_location (stmt)); | |
3579 | |
1899 switch (gimple_code (stmt)) | 3580 switch (gimple_code (stmt)) |
1900 { | 3581 { |
1901 case GIMPLE_GOTO: | 3582 case GIMPLE_GOTO: |
1902 op0 = gimple_goto_dest (stmt); | 3583 op0 = gimple_goto_dest (stmt); |
1903 if (TREE_CODE (op0) == LABEL_DECL) | 3584 if (TREE_CODE (op0) == LABEL_DECL) |
1904 expand_goto (op0); | 3585 expand_goto (op0); |
1905 else | 3586 else |
1906 expand_computed_goto (op0); | 3587 expand_computed_goto (op0); |
1907 break; | 3588 break; |
1908 case GIMPLE_LABEL: | 3589 case GIMPLE_LABEL: |
1909 expand_label (gimple_label_label (stmt)); | 3590 expand_label (gimple_label_label (as_a <glabel *> (stmt))); |
1910 break; | 3591 break; |
1911 case GIMPLE_NOP: | 3592 case GIMPLE_NOP: |
1912 case GIMPLE_PREDICT: | 3593 case GIMPLE_PREDICT: |
1913 break; | 3594 break; |
1914 case GIMPLE_SWITCH: | 3595 case GIMPLE_SWITCH: |
1915 expand_case (stmt); | 3596 { |
3597 gswitch *swtch = as_a <gswitch *> (stmt); | |
3598 if (gimple_switch_num_labels (swtch) == 1) | |
3599 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch))); | |
3600 else | |
3601 expand_case (swtch); | |
3602 } | |
1916 break; | 3603 break; |
1917 case GIMPLE_ASM: | 3604 case GIMPLE_ASM: |
1918 expand_asm_stmt (stmt); | 3605 expand_asm_stmt (as_a <gasm *> (stmt)); |
1919 break; | 3606 break; |
1920 case GIMPLE_CALL: | 3607 case GIMPLE_CALL: |
1921 expand_call_stmt (stmt); | 3608 expand_call_stmt (as_a <gcall *> (stmt)); |
1922 break; | 3609 break; |
1923 | 3610 |
1924 case GIMPLE_RETURN: | 3611 case GIMPLE_RETURN: |
1925 op0 = gimple_return_retval (stmt); | 3612 { |
1926 | 3613 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt)); |
1927 if (op0 && op0 != error_mark_node) | 3614 op0 = gimple_return_retval (as_a <greturn *> (stmt)); |
1928 { | 3615 |
1929 tree result = DECL_RESULT (current_function_decl); | 3616 if (op0 && op0 != error_mark_node) |
1930 | 3617 { |
1931 /* If we are not returning the current function's RESULT_DECL, | 3618 tree result = DECL_RESULT (current_function_decl); |
1932 build an assignment to it. */ | 3619 |
1933 if (op0 != result) | 3620 /* Mark we have return statement with missing bounds. */ |
1934 { | 3621 if (!bnd |
1935 /* I believe that a function's RESULT_DECL is unique. */ | 3622 && chkp_function_instrumented_p (cfun->decl) |
1936 gcc_assert (TREE_CODE (op0) != RESULT_DECL); | 3623 && !DECL_P (op0)) |
1937 | 3624 bnd = error_mark_node; |
1938 /* ??? We'd like to use simply expand_assignment here, | 3625 |
1939 but this fails if the value is of BLKmode but the return | 3626 /* If we are not returning the current function's RESULT_DECL, |
1940 decl is a register. expand_return has special handling | 3627 build an assignment to it. */ |
1941 for this combination, which eventually should move | 3628 if (op0 != result) |
1942 to common code. See comments there. Until then, let's | 3629 { |
1943 build a modify expression :-/ */ | 3630 /* I believe that a function's RESULT_DECL is unique. */ |
1944 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | 3631 gcc_assert (TREE_CODE (op0) != RESULT_DECL); |
1945 result, op0); | 3632 |
1946 } | 3633 /* ??? We'd like to use simply expand_assignment here, |
1947 } | 3634 but this fails if the value is of BLKmode but the return |
1948 if (!op0) | 3635 decl is a register. expand_return has special handling |
1949 expand_null_return (); | 3636 for this combination, which eventually should move |
1950 else | 3637 to common code. See comments there. Until then, let's |
1951 expand_return (op0); | 3638 build a modify expression :-/ */ |
3639 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | |
3640 result, op0); | |
3641 } | |
3642 } | |
3643 | |
3644 if (!op0) | |
3645 expand_null_return (); | |
3646 else | |
3647 expand_return (op0, bnd); | |
3648 } | |
1952 break; | 3649 break; |
1953 | 3650 |
1954 case GIMPLE_ASSIGN: | 3651 case GIMPLE_ASSIGN: |
1955 { | 3652 { |
1956 tree lhs = gimple_assign_lhs (stmt); | 3653 gassign *assign_stmt = as_a <gassign *> (stmt); |
3654 tree lhs = gimple_assign_lhs (assign_stmt); | |
1957 | 3655 |
1958 /* Tree expand used to fiddle with |= and &= of two bitfield | 3656 /* Tree expand used to fiddle with |= and &= of two bitfield |
1959 COMPONENT_REFs here. This can't happen with gimple, the LHS | 3657 COMPONENT_REFs here. This can't happen with gimple, the LHS |
1960 of binary assigns must be a gimple reg. */ | 3658 of binary assigns must be a gimple reg. */ |
1961 | 3659 |
1962 if (TREE_CODE (lhs) != SSA_NAME | 3660 if (TREE_CODE (lhs) != SSA_NAME |
1963 || get_gimple_rhs_class (gimple_expr_code (stmt)) | 3661 || get_gimple_rhs_class (gimple_expr_code (stmt)) |
1964 == GIMPLE_SINGLE_RHS) | 3662 == GIMPLE_SINGLE_RHS) |
1965 { | 3663 { |
1966 tree rhs = gimple_assign_rhs1 (stmt); | 3664 tree rhs = gimple_assign_rhs1 (assign_stmt); |
1967 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) | 3665 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) |
1968 == GIMPLE_SINGLE_RHS); | 3666 == GIMPLE_SINGLE_RHS); |
1969 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) | 3667 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs) |
3668 /* Do not put locations on possibly shared trees. */ | |
3669 && !is_gimple_min_invariant (rhs)) | |
1970 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); | 3670 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); |
1971 expand_assignment (lhs, rhs, | 3671 if (TREE_CLOBBER_P (rhs)) |
1972 gimple_assign_nontemporal_move_p (stmt)); | 3672 /* This is a clobber to mark the going out of scope for |
3673 this LHS. */ | |
3674 ; | |
3675 else | |
3676 expand_assignment (lhs, rhs, | |
3677 gimple_assign_nontemporal_move_p ( | |
3678 assign_stmt)); | |
1973 } | 3679 } |
1974 else | 3680 else |
1975 { | 3681 { |
1976 rtx target, temp; | 3682 rtx target, temp; |
1977 bool nontemporal = gimple_assign_nontemporal_move_p (stmt); | 3683 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt); |
1978 struct separate_ops ops; | 3684 struct separate_ops ops; |
1979 bool promoted = false; | 3685 bool promoted = false; |
1980 | 3686 |
1981 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); | 3687 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); |
1982 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) | 3688 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) |
1983 promoted = true; | 3689 promoted = true; |
1984 | 3690 |
1985 ops.code = gimple_assign_rhs_code (stmt); | 3691 ops.code = gimple_assign_rhs_code (assign_stmt); |
1986 ops.type = TREE_TYPE (lhs); | 3692 ops.type = TREE_TYPE (lhs); |
1987 switch (get_gimple_rhs_class (gimple_expr_code (stmt))) | 3693 switch (get_gimple_rhs_class (ops.code)) |
1988 { | 3694 { |
1989 case GIMPLE_TERNARY_RHS: | 3695 case GIMPLE_TERNARY_RHS: |
1990 ops.op2 = gimple_assign_rhs3 (stmt); | 3696 ops.op2 = gimple_assign_rhs3 (assign_stmt); |
1991 /* Fallthru */ | 3697 /* Fallthru */ |
1992 case GIMPLE_BINARY_RHS: | 3698 case GIMPLE_BINARY_RHS: |
1993 ops.op1 = gimple_assign_rhs2 (stmt); | 3699 ops.op1 = gimple_assign_rhs2 (assign_stmt); |
1994 /* Fallthru */ | 3700 /* Fallthru */ |
1995 case GIMPLE_UNARY_RHS: | 3701 case GIMPLE_UNARY_RHS: |
1996 ops.op0 = gimple_assign_rhs1 (stmt); | 3702 ops.op0 = gimple_assign_rhs1 (assign_stmt); |
1997 break; | 3703 break; |
1998 default: | 3704 default: |
1999 gcc_unreachable (); | 3705 gcc_unreachable (); |
2000 } | 3706 } |
2001 ops.location = gimple_location (stmt); | 3707 ops.location = gimple_location (stmt); |
2009 | 3715 |
2010 if (temp == target) | 3716 if (temp == target) |
2011 ; | 3717 ; |
2012 else if (promoted) | 3718 else if (promoted) |
2013 { | 3719 { |
2014 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); | 3720 int unsignedp = SUBREG_PROMOTED_SIGN (target); |
2015 /* If TEMP is a VOIDmode constant, use convert_modes to make | 3721 /* If TEMP is a VOIDmode constant, use convert_modes to make |
2016 sure that we properly convert it. */ | 3722 sure that we properly convert it. */ |
2017 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | 3723 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) |
2018 { | 3724 { |
2019 temp = convert_modes (GET_MODE (target), | 3725 temp = convert_modes (GET_MODE (target), |
2047 | 3753 |
2048 In addition to generating the necessary RTL instructions this also | 3754 In addition to generating the necessary RTL instructions this also |
2049 sets REG_EH_REGION notes if necessary and sets the current source | 3755 sets REG_EH_REGION notes if necessary and sets the current source |
2050 location for diagnostics. */ | 3756 location for diagnostics. */ |
2051 | 3757 |
2052 static rtx | 3758 static rtx_insn * |
2053 expand_gimple_stmt (gimple stmt) | 3759 expand_gimple_stmt (gimple *stmt) |
2054 { | 3760 { |
2055 int lp_nr = 0; | |
2056 rtx last = NULL; | |
2057 location_t saved_location = input_location; | 3761 location_t saved_location = input_location; |
2058 | 3762 rtx_insn *last = get_last_insn (); |
2059 last = get_last_insn (); | 3763 int lp_nr; |
2060 | 3764 |
2061 /* If this is an expression of some kind and it has an associated line | |
2062 number, then emit the line number before expanding the expression. | |
2063 | |
2064 We need to save and restore the file and line information so that | |
2065 errors discovered during expansion are emitted with the right | |
2066 information. It would be better of the diagnostic routines | |
2067 used the file/line information embedded in the tree nodes rather | |
2068 than globals. */ | |
2069 gcc_assert (cfun); | 3765 gcc_assert (cfun); |
2070 | 3766 |
3767 /* We need to save and restore the current source location so that errors | |
3768 discovered during expansion are emitted with the right location. But | |
3769 it would be better if the diagnostic routines used the source location | |
3770 embedded in the tree nodes rather than globals. */ | |
2071 if (gimple_has_location (stmt)) | 3771 if (gimple_has_location (stmt)) |
2072 { | 3772 input_location = gimple_location (stmt); |
2073 input_location = gimple_location (stmt); | |
2074 set_curr_insn_source_location (input_location); | |
2075 | |
2076 /* Record where the insns produced belong. */ | |
2077 set_curr_insn_block (gimple_block (stmt)); | |
2078 } | |
2079 | 3773 |
2080 expand_gimple_stmt_1 (stmt); | 3774 expand_gimple_stmt_1 (stmt); |
3775 | |
2081 /* Free any temporaries used to evaluate this statement. */ | 3776 /* Free any temporaries used to evaluate this statement. */ |
2082 free_temp_slots (); | 3777 free_temp_slots (); |
2083 | 3778 |
2084 input_location = saved_location; | 3779 input_location = saved_location; |
2085 | 3780 |
2086 /* Mark all insns that may trap. */ | 3781 /* Mark all insns that may trap. */ |
2087 lp_nr = lookup_stmt_eh_lp (stmt); | 3782 lp_nr = lookup_stmt_eh_lp (stmt); |
2088 if (lp_nr) | 3783 if (lp_nr) |
2089 { | 3784 { |
2090 rtx insn; | 3785 rtx_insn *insn; |
2091 for (insn = next_real_insn (last); insn; | 3786 for (insn = next_real_insn (last); insn; |
2092 insn = next_real_insn (insn)) | 3787 insn = next_real_insn (insn)) |
2093 { | 3788 { |
2094 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | 3789 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) |
2095 /* If we want exceptions for non-call insns, any | 3790 /* If we want exceptions for non-call insns, any |
2113 can still reach the rest of BB. The case here is __builtin_sqrt, | 3808 can still reach the rest of BB. The case here is __builtin_sqrt, |
2114 where the NaN result goes through the external function (with a | 3809 where the NaN result goes through the external function (with a |
2115 tailcall) and the normal result happens via a sqrt instruction. */ | 3810 tailcall) and the normal result happens via a sqrt instruction. */ |
2116 | 3811 |
2117 static basic_block | 3812 static basic_block |
2118 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru) | 3813 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru) |
2119 { | 3814 { |
2120 rtx last2, last; | 3815 rtx_insn *last2, *last; |
2121 edge e; | 3816 edge e; |
2122 edge_iterator ei; | 3817 edge_iterator ei; |
2123 int probability; | 3818 profile_probability probability; |
2124 gcov_type count; | |
2125 | 3819 |
2126 last2 = last = expand_gimple_stmt (stmt); | 3820 last2 = last = expand_gimple_stmt (stmt); |
2127 | 3821 |
2128 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) | 3822 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) |
2129 if (CALL_P (last) && SIBLING_CALL_P (last)) | 3823 if (CALL_P (last) && SIBLING_CALL_P (last)) |
2144 EH or abnormal edges, we shouldn't have created a tail call in | 3838 EH or abnormal edges, we shouldn't have created a tail call in |
2145 the first place. So it seems to me we should just be removing | 3839 the first place. So it seems to me we should just be removing |
2146 all edges here, or redirecting the existing fallthru edge to | 3840 all edges here, or redirecting the existing fallthru edge to |
2147 the exit block. */ | 3841 the exit block. */ |
2148 | 3842 |
2149 probability = 0; | 3843 probability = profile_probability::never (); |
2150 count = 0; | |
2151 | 3844 |
2152 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 3845 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
2153 { | 3846 { |
2154 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) | 3847 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) |
2155 { | 3848 { |
2156 if (e->dest != EXIT_BLOCK_PTR) | 3849 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) |
2157 { | 3850 { |
2158 e->dest->count -= e->count; | |
2159 e->dest->frequency -= EDGE_FREQUENCY (e); | 3851 e->dest->frequency -= EDGE_FREQUENCY (e); |
2160 if (e->dest->count < 0) | |
2161 e->dest->count = 0; | |
2162 if (e->dest->frequency < 0) | 3852 if (e->dest->frequency < 0) |
2163 e->dest->frequency = 0; | 3853 e->dest->frequency = 0; |
2164 } | 3854 } |
2165 count += e->count; | |
2166 probability += e->probability; | 3855 probability += e->probability; |
2167 remove_edge (e); | 3856 remove_edge (e); |
2168 } | 3857 } |
2169 else | 3858 else |
2170 ei_next (&ei); | 3859 ei_next (&ei); |
2187 break; | 3876 break; |
2188 } | 3877 } |
2189 delete_insn (NEXT_INSN (last)); | 3878 delete_insn (NEXT_INSN (last)); |
2190 } | 3879 } |
2191 | 3880 |
2192 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | 3881 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL |
2193 e->probability += probability; | 3882 | EDGE_SIBCALL); |
2194 e->count += count; | 3883 e->probability = probability; |
2195 BB_END (bb) = last; | 3884 BB_END (bb) = last; |
2196 update_bb_for_insn (bb); | 3885 update_bb_for_insn (bb); |
2197 | 3886 |
2198 if (NEXT_INSN (last)) | 3887 if (NEXT_INSN (last)) |
2199 { | 3888 { |
2210 } | 3899 } |
2211 | 3900 |
2212 /* Return the difference between the floor and the truncated result of | 3901 /* Return the difference between the floor and the truncated result of |
2213 a signed division by OP1 with remainder MOD. */ | 3902 a signed division by OP1 with remainder MOD. */ |
2214 static rtx | 3903 static rtx |
2215 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | 3904 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1) |
2216 { | 3905 { |
2217 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ | 3906 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ |
2218 return gen_rtx_IF_THEN_ELSE | 3907 return gen_rtx_IF_THEN_ELSE |
2219 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | 3908 (mode, gen_rtx_NE (BImode, mod, const0_rtx), |
2220 gen_rtx_IF_THEN_ELSE | 3909 gen_rtx_IF_THEN_ELSE |
2226 } | 3915 } |
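The formula in floor_sdiv_adjust subtracts 1 from a truncating quotient exactly when the remainder is nonzero and of opposite sign to the divisor, which turns the truncated quotient into the floored one. A minimal standalone check of that identity (plain C, not GCC internals; floor_sdiv_adj is an illustrative name; build with cc file.c -lm):

#include <assert.h>
#include <math.h>
#include <stdio.h>

/* Mirror of (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0), using an XOR
   sign test in place of the division.  */
static int floor_sdiv_adj (int mod, int op1)
{
  return (mod != 0 && (op1 ^ mod) < 0) ? -1 : 0;
}

int main (void)
{
  for (int a = -9; a <= 9; a++)
    for (int b = -4; b <= 4; b++)
      {
        if (b == 0)
          continue;
        int q = a / b, r = a % b;   /* C division truncates toward zero.  */
        assert (q + floor_sdiv_adj (r, b) == (int) floor ((double) a / b));
      }
  puts ("floor adjustment identity holds");
  return 0;
}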
2227 | 3916 |
2228 /* Return the difference between the ceil and the truncated result of | 3917 /* Return the difference between the ceil and the truncated result of |
2229 a signed division by OP1 with remainder MOD. */ | 3918 a signed division by OP1 with remainder MOD. */ |
2230 static rtx | 3919 static rtx |
2231 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | 3920 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1) |
2232 { | 3921 { |
2233 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ | 3922 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ |
2234 return gen_rtx_IF_THEN_ELSE | 3923 return gen_rtx_IF_THEN_ELSE |
2235 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | 3924 (mode, gen_rtx_NE (BImode, mod, const0_rtx), |
2236 gen_rtx_IF_THEN_ELSE | 3925 gen_rtx_IF_THEN_ELSE |
2242 } | 3931 } |
2243 | 3932 |
2244 /* Return the difference between the ceil and the truncated result of | 3933 /* Return the difference between the ceil and the truncated result of |
2245 an unsigned division by OP1 with remainder MOD. */ | 3934 an unsigned division by OP1 with remainder MOD. */ |
2246 static rtx | 3935 static rtx |
2247 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) | 3936 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) |
2248 { | 3937 { |
2249 /* (mod != 0 ? 1 : 0) */ | 3938 /* (mod != 0 ? 1 : 0) */ |
2250 return gen_rtx_IF_THEN_ELSE | 3939 return gen_rtx_IF_THEN_ELSE |
2251 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | 3940 (mode, gen_rtx_NE (BImode, mod, const0_rtx), |
2252 const1_rtx, const0_rtx); | 3941 const1_rtx, const0_rtx); |
2254 | 3943 |
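ceil_sdiv_adjust and ceil_udiv_adjust are the mirror image: add 1 when the remainder is nonzero and (in the signed case) agrees in sign with the divisor; for unsigned division any nonzero remainder bumps the quotient, which is why op1 is ATTRIBUTE_UNUSED there. A standalone sketch of both identities (illustrative names, not GCC code):

#include <assert.h>
#include <math.h>
#include <stdio.h>

/* Mirror of (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0).  */
static int ceil_sdiv_adj (int mod, int op1)
{
  return (mod != 0 && (op1 ^ mod) >= 0) ? 1 : 0;
}

/* Mirror of (mod != 0 ? 1 : 0); the divisor plays no role.  */
static unsigned ceil_udiv_adj (unsigned mod)
{
  return mod != 0;
}

int main (void)
{
  for (int a = -9; a <= 9; a++)
    for (int b = -4; b <= 4; b++)
      {
        if (b == 0)
          continue;
        assert (a / b + ceil_sdiv_adj (a % b, b)
                == (int) ceil ((double) a / b));
      }
  for (unsigned a = 0; a < 40; a++)
    for (unsigned b = 1; b < 8; b++)
      assert (a / b + ceil_udiv_adj (a % b) == (a + b - 1) / b);
  puts ("ceil adjustment identities hold");
  return 0;
}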
2255 /* Return the difference between the rounded and the truncated result | 3944 /* Return the difference between the rounded and the truncated result |
2256 of a signed division by OP1 with remainder MOD. Halfway cases are | 3945 of a signed division by OP1 with remainder MOD. Halfway cases are |
2257 rounded away from zero, rather than to the nearest even number. */ | 3946 rounded away from zero, rather than to the nearest even number. */ |
2258 static rtx | 3947 static rtx |
2259 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | 3948 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1) |
2260 { | 3949 { |
2261 /* (abs (mod) >= abs (op1) - abs (mod) | 3950 /* (abs (mod) >= abs (op1) - abs (mod) |
2262 ? (op1 / mod > 0 ? 1 : -1) | 3951 ? (op1 / mod > 0 ? 1 : -1) |
2263 : 0) */ | 3952 : 0) */ |
2264 return gen_rtx_IF_THEN_ELSE | 3953 return gen_rtx_IF_THEN_ELSE |
2277 /* Return the difference between the rounded and the truncated result | 3966 /* Return the difference between the rounded and the truncated result |
2278 of an unsigned division by OP1 with remainder MOD. Halfway cases | 3967 of an unsigned division by OP1 with remainder MOD. Halfway cases |
2279 are rounded away from zero, rather than to the nearest even | 3968 are rounded away from zero, rather than to the nearest even |
2280 number. */ | 3969 number. */ |
2281 static rtx | 3970 static rtx |
2282 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | 3971 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1) |
2283 { | 3972 { |
2284 /* (mod >= op1 - mod ? 1 : 0) */ | 3973 /* (mod >= op1 - mod ? 1 : 0) */ |
2285 return gen_rtx_IF_THEN_ELSE | 3974 return gen_rtx_IF_THEN_ELSE |
2286 (mode, gen_rtx_GE (BImode, mod, | 3975 (mode, gen_rtx_GE (BImode, mod, |
2287 gen_rtx_MINUS (mode, op1, mod)), | 3976 gen_rtx_MINUS (mode, op1, mod)), |
2290 | 3979 |
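round_sdiv_adjust and round_udiv_adjust implement round-half-away-from-zero on top of a truncating division: the quotient is bumped by +/-1 once the remainder's magnitude reaches at least half the divisor's. C's round() also rounds halfway cases away from zero, so the identity can be checked directly (standalone sketch, illustrative names):

#include <assert.h>
#include <math.h>
#include <stdio.h>

/* Mirror of (abs (mod) >= abs (op1) - abs (mod)
              ? (op1 / mod > 0 ? 1 : -1) : 0).  */
static int round_sdiv_adj (int mod, int op1)
{
  int am = mod < 0 ? -mod : mod;
  int ab = op1 < 0 ? -op1 : op1;
  if (am >= ab - am)          /* |mod| is at least half of |op1|; mod != 0.  */
    return (op1 ^ mod) >= 0 ? 1 : -1;
  return 0;
}

/* Mirror of (mod >= op1 - mod ? 1 : 0).  */
static unsigned round_udiv_adj (unsigned mod, unsigned op1)
{
  return mod >= op1 - mod;
}

int main (void)
{
  for (int a = -9; a <= 9; a++)
    for (int b = -4; b <= 4; b++)
      {
        if (b == 0)
          continue;
        assert (a / b + round_sdiv_adj (a % b, b)
                == (int) round ((double) a / b));
      }
  for (unsigned a = 0; a < 40; a++)
    for (unsigned b = 1; b < 8; b++)
      assert (a / b + round_udiv_adj (a % b, b) == (2 * a + b) / (2 * b));
  puts ("round adjustment identities hold");
  return 0;
}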
2291 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting | 3980 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting |
2292 any rtl. */ | 3981 any rtl. */ |
2293 | 3982 |
2294 static rtx | 3983 static rtx |
2295 convert_debug_memory_address (enum machine_mode mode, rtx x, | 3984 convert_debug_memory_address (scalar_int_mode mode, rtx x, |
2296 addr_space_t as) | 3985 addr_space_t as) |
2297 { | 3986 { |
2298 enum machine_mode xmode = GET_MODE (x); | |
2299 | |
2300 #ifndef POINTERS_EXTEND_UNSIGNED | 3987 #ifndef POINTERS_EXTEND_UNSIGNED |
2301 gcc_assert (mode == Pmode | 3988 gcc_assert (mode == Pmode |
2302 || mode == targetm.addr_space.address_mode (as)); | 3989 || mode == targetm.addr_space.address_mode (as)); |
2303 gcc_assert (xmode == mode || xmode == VOIDmode); | 3990 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode); |
2304 #else | 3991 #else |
2305 rtx temp; | 3992 rtx temp; |
2306 enum machine_mode address_mode = targetm.addr_space.address_mode (as); | 3993 |
2307 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); | 3994 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as)); |
2308 | |
2309 gcc_assert (mode == address_mode || mode == pointer_mode); | |
2310 | 3995 |
2311 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | 3996 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) |
2312 return x; | 3997 return x; |
2313 | 3998 |
2314 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | 3999 /* X must have some form of address mode already. */ |
2315 x = simplify_gen_subreg (mode, x, xmode, | 4000 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x)); |
2316 subreg_lowpart_offset | 4001 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode)) |
2317 (mode, xmode)); | 4002 x = lowpart_subreg (mode, x, xmode); |
2318 else if (POINTERS_EXTEND_UNSIGNED > 0) | 4003 else if (POINTERS_EXTEND_UNSIGNED > 0) |
2319 x = gen_rtx_ZERO_EXTEND (mode, x); | 4004 x = gen_rtx_ZERO_EXTEND (mode, x); |
2320 else if (!POINTERS_EXTEND_UNSIGNED) | 4005 else if (!POINTERS_EXTEND_UNSIGNED) |
2321 x = gen_rtx_SIGN_EXTEND (mode, x); | 4006 x = gen_rtx_SIGN_EXTEND (mode, x); |
2322 else | 4007 else |
2332 && CONST_INT_P (XEXP (SUBREG_REG (x), 1)))) | 4017 && CONST_INT_P (XEXP (SUBREG_REG (x), 1)))) |
2333 && GET_MODE (SUBREG_REG (x)) == mode) | 4018 && GET_MODE (SUBREG_REG (x)) == mode) |
2334 return SUBREG_REG (x); | 4019 return SUBREG_REG (x); |
2335 break; | 4020 break; |
2336 case LABEL_REF: | 4021 case LABEL_REF: |
2337 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0)); | 4022 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x)); |
2338 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x); | 4023 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x); |
2339 return temp; | 4024 return temp; |
2340 case SYMBOL_REF: | 4025 case SYMBOL_REF: |
2341 temp = shallow_copy_rtx (x); | 4026 temp = shallow_copy_rtx (x); |
2342 PUT_MODE (temp, mode); | 4027 PUT_MODE (temp, mode); |
2364 #endif /* POINTERS_EXTEND_UNSIGNED */ | 4049 #endif /* POINTERS_EXTEND_UNSIGNED */ |
2365 | 4050 |
2366 return x; | 4051 return x; |
2367 } | 4052 } |
2368 | 4053 |
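Because convert_debug_memory_address may not emit instructions, it wraps the value symbolically: widening ptr_mode to Pmode becomes a ZERO_EXTEND or SIGN_EXTEND depending on POINTERS_EXTEND_UNSIGNED, and narrowing takes the low-order subreg. The underlying arithmetic is ordinary pointer widening/narrowing; a standalone sketch for a 32-bit ptr_mode inside a 64-bit Pmode (PTR_EXTEND_UNSIGNED, widen_addr and narrow_addr are illustrative stand-ins, not GCC names):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for POINTERS_EXTEND_UNSIGNED > 0 (zero-extension).  */
#define PTR_EXTEND_UNSIGNED 1

/* Widening ptr_mode -> Pmode: ZERO_EXTEND or SIGN_EXTEND per target.  */
static uint64_t widen_addr (uint32_t a)
{
#if PTR_EXTEND_UNSIGNED > 0
  return (uint64_t) a;                       /* ZERO_EXTEND  */
#else
  return (uint64_t) (int64_t) (int32_t) a;   /* SIGN_EXTEND  */
#endif
}

/* Narrowing Pmode -> ptr_mode keeps the low-order part, as the
   lowpart_subreg call above does.  */
static uint32_t narrow_addr (uint64_t a)
{
  return (uint32_t) a;
}

int main (void)
{
  uint32_t p = 0x80001234u;                  /* high bit set on purpose  */
  uint64_t w = widen_addr (p);
  printf ("widened:  0x%016llx\n", (unsigned long long) w);
  printf ("narrowed: 0x%08x\n", (unsigned) narrow_addr (w));
  return 0;
}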
2369 /* Return an RTX equivalent to the value of the tree expression | 4054 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created |
2370 EXP. */ | 4055 by avoid_deep_ter_for_debug. */ |
4056 | |
4057 static hash_map<tree, tree> *deep_ter_debug_map; | |
4058 | |
4059 /* Split too deep TER chains for debug stmts using debug temporaries. */ | |
4060 | |
4061 static void | |
4062 avoid_deep_ter_for_debug (gimple *stmt, int depth) | |
4063 { | |
4064 use_operand_p use_p; | |
4065 ssa_op_iter iter; | |
4066 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) | |
4067 { | |
4068 tree use = USE_FROM_PTR (use_p); | |
4069 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use)) | |
4070 continue; | |
4071 gimple *g = get_gimple_for_ssa_name (use); | |
4072 if (g == NULL) | |
4073 continue; | |
4074 if (depth > 6 && !stmt_ends_bb_p (g)) | |
4075 { | |
4076 if (deep_ter_debug_map == NULL) | |
4077 deep_ter_debug_map = new hash_map<tree, tree>; | |
4078 | |
4079 tree &vexpr = deep_ter_debug_map->get_or_insert (use); | |
4080 if (vexpr != NULL) | |
4081 continue; | |
4082 vexpr = make_node (DEBUG_EXPR_DECL); | |
4083 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g); | |
4084 DECL_ARTIFICIAL (vexpr) = 1; | |
4085 TREE_TYPE (vexpr) = TREE_TYPE (use); | |
4086 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use))); | |
4087 gimple_stmt_iterator gsi = gsi_for_stmt (g); | |
4088 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT); | |
4089 avoid_deep_ter_for_debug (def_temp, 0); | |
4090 } | |
4091 else | |
4092 avoid_deep_ter_for_debug (g, depth + 1); | |
4093 } | |
4094 } | |
4095 | |
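The recursion above bounds how deeply TER-substituted definitions may nest inside one debug expression: past depth 6 the subexpression is bound to a DEBUG_EXPR_DECL temporary and the walk restarts at depth 0 below it. A toy standalone model of that splitting policy (struct expr and split_deep are illustrative, not GCC types):

#include <stdio.h>

/* Toy single-use definition chain standing in for a TER chain.  */
struct expr
{
  int id;
  struct expr *use;   /* the one statement this definition feeds  */
};

/* Past the same cutoff the pass uses (depth > 6), bind the subexpression
   to a temporary and restart the depth count below it.  */
static void split_deep (struct expr *e, int depth)
{
  if (e->use == NULL)
    return;
  if (depth > 6)
    {
      printf ("bind tmp = expr%d  (chain split here)\n", e->use->id);
      split_deep (e->use, 0);
    }
  else
    split_deep (e->use, depth + 1);
}

int main (void)
{
  struct expr n[20];
  for (int i = 0; i < 20; i++)
    {
      n[i].id = i;
      n[i].use = (i + 1 < 20) ? &n[i + 1] : NULL;
    }
  split_deep (&n[0], 0);   /* splits the 20-deep chain at ids 8 and 16  */
  return 0;
}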
4096 /* Return an RTX equivalent to the value of the parameter DECL. */ | |
4097 | |
4098 static rtx | |
4099 expand_debug_parm_decl (tree decl) | |
4100 { | |
4101 rtx incoming = DECL_INCOMING_RTL (decl); | |
4102 | |
4103 if (incoming | |
4104 && GET_MODE (incoming) != BLKmode | |
4105 && ((REG_P (incoming) && HARD_REGISTER_P (incoming)) | |
4106 || (MEM_P (incoming) | |
4107 && REG_P (XEXP (incoming, 0)) | |
4108 && HARD_REGISTER_P (XEXP (incoming, 0))))) | |
4109 { | |
4110 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming)); | |
4111 | |
4112 #ifdef HAVE_window_save | |
4113 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers. | |
4114 If the target machine has an explicit window save instruction, the | |
4115 actual entry value is the corresponding OUTGOING_REGNO instead. */ | |
4116 if (REG_P (incoming) | |
4117 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming)) | |
4118 incoming | |
4119 = gen_rtx_REG_offset (incoming, GET_MODE (incoming), | |
4120 OUTGOING_REGNO (REGNO (incoming)), 0); | |
4121 else if (MEM_P (incoming)) | |
4122 { | |
4123 rtx reg = XEXP (incoming, 0); | |
4124 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg)) | |
4125 { | |
4126 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg))); | |
4127 incoming = replace_equiv_address_nv (incoming, reg); | |
4128 } | |
4129 else | |
4130 incoming = copy_rtx (incoming); | |
4131 } | |
4132 #endif | |
4133 | |
4134 ENTRY_VALUE_EXP (rtl) = incoming; | |
4135 return rtl; | |
4136 } | |
4137 | |
4138 if (incoming | |
4139 && GET_MODE (incoming) != BLKmode | |
4140 && !TREE_ADDRESSABLE (decl) | |
4141 && MEM_P (incoming) | |
4142 && (XEXP (incoming, 0) == virtual_incoming_args_rtx | |
4143 || (GET_CODE (XEXP (incoming, 0)) == PLUS | |
4144 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx | |
4145 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1))))) | |
4146 return copy_rtx (incoming); | |
4147 | |
4148 return NULL_RTX; | |
4149 } | |
4150 | |
4151 /* Return an RTX equivalent to the value of the tree expression EXP. */ | |
2371 | 4152 |
2372 static rtx | 4153 static rtx |
2373 expand_debug_expr (tree exp) | 4154 expand_debug_expr (tree exp) |
2374 { | 4155 { |
2375 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; | 4156 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; |
2376 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | 4157 machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); |
4158 machine_mode inner_mode = VOIDmode; | |
2377 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); | 4159 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
2378 addr_space_t as; | 4160 addr_space_t as; |
4161 scalar_int_mode op0_mode, op1_mode, addr_mode; | |
2379 | 4162 |
2380 switch (TREE_CODE_CLASS (TREE_CODE (exp))) | 4163 switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
2381 { | 4164 { |
2382 case tcc_expression: | 4165 case tcc_expression: |
2383 switch (TREE_CODE (exp)) | 4166 switch (TREE_CODE (exp)) |
2384 { | 4167 { |
2385 case COND_EXPR: | 4168 case COND_EXPR: |
2386 case DOT_PROD_EXPR: | 4169 case DOT_PROD_EXPR: |
4170 case SAD_EXPR: | |
2387 case WIDEN_MULT_PLUS_EXPR: | 4171 case WIDEN_MULT_PLUS_EXPR: |
2388 case WIDEN_MULT_MINUS_EXPR: | 4172 case WIDEN_MULT_MINUS_EXPR: |
2389 case FMA_EXPR: | 4173 case FMA_EXPR: |
2390 goto ternary; | 4174 goto ternary; |
2391 | 4175 |
2410 return NULL_RTX; | 4194 return NULL_RTX; |
2411 /* Fall through. */ | 4195 /* Fall through. */ |
2412 | 4196 |
2413 binary: | 4197 binary: |
2414 case tcc_binary: | 4198 case tcc_binary: |
2415 case tcc_comparison: | |
2416 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | 4199 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); |
2417 if (!op1) | 4200 if (!op1) |
2418 return NULL_RTX; | 4201 return NULL_RTX; |
4202 switch (TREE_CODE (exp)) | |
4203 { | |
4204 case LSHIFT_EXPR: | |
4205 case RSHIFT_EXPR: | |
4206 case LROTATE_EXPR: | |
4207 case RROTATE_EXPR: | |
4208 case WIDEN_LSHIFT_EXPR: | |
4209 /* Ensure second operand isn't wider than the first one. */ | |
4210 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))); | |
4211 if (is_a <scalar_int_mode> (inner_mode, &op1_mode) | |
4212 && (GET_MODE_UNIT_PRECISION (mode) | |
4213 < GET_MODE_PRECISION (op1_mode))) | |
4214 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode); | |
4215 break; | |
4216 default: | |
4217 break; | |
4218 } | |
2419 /* Fall through. */ | 4219 /* Fall through. */ |
2420 | 4220 |
2421 unary: | 4221 unary: |
2422 case tcc_unary: | 4222 case tcc_unary: |
4223 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
2423 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 4224 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
2424 if (!op0) | 4225 if (!op0) |
2425 return NULL_RTX; | 4226 return NULL_RTX; |
2426 break; | 4227 break; |
4228 | |
4229 case tcc_comparison: | |
4230 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
4231 goto binary; | |
2427 | 4232 |
2428 case tcc_type: | 4233 case tcc_type: |
2429 case tcc_statement: | 4234 case tcc_statement: |
2430 gcc_unreachable (); | 4235 gcc_unreachable (); |
2431 | 4236 |
2448 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); | 4253 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); |
2449 op0 = gen_rtx_MEM (BLKmode, op0); | 4254 op0 = gen_rtx_MEM (BLKmode, op0); |
2450 set_mem_attributes (op0, exp, 0); | 4255 set_mem_attributes (op0, exp, 0); |
2451 return op0; | 4256 return op0; |
2452 } | 4257 } |
2453 /* Fall through... */ | 4258 /* Fall through. */ |
2454 | 4259 |
2455 case INTEGER_CST: | 4260 case INTEGER_CST: |
2456 case REAL_CST: | 4261 case REAL_CST: |
2457 case FIXED_CST: | 4262 case FIXED_CST: |
2458 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); | 4263 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); |
2485 op0 = DECL_RTL_IF_SET (exp); | 4290 op0 = DECL_RTL_IF_SET (exp); |
2486 | 4291 |
2487 /* This decl was probably optimized away. */ | 4292 /* This decl was probably optimized away. */ |
2488 if (!op0) | 4293 if (!op0) |
2489 { | 4294 { |
2490 if (TREE_CODE (exp) != VAR_DECL | 4295 if (!VAR_P (exp) |
2491 || DECL_EXTERNAL (exp) | 4296 || DECL_EXTERNAL (exp) |
2492 || !TREE_STATIC (exp) | 4297 || !TREE_STATIC (exp) |
2493 || !DECL_NAME (exp) | 4298 || !DECL_NAME (exp) |
2494 || DECL_HARD_REGISTER (exp) | 4299 || DECL_HARD_REGISTER (exp) |
4300 || DECL_IN_CONSTANT_POOL (exp) | |
2495 || mode == VOIDmode) | 4301 || mode == VOIDmode) |
2496 return NULL; | 4302 return NULL; |
2497 | 4303 |
2498 op0 = make_decl_rtl_for_debug (exp); | 4304 op0 = make_decl_rtl_for_debug (exp); |
2499 if (!MEM_P (op0) | 4305 if (!MEM_P (op0) |
2503 } | 4309 } |
2504 else | 4310 else |
2505 op0 = copy_rtx (op0); | 4311 op0 = copy_rtx (op0); |
2506 | 4312 |
2507 if (GET_MODE (op0) == BLKmode | 4313 if (GET_MODE (op0) == BLKmode |
2508 /* If op0 is not BLKmode, but BLKmode is, adjust_mode | 4314 /* If op0 is not BLKmode, but mode is, adjust_mode |
2509 below would ICE. While it is likely a FE bug, | 4315 below would ICE. While it is likely a FE bug, |
2510 try to be robust here. See PR43166. */ | 4316 try to be robust here. See PR43166. */ |
2511 || mode == BLKmode | 4317 || mode == BLKmode |
2512 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode)) | 4318 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode)) |
2513 { | 4319 { |
2518 | 4324 |
2519 /* Fall through. */ | 4325 /* Fall through. */ |
2520 | 4326 |
2521 adjust_mode: | 4327 adjust_mode: |
2522 case PAREN_EXPR: | 4328 case PAREN_EXPR: |
2523 case NOP_EXPR: | 4329 CASE_CONVERT: |
2524 case CONVERT_EXPR: | |
2525 { | 4330 { |
2526 enum machine_mode inner_mode = GET_MODE (op0); | 4331 inner_mode = GET_MODE (op0); |
2527 | 4332 |
2528 if (mode == inner_mode) | 4333 if (mode == inner_mode) |
2529 return op0; | 4334 return op0; |
2530 | 4335 |
2531 if (inner_mode == VOIDmode) | 4336 if (inner_mode == VOIDmode) |
2538 return op0; | 4343 return op0; |
2539 } | 4344 } |
2540 | 4345 |
2541 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | 4346 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) |
2542 { | 4347 { |
2543 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | 4348 if (GET_MODE_UNIT_BITSIZE (mode) |
4349 == GET_MODE_UNIT_BITSIZE (inner_mode)) | |
2544 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | 4350 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); |
2545 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | 4351 else if (GET_MODE_UNIT_BITSIZE (mode) |
4352 < GET_MODE_UNIT_BITSIZE (inner_mode)) | |
2546 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | 4353 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); |
2547 else | 4354 else |
2548 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | 4355 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); |
2549 } | 4356 } |
2550 else if (FLOAT_MODE_P (mode)) | 4357 else if (FLOAT_MODE_P (mode)) |
2560 if (unsignedp) | 4367 if (unsignedp) |
2561 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | 4368 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); |
2562 else | 4369 else |
2563 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | 4370 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); |
2564 } | 4371 } |
2565 else if (CONSTANT_P (op0) | 4372 else if (GET_MODE_UNIT_PRECISION (mode) |
2566 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | 4373 == GET_MODE_UNIT_PRECISION (inner_mode)) |
2567 op0 = simplify_gen_subreg (mode, op0, inner_mode, | 4374 op0 = lowpart_subreg (mode, op0, inner_mode); |
2568 subreg_lowpart_offset (mode, | 4375 else if (GET_MODE_UNIT_PRECISION (mode) |
2569 inner_mode)); | 4376 < GET_MODE_UNIT_PRECISION (inner_mode)) |
2570 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary | 4377 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode); |
4378 else if (UNARY_CLASS_P (exp) | |
2571 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) | 4379 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) |
2572 : unsignedp) | 4380 : unsignedp) |
2573 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | 4381 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode); |
2574 else | 4382 else |
2575 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | 4383 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode); |
2576 | 4384 |
2577 return op0; | 4385 return op0; |
2578 } | 4386 } |
2579 | 4387 |
2580 case MEM_REF: | 4388 case MEM_REF: |
2586 if (newexp) | 4394 if (newexp) |
2587 return expand_debug_expr (newexp); | 4395 return expand_debug_expr (newexp); |
2588 } | 4396 } |
2589 /* FALLTHROUGH */ | 4397 /* FALLTHROUGH */ |
2590 case INDIRECT_REF: | 4398 case INDIRECT_REF: |
4399 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
2591 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 4400 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
2592 if (!op0) | 4401 if (!op0) |
2593 return NULL; | 4402 return NULL; |
2594 | 4403 |
2595 if (TREE_CODE (exp) == MEM_REF) | 4404 if (TREE_CODE (exp) == MEM_REF) |
2603 | 4412 |
2604 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | 4413 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); |
2605 if (!op1 || !CONST_INT_P (op1)) | 4414 if (!op1 || !CONST_INT_P (op1)) |
2606 return NULL; | 4415 return NULL; |
2607 | 4416 |
2608 op0 = plus_constant (op0, INTVAL (op1)); | 4417 op0 = plus_constant (inner_mode, op0, INTVAL (op1)); |
2609 } | 4418 } |
2610 | 4419 |
2611 if (POINTER_TYPE_P (TREE_TYPE (exp))) | 4420 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
2612 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2613 else | |
2614 as = ADDR_SPACE_GENERIC; | |
2615 | 4421 |
2616 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), | 4422 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), |
2617 op0, as); | 4423 op0, as); |
2618 if (op0 == NULL_RTX) | 4424 if (op0 == NULL_RTX) |
2619 return NULL; | 4425 return NULL; |
2635 op0 = expand_debug_expr | 4441 op0 = expand_debug_expr |
2636 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); | 4442 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); |
2637 if (!op0) | 4443 if (!op0) |
2638 return NULL; | 4444 return NULL; |
2639 | 4445 |
2640 if (POINTER_TYPE_P (TREE_TYPE (exp))) | 4446 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
2641 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2642 else | |
2643 as = ADDR_SPACE_GENERIC; | |
2644 | |
2645 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), | 4447 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), |
2646 op0, as); | 4448 op0, as); |
2647 if (op0 == NULL_RTX) | 4449 if (op0 == NULL_RTX) |
2648 return NULL; | 4450 return NULL; |
2649 | 4451 |
2661 case BIT_FIELD_REF: | 4463 case BIT_FIELD_REF: |
2662 case REALPART_EXPR: | 4464 case REALPART_EXPR: |
2663 case IMAGPART_EXPR: | 4465 case IMAGPART_EXPR: |
2664 case VIEW_CONVERT_EXPR: | 4466 case VIEW_CONVERT_EXPR: |
2665 { | 4467 { |
2666 enum machine_mode mode1; | 4468 machine_mode mode1; |
2667 HOST_WIDE_INT bitsize, bitpos; | 4469 HOST_WIDE_INT bitsize, bitpos; |
2668 tree offset; | 4470 tree offset; |
2669 int volatilep = 0; | 4471 int reversep, volatilep = 0; |
2670 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | 4472 tree tem |
2671 &mode1, &unsignedp, &volatilep, false); | 4473 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1, |
4474 &unsignedp, &reversep, &volatilep); | |
2672 rtx orig_op0; | 4475 rtx orig_op0; |
2673 | 4476 |
2674 if (bitsize == 0) | 4477 if (bitsize == 0) |
2675 return NULL; | 4478 return NULL; |
2676 | 4479 |
2679 if (!op0) | 4482 if (!op0) |
2680 return NULL; | 4483 return NULL; |
2681 | 4484 |
2682 if (offset) | 4485 if (offset) |
2683 { | 4486 { |
2684 enum machine_mode addrmode, offmode; | 4487 machine_mode addrmode, offmode; |
2685 | 4488 |
2686 if (!MEM_P (op0)) | 4489 if (!MEM_P (op0)) |
2687 return NULL; | 4490 return NULL; |
2688 | 4491 |
2689 op0 = XEXP (op0, 0); | 4492 op0 = XEXP (op0, 0); |
2698 offmode = GET_MODE (op1); | 4501 offmode = GET_MODE (op1); |
2699 if (offmode == VOIDmode) | 4502 if (offmode == VOIDmode) |
2700 offmode = TYPE_MODE (TREE_TYPE (offset)); | 4503 offmode = TYPE_MODE (TREE_TYPE (offset)); |
2701 | 4504 |
2702 if (addrmode != offmode) | 4505 if (addrmode != offmode) |
2703 op1 = simplify_gen_subreg (addrmode, op1, offmode, | 4506 op1 = lowpart_subreg (addrmode, op1, offmode); |
2704 subreg_lowpart_offset (addrmode, | |
2705 offmode)); | |
2706 | 4507 |
2707 /* Don't use offset_address here, we don't need a | 4508 /* Don't use offset_address here, we don't need a |
2708 recognizable address, and we don't want to generate | 4509 recognizable address, and we don't want to generate |
2709 code. */ | 4510 code. */ |
2710 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); | 4511 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode, |
4512 op0, op1)); | |
2711 } | 4513 } |
2712 | 4514 |
2713 if (MEM_P (op0)) | 4515 if (MEM_P (op0)) |
2714 { | 4516 { |
2715 if (mode1 == VOIDmode) | 4517 if (mode1 == VOIDmode) |
2716 /* Bitfield. */ | 4518 /* Bitfield. */ |
2717 mode1 = smallest_mode_for_size (bitsize, MODE_INT); | 4519 mode1 = smallest_int_mode_for_size (bitsize); |
2718 if (bitpos >= BITS_PER_UNIT) | 4520 if (bitpos >= BITS_PER_UNIT) |
2719 { | 4521 { |
2720 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | 4522 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); |
2721 bitpos %= BITS_PER_UNIT; | 4523 bitpos %= BITS_PER_UNIT; |
2722 } | 4524 } |
2723 else if (bitpos < 0) | 4525 else if (bitpos < 0) |
2724 { | 4526 { |
2725 HOST_WIDE_INT units | 4527 HOST_WIDE_INT units |
2726 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; | 4528 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; |
2727 op0 = adjust_address_nv (op0, mode1, units); | 4529 op0 = adjust_address_nv (op0, mode1, -units); |
2728 bitpos += units * BITS_PER_UNIT; | 4530 bitpos += units * BITS_PER_UNIT; |
2729 } | 4531 } |
2730 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) | 4532 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) |
2731 op0 = adjust_address_nv (op0, mode, 0); | 4533 op0 = adjust_address_nv (op0, mode, 0); |
2732 else if (GET_MODE (op0) != mode1) | 4534 else if (GET_MODE (op0) != mode1) |
2748 return NULL; | 4550 return NULL; |
2749 | 4551 |
2750 if ((bitpos % BITS_PER_UNIT) == 0 | 4552 if ((bitpos % BITS_PER_UNIT) == 0 |
2751 && bitsize == GET_MODE_BITSIZE (mode1)) | 4553 && bitsize == GET_MODE_BITSIZE (mode1)) |
2752 { | 4554 { |
2753 enum machine_mode opmode = GET_MODE (op0); | 4555 machine_mode opmode = GET_MODE (op0); |
2754 | 4556 |
2755 if (opmode == VOIDmode) | 4557 if (opmode == VOIDmode) |
2756 opmode = TYPE_MODE (TREE_TYPE (tem)); | 4558 opmode = TYPE_MODE (TREE_TYPE (tem)); |
2757 | 4559 |
2758 /* This condition may hold if we're expanding the address | 4560 /* This condition may hold if we're expanding the address |
2778 : TYPE_MODE (TREE_TYPE (tem)), | 4580 : TYPE_MODE (TREE_TYPE (tem)), |
2779 op0, GEN_INT (bitsize), GEN_INT (bitpos)); | 4581 op0, GEN_INT (bitsize), GEN_INT (bitpos)); |
2780 } | 4582 } |
2781 | 4583 |
2782 case ABS_EXPR: | 4584 case ABS_EXPR: |
2783 return gen_rtx_ABS (mode, op0); | 4585 return simplify_gen_unary (ABS, mode, op0, mode); |
2784 | 4586 |
2785 case NEGATE_EXPR: | 4587 case NEGATE_EXPR: |
2786 return gen_rtx_NEG (mode, op0); | 4588 return simplify_gen_unary (NEG, mode, op0, mode); |
2787 | 4589 |
2788 case BIT_NOT_EXPR: | 4590 case BIT_NOT_EXPR: |
2789 return gen_rtx_NOT (mode, op0); | 4591 return simplify_gen_unary (NOT, mode, op0, mode); |
2790 | 4592 |
2791 case FLOAT_EXPR: | 4593 case FLOAT_EXPR: |
2792 if (unsignedp) | 4594 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, |
2793 return gen_rtx_UNSIGNED_FLOAT (mode, op0); | 4595 0))) |
2794 else | 4596 ? UNSIGNED_FLOAT : FLOAT, mode, op0, |
2795 return gen_rtx_FLOAT (mode, op0); | 4597 inner_mode); |
2796 | 4598 |
2797 case FIX_TRUNC_EXPR: | 4599 case FIX_TRUNC_EXPR: |
2798 if (unsignedp) | 4600 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0, |
2799 return gen_rtx_UNSIGNED_FIX (mode, op0); | 4601 inner_mode); |
2800 else | |
2801 return gen_rtx_FIX (mode, op0); | |
2802 | 4602 |
2803 case POINTER_PLUS_EXPR: | 4603 case POINTER_PLUS_EXPR: |
2804 /* For the rare target where pointers are not the same size as | 4604 /* For the rare target where pointers are not the same size as |
2805 size_t, we need to check for mis-matched modes and correct | 4605 size_t, we need to check for mis-matched modes and correct |
2806 the addend. */ | 4606 the addend. */ |
2807 if (op0 && op1 | 4607 if (op0 && op1 |
2808 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode | 4608 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode) |
2809 && GET_MODE (op0) != GET_MODE (op1)) | 4609 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode) |
2810 { | 4610 && op0_mode != op1_mode) |
2811 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))) | 4611 { |
2812 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1); | 4612 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode) |
4613 /* If OP0 is a partial mode, then we must truncate, even | |
4614 if it has the same bitsize as OP1 as GCC's | |
4615 representation of partial modes is opaque. */ | |
4616 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT | |
4617 && (GET_MODE_BITSIZE (op0_mode) | |
4618 == GET_MODE_BITSIZE (op1_mode)))) | |
4619 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode); | |
2813 else | 4620 else |
2814 /* We always sign-extend, regardless of the signedness of | 4621 /* We always sign-extend, regardless of the signedness of |
2815 the operand, because the operand is always unsigned | 4622 the operand, because the operand is always unsigned |
2816 here even if the original C expression is signed. */ | 4623 here even if the original C expression is signed. */ |
2817 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1); | 4624 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode); |
2818 } | 4625 } |
2819 /* Fall through. */ | 4626 /* Fall through. */ |
2820 case PLUS_EXPR: | 4627 case PLUS_EXPR: |
2821 return gen_rtx_PLUS (mode, op0, op1); | 4628 return simplify_gen_binary (PLUS, mode, op0, op1); |
2822 | 4629 |
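The POINTER_PLUS_EXPR fix-up above matters on targets where sizetype is narrower than pointers: the addend is always sign-extended to the pointer's mode before the add, because at this level the offset is just an unsigned bit pattern even when the C-level offset was negative. A standalone illustration of why zero-extending would go wrong (plain C, with hypothetical 64-bit pointer / 32-bit offset widths):

#include <stdint.h>
#include <stdio.h>

int main (void)
{
  uint64_t ptr = 0x100000000ull;   /* 64-bit "pointer" value  */
  uint32_t off = (uint32_t) -8;    /* bit pattern of offset -8  */

  uint64_t zext = ptr + off;       /* ZERO_EXTEND: lands ~4GB away  */
  uint64_t sext = ptr + (uint64_t) (int64_t) (int32_t) off; /* SIGN_EXTEND  */

  printf ("zero-extended add: 0x%llx\n", (unsigned long long) zext);
  printf ("sign-extended add: 0x%llx\n", (unsigned long long) sext);
  return 0;
}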
2823 case MINUS_EXPR: | 4630 case MINUS_EXPR: |
2824 return gen_rtx_MINUS (mode, op0, op1); | 4631 return simplify_gen_binary (MINUS, mode, op0, op1); |
2825 | 4632 |
2826 case MULT_EXPR: | 4633 case MULT_EXPR: |
2827 return gen_rtx_MULT (mode, op0, op1); | 4634 return simplify_gen_binary (MULT, mode, op0, op1); |
2828 | 4635 |
2829 case RDIV_EXPR: | 4636 case RDIV_EXPR: |
2830 case TRUNC_DIV_EXPR: | 4637 case TRUNC_DIV_EXPR: |
2831 case EXACT_DIV_EXPR: | 4638 case EXACT_DIV_EXPR: |
2832 if (unsignedp) | 4639 if (unsignedp) |
2833 return gen_rtx_UDIV (mode, op0, op1); | 4640 return simplify_gen_binary (UDIV, mode, op0, op1); |
2834 else | 4641 else |
2835 return gen_rtx_DIV (mode, op0, op1); | 4642 return simplify_gen_binary (DIV, mode, op0, op1); |
2836 | 4643 |
2837 case TRUNC_MOD_EXPR: | 4644 case TRUNC_MOD_EXPR: |
2838 if (unsignedp) | 4645 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1); |
2839 return gen_rtx_UMOD (mode, op0, op1); | |
2840 else | |
2841 return gen_rtx_MOD (mode, op0, op1); | |
2842 | 4646 |
2843 case FLOOR_DIV_EXPR: | 4647 case FLOOR_DIV_EXPR: |
2844 if (unsignedp) | 4648 if (unsignedp) |
2845 return gen_rtx_UDIV (mode, op0, op1); | 4649 return simplify_gen_binary (UDIV, mode, op0, op1); |
2846 else | 4650 else |
2847 { | 4651 { |
2848 rtx div = gen_rtx_DIV (mode, op0, op1); | 4652 rtx div = simplify_gen_binary (DIV, mode, op0, op1); |
2849 rtx mod = gen_rtx_MOD (mode, op0, op1); | 4653 rtx mod = simplify_gen_binary (MOD, mode, op0, op1); |
2850 rtx adj = floor_sdiv_adjust (mode, mod, op1); | 4654 rtx adj = floor_sdiv_adjust (mode, mod, op1); |
2851 return gen_rtx_PLUS (mode, div, adj); | 4655 return simplify_gen_binary (PLUS, mode, div, adj); |
2852 } | 4656 } |
2853 | 4657 |
2854 case FLOOR_MOD_EXPR: | 4658 case FLOOR_MOD_EXPR: |
2855 if (unsignedp) | 4659 if (unsignedp) |
2856 return gen_rtx_UMOD (mode, op0, op1); | 4660 return simplify_gen_binary (UMOD, mode, op0, op1); |
2857 else | 4661 else |
2858 { | 4662 { |
2859 rtx mod = gen_rtx_MOD (mode, op0, op1); | 4663 rtx mod = simplify_gen_binary (MOD, mode, op0, op1); |
2860 rtx adj = floor_sdiv_adjust (mode, mod, op1); | 4664 rtx adj = floor_sdiv_adjust (mode, mod, op1); |
2861 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 4665 adj = simplify_gen_unary (NEG, mode, |
2862 return gen_rtx_PLUS (mode, mod, adj); | 4666 simplify_gen_binary (MULT, mode, adj, op1), |
4667 mode); | |
4668 return simplify_gen_binary (PLUS, mode, mod, adj); | |
2863 } | 4669 } |
2864 | 4670 |
2865 case CEIL_DIV_EXPR: | 4671 case CEIL_DIV_EXPR: |
2866 if (unsignedp) | 4672 if (unsignedp) |
2867 { | 4673 { |
2868 rtx div = gen_rtx_UDIV (mode, op0, op1); | 4674 rtx div = simplify_gen_binary (UDIV, mode, op0, op1); |
2869 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 4675 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1); |
2870 rtx adj = ceil_udiv_adjust (mode, mod, op1); | 4676 rtx adj = ceil_udiv_adjust (mode, mod, op1); |
2871 return gen_rtx_PLUS (mode, div, adj); | 4677 return simplify_gen_binary (PLUS, mode, div, adj); |
2872 } | 4678 } |
2873 else | 4679 else |
2874 { | 4680 { |
2875 rtx div = gen_rtx_DIV (mode, op0, op1); | 4681 rtx div = simplify_gen_binary (DIV, mode, op0, op1); |
2876 rtx mod = gen_rtx_MOD (mode, op0, op1); | 4682 rtx mod = simplify_gen_binary (MOD, mode, op0, op1); |
2877 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | 4683 rtx adj = ceil_sdiv_adjust (mode, mod, op1); |
2878 return gen_rtx_PLUS (mode, div, adj); | 4684 return simplify_gen_binary (PLUS, mode, div, adj); |
2879 } | 4685 } |
2880 | 4686 |
2881 case CEIL_MOD_EXPR: | 4687 case CEIL_MOD_EXPR: |
2882 if (unsignedp) | 4688 if (unsignedp) |
2883 { | 4689 { |
2884 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 4690 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1); |
2885 rtx adj = ceil_udiv_adjust (mode, mod, op1); | 4691 rtx adj = ceil_udiv_adjust (mode, mod, op1); |
2886 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 4692 adj = simplify_gen_unary (NEG, mode, |
2887 return gen_rtx_PLUS (mode, mod, adj); | 4693 simplify_gen_binary (MULT, mode, adj, op1), |
4694 mode); | |
4695 return simplify_gen_binary (PLUS, mode, mod, adj); | |
2888 } | 4696 } |
2889 else | 4697 else |
2890 { | 4698 { |
2891 rtx mod = gen_rtx_MOD (mode, op0, op1); | 4699 rtx mod = simplify_gen_binary (MOD, mode, op0, op1); |
2892 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | 4700 rtx adj = ceil_sdiv_adjust (mode, mod, op1); |
2893 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 4701 adj = simplify_gen_unary (NEG, mode, |
2894 return gen_rtx_PLUS (mode, mod, adj); | 4702 simplify_gen_binary (MULT, mode, adj, op1), |
4703 mode); | |
4704 return simplify_gen_binary (PLUS, mode, mod, adj); | |
2895 } | 4705 } |
2896 | 4706 |
2897 case ROUND_DIV_EXPR: | 4707 case ROUND_DIV_EXPR: |
2898 if (unsignedp) | 4708 if (unsignedp) |
2899 { | 4709 { |
2900 rtx div = gen_rtx_UDIV (mode, op0, op1); | 4710 rtx div = simplify_gen_binary (UDIV, mode, op0, op1); |
2901 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 4711 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1); |
2902 rtx adj = round_udiv_adjust (mode, mod, op1); | 4712 rtx adj = round_udiv_adjust (mode, mod, op1); |
2903 return gen_rtx_PLUS (mode, div, adj); | 4713 return simplify_gen_binary (PLUS, mode, div, adj); |
2904 } | 4714 } |
2905 else | 4715 else |
2906 { | 4716 { |
2907 rtx div = gen_rtx_DIV (mode, op0, op1); | 4717 rtx div = simplify_gen_binary (DIV, mode, op0, op1); |
2908 rtx mod = gen_rtx_MOD (mode, op0, op1); | 4718 rtx mod = simplify_gen_binary (MOD, mode, op0, op1); |
2909 rtx adj = round_sdiv_adjust (mode, mod, op1); | 4719 rtx adj = round_sdiv_adjust (mode, mod, op1); |
2910 return gen_rtx_PLUS (mode, div, adj); | 4720 return simplify_gen_binary (PLUS, mode, div, adj); |
2911 } | 4721 } |
2912 | 4722 |
2913 case ROUND_MOD_EXPR: | 4723 case ROUND_MOD_EXPR: |
2914 if (unsignedp) | 4724 if (unsignedp) |
2915 { | 4725 { |
2916 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 4726 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1); |
2917 rtx adj = round_udiv_adjust (mode, mod, op1); | 4727 rtx adj = round_udiv_adjust (mode, mod, op1); |
2918 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 4728 adj = simplify_gen_unary (NEG, mode, |
2919 return gen_rtx_PLUS (mode, mod, adj); | 4729 simplify_gen_binary (MULT, mode, adj, op1), |
4730 mode); | |
4731 return simplify_gen_binary (PLUS, mode, mod, adj); | |
2920 } | 4732 } |
2921 else | 4733 else |
2922 { | 4734 { |
2923 rtx mod = gen_rtx_MOD (mode, op0, op1); | 4735 rtx mod = simplify_gen_binary (MOD, mode, op0, op1); |
2924 rtx adj = round_sdiv_adjust (mode, mod, op1); | 4736 rtx adj = round_sdiv_adjust (mode, mod, op1); |
2925 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 4737 adj = simplify_gen_unary (NEG, mode, |
2926 return gen_rtx_PLUS (mode, mod, adj); | 4738 simplify_gen_binary (MULT, mode, adj, op1), |
4739 mode); | |
4740 return simplify_gen_binary (PLUS, mode, mod, adj); | |
2927 } | 4741 } |
2928 | 4742 |
2929 case LSHIFT_EXPR: | 4743 case LSHIFT_EXPR: |
2930 return gen_rtx_ASHIFT (mode, op0, op1); | 4744 return simplify_gen_binary (ASHIFT, mode, op0, op1); |
2931 | 4745 |
2932 case RSHIFT_EXPR: | 4746 case RSHIFT_EXPR: |
2933 if (unsignedp) | 4747 if (unsignedp) |
2934 return gen_rtx_LSHIFTRT (mode, op0, op1); | 4748 return simplify_gen_binary (LSHIFTRT, mode, op0, op1); |
2935 else | 4749 else |
2936 return gen_rtx_ASHIFTRT (mode, op0, op1); | 4750 return simplify_gen_binary (ASHIFTRT, mode, op0, op1); |
2937 | 4751 |
2938 case LROTATE_EXPR: | 4752 case LROTATE_EXPR: |
2939 return gen_rtx_ROTATE (mode, op0, op1); | 4753 return simplify_gen_binary (ROTATE, mode, op0, op1); |
2940 | 4754 |
2941 case RROTATE_EXPR: | 4755 case RROTATE_EXPR: |
2942 return gen_rtx_ROTATERT (mode, op0, op1); | 4756 return simplify_gen_binary (ROTATERT, mode, op0, op1); |
2943 | 4757 |
2944 case MIN_EXPR: | 4758 case MIN_EXPR: |
2945 if (unsignedp) | 4759 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1); |
2946 return gen_rtx_UMIN (mode, op0, op1); | |
2947 else | |
2948 return gen_rtx_SMIN (mode, op0, op1); | |
2949 | 4760 |
2950 case MAX_EXPR: | 4761 case MAX_EXPR: |
2951 if (unsignedp) | 4762 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1); |
2952 return gen_rtx_UMAX (mode, op0, op1); | |
2953 else | |
2954 return gen_rtx_SMAX (mode, op0, op1); | |
2955 | 4763 |
2956 case BIT_AND_EXPR: | 4764 case BIT_AND_EXPR: |
2957 case TRUTH_AND_EXPR: | 4765 case TRUTH_AND_EXPR: |
2958 return gen_rtx_AND (mode, op0, op1); | 4766 return simplify_gen_binary (AND, mode, op0, op1); |
2959 | 4767 |
2960 case BIT_IOR_EXPR: | 4768 case BIT_IOR_EXPR: |
2961 case TRUTH_OR_EXPR: | 4769 case TRUTH_OR_EXPR: |
2962 return gen_rtx_IOR (mode, op0, op1); | 4770 return simplify_gen_binary (IOR, mode, op0, op1); |
2963 | 4771 |
2964 case BIT_XOR_EXPR: | 4772 case BIT_XOR_EXPR: |
2965 case TRUTH_XOR_EXPR: | 4773 case TRUTH_XOR_EXPR: |
2966 return gen_rtx_XOR (mode, op0, op1); | 4774 return simplify_gen_binary (XOR, mode, op0, op1); |
2967 | 4775 |
2968 case TRUTH_ANDIF_EXPR: | 4776 case TRUTH_ANDIF_EXPR: |
2969 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); | 4777 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); |
2970 | 4778 |
2971 case TRUTH_ORIF_EXPR: | 4779 case TRUTH_ORIF_EXPR: |
2972 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); | 4780 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); |
2973 | 4781 |
2974 case TRUTH_NOT_EXPR: | 4782 case TRUTH_NOT_EXPR: |
2975 return gen_rtx_EQ (mode, op0, const0_rtx); | 4783 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx); |
2976 | 4784 |
2977 case LT_EXPR: | 4785 case LT_EXPR: |
2978 if (unsignedp) | 4786 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode, |
2979 return gen_rtx_LTU (mode, op0, op1); | 4787 op0, op1); |
2980 else | |
2981 return gen_rtx_LT (mode, op0, op1); | |
2982 | 4788 |
2983 case LE_EXPR: | 4789 case LE_EXPR: |
2984 if (unsignedp) | 4790 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode, |
2985 return gen_rtx_LEU (mode, op0, op1); | 4791 op0, op1); |
2986 else | |
2987 return gen_rtx_LE (mode, op0, op1); | |
2988 | 4792 |
2989 case GT_EXPR: | 4793 case GT_EXPR: |
2990 if (unsignedp) | 4794 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode, |
2991 return gen_rtx_GTU (mode, op0, op1); | 4795 op0, op1); |
2992 else | |
2993 return gen_rtx_GT (mode, op0, op1); | |
2994 | 4796 |
2995 case GE_EXPR: | 4797 case GE_EXPR: |
2996 if (unsignedp) | 4798 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode, |
2997 return gen_rtx_GEU (mode, op0, op1); | 4799 op0, op1); |
2998 else | |
2999 return gen_rtx_GE (mode, op0, op1); | |
3000 | 4800 |
3001 case EQ_EXPR: | 4801 case EQ_EXPR: |
3002 return gen_rtx_EQ (mode, op0, op1); | 4802 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1); |
3003 | 4803 |
3004 case NE_EXPR: | 4804 case NE_EXPR: |
3005 return gen_rtx_NE (mode, op0, op1); | 4805 return simplify_gen_relational (NE, mode, inner_mode, op0, op1); |
3006 | 4806 |
3007 case UNORDERED_EXPR: | 4807 case UNORDERED_EXPR: |
3008 return gen_rtx_UNORDERED (mode, op0, op1); | 4808 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1); |
3009 | 4809 |
3010 case ORDERED_EXPR: | 4810 case ORDERED_EXPR: |
3011 return gen_rtx_ORDERED (mode, op0, op1); | 4811 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1); |
3012 | 4812 |
3013 case UNLT_EXPR: | 4813 case UNLT_EXPR: |
3014 return gen_rtx_UNLT (mode, op0, op1); | 4814 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1); |
3015 | 4815 |
3016 case UNLE_EXPR: | 4816 case UNLE_EXPR: |
3017 return gen_rtx_UNLE (mode, op0, op1); | 4817 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1); |
3018 | 4818 |
3019 case UNGT_EXPR: | 4819 case UNGT_EXPR: |
3020 return gen_rtx_UNGT (mode, op0, op1); | 4820 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1); |
3021 | 4821 |
3022 case UNGE_EXPR: | 4822 case UNGE_EXPR: |
3023 return gen_rtx_UNGE (mode, op0, op1); | 4823 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1); |
3024 | 4824 |
3025 case UNEQ_EXPR: | 4825 case UNEQ_EXPR: |
3026 return gen_rtx_UNEQ (mode, op0, op1); | 4826 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1); |
3027 | 4827 |
3028 case LTGT_EXPR: | 4828 case LTGT_EXPR: |
3029 return gen_rtx_LTGT (mode, op0, op1); | 4829 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1); |
3030 | 4830 |
3031 case COND_EXPR: | 4831 case COND_EXPR: |
3032 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); | 4832 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); |
3033 | 4833 |
3034 case COMPLEX_EXPR: | 4834 case COMPLEX_EXPR: |
3040 return gen_rtx_CONCAT (mode, op0, op1); | 4840 return gen_rtx_CONCAT (mode, op0, op1); |
3041 | 4841 |
3042 case CONJ_EXPR: | 4842 case CONJ_EXPR: |
3043 if (GET_CODE (op0) == CONCAT) | 4843 if (GET_CODE (op0) == CONCAT) |
3044 return gen_rtx_CONCAT (mode, XEXP (op0, 0), | 4844 return gen_rtx_CONCAT (mode, XEXP (op0, 0), |
3045 gen_rtx_NEG (GET_MODE_INNER (mode), | 4845 simplify_gen_unary (NEG, GET_MODE_INNER (mode), |
3046 XEXP (op0, 1))); | 4846 XEXP (op0, 1), |
4847 GET_MODE_INNER (mode))); | |
3047 else | 4848 else |
3048 { | 4849 { |
3049 enum machine_mode imode = GET_MODE_INNER (mode); | 4850 scalar_mode imode = GET_MODE_INNER (mode); |
3050 rtx re, im; | 4851 rtx re, im; |
3051 | 4852 |
3052 if (MEM_P (op0)) | 4853 if (MEM_P (op0)) |
3053 { | 4854 { |
3054 re = adjust_address_nv (op0, imode, 0); | 4855 re = adjust_address_nv (op0, imode, 0); |
3055 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); | 4856 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); |
3056 } | 4857 } |
3057 else | 4858 else |
3058 { | 4859 { |
3059 enum machine_mode ifmode = int_mode_for_mode (mode); | 4860 scalar_int_mode ifmode; |
3060 enum machine_mode ihmode = int_mode_for_mode (imode); | 4861 scalar_int_mode ihmode; |
3061 rtx halfsize; | 4862 rtx halfsize; |
3062 if (ifmode == BLKmode || ihmode == BLKmode) | 4863 if (!int_mode_for_mode (mode).exists (&ifmode) |
4864 || !int_mode_for_mode (imode).exists (&ihmode)) | |
3063 return NULL; | 4865 return NULL; |
3064 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); | 4866 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); |
3065 re = op0; | 4867 re = op0; |
3066 if (mode != ifmode) | 4868 if (mode != ifmode) |
3067 re = gen_rtx_SUBREG (ifmode, re, 0); | 4869 re = gen_rtx_SUBREG (ifmode, re, 0); |
3084 if (!op0 || !MEM_P (op0)) | 4886 if (!op0 || !MEM_P (op0)) |
3085 { | 4887 { |
3086 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL | 4888 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL |
3087 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL | 4889 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL |
3088 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL) | 4890 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL) |
3089 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))) | 4891 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)) |
4892 || target_for_debug_bind (TREE_OPERAND (exp, 0)))) | |
3090 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0)); | 4893 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0)); |
3091 | 4894 |
3092 if (handled_component_p (TREE_OPERAND (exp, 0))) | 4895 if (handled_component_p (TREE_OPERAND (exp, 0))) |
3093 { | 4896 { |
3094 HOST_WIDE_INT bitoffset, bitsize, maxsize; | 4897 HOST_WIDE_INT bitoffset, bitsize, maxsize; |
4898 bool reverse; | |
3095 tree decl | 4899 tree decl |
3096 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), | 4900 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset, |
3097 &bitoffset, &bitsize, &maxsize); | 4901 &bitsize, &maxsize, &reverse); |
3098 if ((TREE_CODE (decl) == VAR_DECL | 4902 if ((VAR_P (decl) |
3099 || TREE_CODE (decl) == PARM_DECL | 4903 || TREE_CODE (decl) == PARM_DECL |
3100 || TREE_CODE (decl) == RESULT_DECL) | 4904 || TREE_CODE (decl) == RESULT_DECL) |
3101 && !TREE_ADDRESSABLE (decl) | 4905 && (!TREE_ADDRESSABLE (decl) |
4906 || target_for_debug_bind (decl)) | |
3102 && (bitoffset % BITS_PER_UNIT) == 0 | 4907 && (bitoffset % BITS_PER_UNIT) == 0 |
3103 && bitsize > 0 | 4908 && bitsize > 0 |
3104 && bitsize == maxsize) | 4909 && bitsize == maxsize) |
3105 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl), | 4910 { |
3106 bitoffset / BITS_PER_UNIT); | 4911 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl); |
4912 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT); | |
4913 } | |
3107 } | 4914 } |
3108 | 4915 |
4916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF | |
4917 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) | |
4918 == ADDR_EXPR) | |
4919 { | |
4920 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), | |
4921 0)); | |
4922 if (op0 != NULL | |
4923 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR | |
4924 || (GET_CODE (op0) == PLUS | |
4925 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR | |
4926 && CONST_INT_P (XEXP (op0, 1))))) | |
4927 { | |
4928 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), | |
4929 1)); | |
4930 if (!op1 || !CONST_INT_P (op1)) | |
4931 return NULL; | |
4932 | |
4933 return plus_constant (mode, op0, INTVAL (op1)); | |
4934 } | |
4935 } | |
4936 | |
3109 return NULL; | 4937 return NULL; |
3110 } | 4938 } |
3111 | 4939 |
3112 as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); | 4940 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); |
3113 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as); | 4941 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)); |
4942 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as); | |
3114 | 4943 |
3115 return op0; | 4944 return op0; |
3116 | 4945 |
3117 case VECTOR_CST: | 4946 case VECTOR_CST: |
3118 exp = build_constructor_from_list (TREE_TYPE (exp), | 4947 { |
3119 TREE_VECTOR_CST_ELTS (exp)); | 4948 unsigned i, nelts; |
3120 /* Fall through. */ | 4949 |
4950 nelts = VECTOR_CST_NELTS (exp); | |
4951 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts)); | |
4952 | |
4953 for (i = 0; i < nelts; ++i) | |
4954 { | |
4955 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i)); | |
4956 if (!op1) | |
4957 return NULL; | |
4958 XVECEXP (op0, 0, i) = op1; | |
4959 } | |
4960 | |
4961 return op0; | |
4962 } | |
3121 | 4963 |
3122 case CONSTRUCTOR: | 4964 case CONSTRUCTOR: |
3123 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | 4965 if (TREE_CLOBBER_P (exp)) |
4966 return NULL; | |
4967 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | |
3124 { | 4968 { |
3125 unsigned i; | 4969 unsigned i; |
3126 tree val; | 4970 tree val; |
3127 | 4971 |
3128 op0 = gen_rtx_CONCATN | 4972 op0 = gen_rtx_CONCATN |
3157 /* ??? Maybe handle some builtins? */ | 5001 /* ??? Maybe handle some builtins? */ |
3158 return NULL; | 5002 return NULL; |
3159 | 5003 |
3160 case SSA_NAME: | 5004 case SSA_NAME: |
3161 { | 5005 { |
3162 gimple g = get_gimple_for_ssa_name (exp); | 5006 gimple *g = get_gimple_for_ssa_name (exp); |
3163 if (g) | 5007 if (g) |
3164 { | 5008 { |
3165 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g)); | 5009 tree t = NULL_TREE; |
5010 if (deep_ter_debug_map) | |
5011 { | |
5012 tree *slot = deep_ter_debug_map->get (exp); | |
5013 if (slot) | |
5014 t = *slot; | |
5015 } | |
5016 if (t == NULL_TREE) | |
5017 t = gimple_assign_rhs_to_tree (g); | |
5018 op0 = expand_debug_expr (t); | |
3166 if (!op0) | 5019 if (!op0) |
3167 return NULL; | 5020 return NULL; |
3168 } | 5021 } |
3169 else | 5022 else |
3170 { | 5023 { |
5024 /* If this is a reference to the incoming value of a | 
5025 parameter that is never used in the code, or where the | 
5026 incoming value is never used in the code, use the | 
5027 PARM_DECL's DECL_RTL if set. */ | 
5028 if (SSA_NAME_IS_DEFAULT_DEF (exp) | |
5029 && SSA_NAME_VAR (exp) | |
5030 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL | |
5031 && has_zero_uses (exp)) | |
5032 { | |
5033 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp)); | |
5034 if (op0) | |
5035 goto adjust_mode; | |
5036 op0 = expand_debug_expr (SSA_NAME_VAR (exp)); | |
5037 if (op0) | |
5038 goto adjust_mode; | |
5039 } | |
5040 | |
3171 int part = var_to_partition (SA.map, exp); | 5041 int part = var_to_partition (SA.map, exp); |
3172 | 5042 |
3173 if (part == NO_PARTITION) | 5043 if (part == NO_PARTITION) |
3174 return NULL; | 5044 return NULL; |
3175 | 5045 |
3187 case REALIGN_LOAD_EXPR: | 5057 case REALIGN_LOAD_EXPR: |
3188 case REDUC_MAX_EXPR: | 5058 case REDUC_MAX_EXPR: |
3189 case REDUC_MIN_EXPR: | 5059 case REDUC_MIN_EXPR: |
3190 case REDUC_PLUS_EXPR: | 5060 case REDUC_PLUS_EXPR: |
3191 case VEC_COND_EXPR: | 5061 case VEC_COND_EXPR: |
3192 case VEC_EXTRACT_EVEN_EXPR: | |
3193 case VEC_EXTRACT_ODD_EXPR: | |
3194 case VEC_INTERLEAVE_HIGH_EXPR: | |
3195 case VEC_INTERLEAVE_LOW_EXPR: | |
3196 case VEC_LSHIFT_EXPR: | |
3197 case VEC_PACK_FIX_TRUNC_EXPR: | 5062 case VEC_PACK_FIX_TRUNC_EXPR: |
3198 case VEC_PACK_SAT_EXPR: | 5063 case VEC_PACK_SAT_EXPR: |
3199 case VEC_PACK_TRUNC_EXPR: | 5064 case VEC_PACK_TRUNC_EXPR: |
3200 case VEC_RSHIFT_EXPR: | |
3201 case VEC_UNPACK_FLOAT_HI_EXPR: | 5065 case VEC_UNPACK_FLOAT_HI_EXPR: |
3202 case VEC_UNPACK_FLOAT_LO_EXPR: | 5066 case VEC_UNPACK_FLOAT_LO_EXPR: |
3203 case VEC_UNPACK_HI_EXPR: | 5067 case VEC_UNPACK_HI_EXPR: |
3204 case VEC_UNPACK_LO_EXPR: | 5068 case VEC_UNPACK_LO_EXPR: |
3205 case VEC_WIDEN_MULT_HI_EXPR: | 5069 case VEC_WIDEN_MULT_HI_EXPR: |
3206 case VEC_WIDEN_MULT_LO_EXPR: | 5070 case VEC_WIDEN_MULT_LO_EXPR: |
5071 case VEC_WIDEN_MULT_EVEN_EXPR: | |
5072 case VEC_WIDEN_MULT_ODD_EXPR: | |
5073 case VEC_WIDEN_LSHIFT_HI_EXPR: | |
5074 case VEC_WIDEN_LSHIFT_LO_EXPR: | |
5075 case VEC_PERM_EXPR: | |
3207 return NULL; | 5076 return NULL; |
3208 | 5077 |
3209 /* Misc codes. */ | 5078 /* Misc codes. */ |
3210 case ADDR_SPACE_CONVERT_EXPR: | 5079 case ADDR_SPACE_CONVERT_EXPR: |
3211 case FIXED_CONVERT_EXPR: | 5080 case FIXED_CONVERT_EXPR: |
3212 case OBJ_TYPE_REF: | 5081 case OBJ_TYPE_REF: |
3213 case WITH_SIZE_EXPR: | 5082 case WITH_SIZE_EXPR: |
5083 case BIT_INSERT_EXPR: | |
3214 return NULL; | 5084 return NULL; |
3215 | 5085 |
3216 case DOT_PROD_EXPR: | 5086 case DOT_PROD_EXPR: |
3217 if (SCALAR_INT_MODE_P (GET_MODE (op0)) | 5087 if (SCALAR_INT_MODE_P (GET_MODE (op0)) |
3218 && SCALAR_INT_MODE_P (mode)) | 5088 && SCALAR_INT_MODE_P (mode)) |
3219 { | 5089 { |
3220 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | 5090 op0 |
3221 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | 5091 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, |
3222 else | 5092 0))) |
3223 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | 5093 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0, |
3224 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) | 5094 inner_mode); |
3225 op1 = gen_rtx_ZERO_EXTEND (mode, op1); | 5095 op1 |
3226 else | 5096 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, |
3227 op1 = gen_rtx_SIGN_EXTEND (mode, op1); | 5097 1))) |
3228 op0 = gen_rtx_MULT (mode, op0, op1); | 5098 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1, |
3229 return gen_rtx_PLUS (mode, op0, op2); | 5099 inner_mode); |
5100 op0 = simplify_gen_binary (MULT, mode, op0, op1); | |
5101 return simplify_gen_binary (PLUS, mode, op0, op2); | |
3230 } | 5102 } |
3231 return NULL; | 5103 return NULL; |
3232 | 5104 |
3233 case WIDEN_MULT_EXPR: | 5105 case WIDEN_MULT_EXPR: |
3234 case WIDEN_MULT_PLUS_EXPR: | 5106 case WIDEN_MULT_PLUS_EXPR: |
3235 case WIDEN_MULT_MINUS_EXPR: | 5107 case WIDEN_MULT_MINUS_EXPR: |
3236 if (SCALAR_INT_MODE_P (GET_MODE (op0)) | 5108 if (SCALAR_INT_MODE_P (GET_MODE (op0)) |
3237 && SCALAR_INT_MODE_P (mode)) | 5109 && SCALAR_INT_MODE_P (mode)) |
3238 { | 5110 { |
3239 enum machine_mode inner_mode = GET_MODE (op0); | 5111 inner_mode = GET_MODE (op0); |
3240 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | 5112 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
3241 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode); | 5113 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode); |
3242 else | 5114 else |
3243 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode); | 5115 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode); |
3244 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) | 5116 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) |
3245 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode); | 5117 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode); |
3246 else | 5118 else |
3247 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode); | 5119 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode); |
3248 op0 = gen_rtx_MULT (mode, op0, op1); | 5120 op0 = simplify_gen_binary (MULT, mode, op0, op1); |
3249 if (TREE_CODE (exp) == WIDEN_MULT_EXPR) | 5121 if (TREE_CODE (exp) == WIDEN_MULT_EXPR) |
3250 return op0; | 5122 return op0; |
3251 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR) | 5123 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR) |
3252 return gen_rtx_PLUS (mode, op0, op2); | 5124 return simplify_gen_binary (PLUS, mode, op0, op2); |
3253 else | 5125 else |
3254 return gen_rtx_MINUS (mode, op2, op0); | 5126 return simplify_gen_binary (MINUS, mode, op2, op0); |
3255 } | 5127 } |
3256 return NULL; | 5128 return NULL; |
3257 | 5129 |
5130 case MULT_HIGHPART_EXPR: | |
5131 /* ??? Similar to the above. */ | |
5132 return NULL; | |
5133 | |
3258 case WIDEN_SUM_EXPR: | 5134 case WIDEN_SUM_EXPR: |
5135 case WIDEN_LSHIFT_EXPR: | |
3259 if (SCALAR_INT_MODE_P (GET_MODE (op0)) | 5136 if (SCALAR_INT_MODE_P (GET_MODE (op0)) |
3260 && SCALAR_INT_MODE_P (mode)) | 5137 && SCALAR_INT_MODE_P (mode)) |
3261 { | 5138 { |
3262 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | 5139 op0 |
3263 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | 5140 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, |
3264 else | 5141 0))) |
3265 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | 5142 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0, |
3266 return gen_rtx_PLUS (mode, op0, op1); | 5143 inner_mode); |
5144 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR | |
5145 ? ASHIFT : PLUS, mode, op0, op1); | |
3267 } | 5146 } |
3268 return NULL; | 5147 return NULL; |
3269 | 5148 |
3270 case FMA_EXPR: | 5149 case FMA_EXPR: |
3271 return gen_rtx_FMA (mode, op0, op1, op2); | 5150 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2); |
3272 | 5151 |
3273 default: | 5152 default: |
3274 flag_unsupported: | 5153 flag_unsupported: |
3275 #ifdef ENABLE_CHECKING | 5154 if (flag_checking) |
3276 debug_tree (exp); | 5155 { |
3277 gcc_unreachable (); | 5156 debug_tree (exp); |
3278 #else | 5157 gcc_unreachable (); |
5158 } | |
3279 return NULL; | 5159 return NULL; |
3280 #endif | 5160 } |
3281 } | 5161 } |
5162 |
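[Editor's note] The DOT_PROD_EXPR and WIDEN_MULT_* cases above rebuild widening arithmetic for debug values: each narrow operand is zero- or sign-extended according to its type, the multiply happens in the wide mode, and any accumulator is added or subtracted afterwards. A C analogue of the value computed for a signed WIDEN_MULT_PLUS_EXPR (a sketch only; short and int stand in for the narrow and wide machine modes):

    /* Sketch: what the debug RTL built above computes for a signed
       WIDEN_MULT_PLUS_EXPR.  The extensions happen before the multiply,
       so the product is formed in the wide mode.  */
    int
    widen_mult_plus (short a, short b, int acc)
    {
      return (int) a * (int) b + acc;
    }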
5163 /* Return an RTX equivalent to the source bind value of the tree expression | |
5164 EXP. */ | |
5165 | |
5166 static rtx | |
5167 expand_debug_source_expr (tree exp) | |
5168 { | |
5169 rtx op0 = NULL_RTX; | |
5170 machine_mode mode = VOIDmode, inner_mode; | |
5171 | |
5172 switch (TREE_CODE (exp)) | |
5173 { | |
5174 case PARM_DECL: | |
5175 { | |
5176 mode = DECL_MODE (exp); | |
5177 op0 = expand_debug_parm_decl (exp); | |
5178 if (op0) | |
5179 break; | |
5180 /* See if this isn't an argument that has been completely | |
5181 optimized out. */ | |
5182 if (!DECL_RTL_SET_P (exp) | |
5183 && !DECL_INCOMING_RTL (exp) | |
5184 && DECL_ABSTRACT_ORIGIN (current_function_decl)) | |
5185 { | |
5186 tree aexp = DECL_ORIGIN (exp); | |
5187 if (DECL_CONTEXT (aexp) | |
5188 == DECL_ABSTRACT_ORIGIN (current_function_decl)) | |
5189 { | |
5190 vec<tree, va_gc> **debug_args; | |
5191 unsigned int ix; | |
5192 tree ddecl; | |
5193 debug_args = decl_debug_args_lookup (current_function_decl); | |
5194 if (debug_args != NULL) | |
5195 { | |
5196 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); | |
5197 ix += 2) | |
5198 if (ddecl == aexp) | |
5199 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp); | |
5200 } | |
5201 } | |
5202 } | |
5203 break; | |
5204 } | |
5205 default: | |
5206 break; | |
5207 } | |
5208 | |
5209 if (op0 == NULL_RTX) | |
5210 return NULL_RTX; | |
5211 | |
5212 inner_mode = GET_MODE (op0); | |
5213 if (mode == inner_mode) | |
5214 return op0; | |
5215 | |
5216 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | |
5217 { | |
5218 if (GET_MODE_UNIT_BITSIZE (mode) | |
5219 == GET_MODE_UNIT_BITSIZE (inner_mode)) | |
5220 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | |
5221 else if (GET_MODE_UNIT_BITSIZE (mode) | |
5222 < GET_MODE_UNIT_BITSIZE (inner_mode)) | |
5223 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | |
5224 else | |
5225 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | |
5226 } | |
5227 else if (FLOAT_MODE_P (mode)) | |
5228 gcc_unreachable (); | |
5229 else if (FLOAT_MODE_P (inner_mode)) | |
5230 { | |
5231 if (TYPE_UNSIGNED (TREE_TYPE (exp))) | |
5232 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
5233 else | |
5234 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
5235 } | |
5236 else if (GET_MODE_UNIT_PRECISION (mode) | |
5237 == GET_MODE_UNIT_PRECISION (inner_mode)) | |
5238 op0 = lowpart_subreg (mode, op0, inner_mode); | |
5239 else if (GET_MODE_UNIT_PRECISION (mode) | |
5240 < GET_MODE_UNIT_PRECISION (inner_mode)) | |
5241 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode); | |
5242 else if (TYPE_UNSIGNED (TREE_TYPE (exp))) | |
5243 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode); | |
5244 else | |
5245 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode); | |
5246 | |
5247 return op0; | |
5248 } | |
5249 | |
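[Editor's note] The mode-adjustment ladder at the end of expand_debug_source_expr mirrors C's conversion rules: same-width floats are reinterpreted via a subreg, float-to-float width changes use truncate/extend, float-to-integer uses a fix operation, and integer widening picks zero- or sign-extension from the signedness of the source type. A minimal C analogue of the integer-widening branch (a sketch; the types stand in for machine modes):

    /* Sketch: widening an integer debug value chooses the extension by
       the signedness of the source type, as the ZERO_EXTEND/SIGN_EXTEND
       branch above does.  */
    long
    widen_debug_value (int value, int value_is_unsigned)
    {
      return value_is_unsigned
	     ? (long) (unsigned int) value   /* ZERO_EXTEND */
	     : (long) value;                 /* SIGN_EXTEND */
    }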
5250 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity. |
5251 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything | |
5252 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */ | |
5253 | |
5254 static void | |
5255 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth) | |
5256 { | |
5257 rtx exp = *exp_p; | |
5258 | |
5259 if (exp == NULL_RTX) | |
5260 return; | |
5261 | |
5262 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER) | |
5263 return; | |
5264 | |
5265 if (depth == 4) | |
5266 { | |
5267 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */ | |
5268 rtx dval = make_debug_expr_from_rtl (exp); | |
5269 | |
5270 /* Emit a debug bind insn before INSN. */ | |
5271 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp), | |
5272 DEBUG_EXPR_TREE_DECL (dval), exp, | |
5273 VAR_INIT_STATUS_INITIALIZED); | |
5274 | |
5275 emit_debug_insn_before (bind, insn); | |
5276 *exp_p = dval; | |
5277 return; | |
5278 } | |
5279 | |
5280 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp)); | |
5281 int i, j; | |
5282 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++) | |
5283 switch (*format_ptr++) | |
5284 { | |
5285 case 'e': | |
5286 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1); | |
5287 break; | |
5288 | |
5289 case 'E': | |
5290 case 'V': | |
5291 for (j = 0; j < XVECLEN (exp, i); j++) | |
5292 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1); | |
5293 break; | |
5294 | |
5295 default: | |
5296 break; | |
5297 } | |
3282 } | 5298 } |
3283 | 5299 |
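[Editor's note] avoid_complex_debug_insns caps rtl nesting at four levels by binding any deeper subexpression to a DEBUG_EXPR temporary emitted in its own debug insn. A standalone sketch of the same idea over a toy expression tree (the node type and the printed binding are hypothetical, for illustration only):

    #include <stdio.h>

    /* Toy expression node: a node with no operands acts as a leaf.  */
    struct expr { const char *op; struct expr *lhs, *rhs; };

    int next_temp;

    /* Walk E; once a non-leaf sits four levels deep, bind the whole
       subtree to a fresh temporary and let the node become a leaf, so
       no single expression nests without bound.  */
    void
    flatten (struct expr *e, int depth)
    {
      if (!e || (!e->lhs && !e->rhs))
	return;
      if (depth == 4)
	{
	  printf ("t%d = %s (...)\n", next_temp++, e->op);
	  e->lhs = e->rhs = NULL;
	  return;
	}
      flatten (e->lhs, depth + 1);
      flatten (e->rhs, depth + 1);
    }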
3284 /* Expand the _LOCs in debug insns. We run this after expanding all | 5300 /* Expand the _LOCs in debug insns. We run this after expanding all |
3285 regular insns, so that any variables referenced in the function | 5301 regular insns, so that any variables referenced in the function |
3286 will have their DECL_RTLs set. */ | 5302 will have their DECL_RTLs set. */ |
3287 | 5303 |
3288 static void | 5304 static void |
3289 expand_debug_locations (void) | 5305 expand_debug_locations (void) |
3290 { | 5306 { |
3291 rtx insn; | 5307 rtx_insn *insn; |
3292 rtx last = get_last_insn (); | 5308 rtx_insn *last = get_last_insn (); |
3293 int save_strict_alias = flag_strict_aliasing; | 5309 int save_strict_alias = flag_strict_aliasing; |
3294 | 5310 |
3295 /* New alias sets while setting up memory attributes cause | 5311 /* New alias sets while setting up memory attributes cause |
3296 -fcompare-debug failures, even though they don't bring about any | 5312 -fcompare-debug failures, even though they don't bring about any |
3297 codegen changes. */ | 5313 codegen changes. */ |
3300 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | 5316 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
3301 if (DEBUG_INSN_P (insn)) | 5317 if (DEBUG_INSN_P (insn)) |
3302 { | 5318 { |
3303 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); | 5319 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); |
3304 rtx val; | 5320 rtx val; |
3305 enum machine_mode mode; | 5321 rtx_insn *prev_insn, *insn2; |
5322 machine_mode mode; | |
3306 | 5323 |
3307 if (value == NULL_TREE) | 5324 if (value == NULL_TREE) |
3308 val = NULL_RTX; | 5325 val = NULL_RTX; |
3309 else | 5326 else |
3310 { | 5327 { |
3311 val = expand_debug_expr (value); | 5328 if (INSN_VAR_LOCATION_STATUS (insn) |
5329 == VAR_INIT_STATUS_UNINITIALIZED) | |
5330 val = expand_debug_source_expr (value); | |
5331 /* The avoid_deep_ter_for_debug function inserts | |
5332 debug bind stmts after SSA_NAME definition, with the | |
5333 SSA_NAME as the whole bind location. Temporarily disable |
5334 expansion of that SSA_NAME into the DEBUG_EXPR_DECL | |
5335 being defined in this DEBUG_INSN. */ | |
5336 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME) | |
5337 { | |
5338 tree *slot = deep_ter_debug_map->get (value); | |
5339 if (slot) | |
5340 { | |
5341 if (*slot == INSN_VAR_LOCATION_DECL (insn)) | |
5342 *slot = NULL_TREE; | |
5343 else | |
5344 slot = NULL; | |
5345 } | |
5346 val = expand_debug_expr (value); | |
5347 if (slot) | |
5348 *slot = INSN_VAR_LOCATION_DECL (insn); | |
5349 } | |
5350 else | |
5351 val = expand_debug_expr (value); | |
3312 gcc_assert (last == get_last_insn ()); | 5352 gcc_assert (last == get_last_insn ()); |
3313 } | 5353 } |
3314 | 5354 |
3315 if (!val) | 5355 if (!val) |
3316 val = gen_rtx_UNKNOWN_VAR_LOC (); | 5356 val = gen_rtx_UNKNOWN_VAR_LOC (); |
3318 { | 5358 { |
3319 mode = GET_MODE (INSN_VAR_LOCATION (insn)); | 5359 mode = GET_MODE (INSN_VAR_LOCATION (insn)); |
3320 | 5360 |
3321 gcc_assert (mode == GET_MODE (val) | 5361 gcc_assert (mode == GET_MODE (val) |
3322 || (GET_MODE (val) == VOIDmode | 5362 || (GET_MODE (val) == VOIDmode |
3323 && (CONST_INT_P (val) | 5363 && (CONST_SCALAR_INT_P (val) |
3324 || GET_CODE (val) == CONST_FIXED | 5364 || GET_CODE (val) == CONST_FIXED |
3325 || GET_CODE (val) == CONST_DOUBLE | |
3326 || GET_CODE (val) == LABEL_REF))); | 5365 || GET_CODE (val) == LABEL_REF))); |
3327 } | 5366 } |
3328 | 5367 |
3329 INSN_VAR_LOCATION_LOC (insn) = val; | 5368 INSN_VAR_LOCATION_LOC (insn) = val; |
5369 prev_insn = PREV_INSN (insn); | |
5370 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2)) | |
5371 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0); | |
3330 } | 5372 } |
3331 | 5373 |
3332 flag_strict_aliasing = save_strict_alias; | 5374 flag_strict_aliasing = save_strict_alias; |
3333 } | 5375 } |
3334 | 5376 |
3335 /* Expand basic block BB from GIMPLE trees to RTL. */ | 5377 /* Swap the operands of commutative operations so the more |
3336 | 5378 expensive one is expanded first. */ |
3337 static basic_block | 5379 |
3338 expand_gimple_basic_block (basic_block bb) | 5380 static void |
3339 { | 5381 reorder_operands (basic_block bb) |
5382 { | |
5383 unsigned int *lattice; /* Holds the cost of each statement. */ |
5384 unsigned int i = 0, n = 0; | |
3340 gimple_stmt_iterator gsi; | 5385 gimple_stmt_iterator gsi; |
3341 gimple_seq stmts; | 5386 gimple_seq stmts; |
3342 gimple stmt = NULL; | 5387 gimple *stmt; |
3343 rtx note, last; | 5388 bool swap; |
5389 tree op0, op1; | |
5390 ssa_op_iter iter; | |
5391 use_operand_p use_p; | |
5392 gimple *def0, *def1; | |
5393 | |
5394 /* Compute cost of each statement using estimate_num_insns. */ | |
5395 stmts = bb_seq (bb); | |
5396 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi)) | |
5397 { | |
5398 stmt = gsi_stmt (gsi); | |
5399 if (!is_gimple_debug (stmt)) | |
5400 gimple_set_uid (stmt, n++); | |
5401 } | |
5402 lattice = XNEWVEC (unsigned int, n); | |
5403 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi)) | |
5404 { | |
5405 unsigned cost; | |
5406 stmt = gsi_stmt (gsi); | |
5407 if (is_gimple_debug (stmt)) | |
5408 continue; | |
5409 cost = estimate_num_insns (stmt, &eni_size_weights); | |
5410 lattice[i] = cost; | |
5411 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) | |
5412 { | |
5413 tree use = USE_FROM_PTR (use_p); | |
5414 gimple *def_stmt; | |
5415 if (TREE_CODE (use) != SSA_NAME) | |
5416 continue; | |
5417 def_stmt = get_gimple_for_ssa_name (use); | |
5418 if (!def_stmt) | |
5419 continue; | |
5420 lattice[i] += lattice[gimple_uid (def_stmt)]; | |
5421 } | |
5422 i++; | |
5423 if (!is_gimple_assign (stmt) | |
5424 || !commutative_tree_code (gimple_assign_rhs_code (stmt))) | |
5425 continue; | |
5426 op0 = gimple_op (stmt, 1); | |
5427 op1 = gimple_op (stmt, 2); | |
5428 if (TREE_CODE (op0) != SSA_NAME | |
5429 || TREE_CODE (op1) != SSA_NAME) | |
5430 continue; | |
5431 /* Swap operands if the second one is more expensive. */ | |
5432 def0 = get_gimple_for_ssa_name (op0); | |
5433 def1 = get_gimple_for_ssa_name (op1); | |
5434 if (!def1) | |
5435 continue; | |
5436 swap = false; | |
5437 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)]) | |
5438 swap = true; | |
5439 if (swap) | |
5440 { | |
5441 if (dump_file && (dump_flags & TDF_DETAILS)) | |
5442 { | |
5443 fprintf (dump_file, "Swap operands in stmt:\n"); | |
5444 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
5445 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n", | |
5446 def0 ? lattice[gimple_uid (def0)] : 0, | |
5447 lattice[gimple_uid (def1)]); | |
5448 } | |
5449 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt), | |
5450 gimple_assign_rhs2_ptr (stmt)); | |
5451 } | |
5452 } | |
5453 XDELETE (lattice); | |
5454 } | |
5455 | |
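[Editor's note] reorder_operands gives each statement a cost equal to its own size estimate plus the (memoized, uid-indexed) costs of the statements defining its SSA uses, then swaps commutative operands when the right-hand defining chain is costlier. A minimal, un-memoized sketch of that rule (the stmt layout here is hypothetical; the real code computes the lattice bottom-up rather than recursively):

    /* Toy statement: own size estimate plus up to two defining stmts.  */
    struct stmt { unsigned own_cost; struct stmt *def0, *def1; };

    /* Cost of a statement = its own cost plus everything feeding it.  */
    unsigned
    chain_cost (const struct stmt *s)
    {
      if (!s)
	return 0;
      return s->own_cost + chain_cost (s->def0) + chain_cost (s->def1);
    }

    /* Swap so the more expensive operand chain is expanded first.  */
    void
    maybe_swap (struct stmt *s)
    {
      if (chain_cost (s->def1) > chain_cost (s->def0))
	{
	  struct stmt *tmp = s->def0;
	  s->def0 = s->def1;
	  s->def1 = tmp;
	}
    }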
5456 /* Expand basic block BB from GIMPLE trees to RTL. */ | |
5457 | |
5458 static basic_block | |
5459 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls) | |
5460 { | |
5461 gimple_stmt_iterator gsi; | |
5462 gimple_seq stmts; | |
5463 gimple *stmt = NULL; | |
5464 rtx_note *note; | |
5465 rtx_insn *last; | |
3344 edge e; | 5466 edge e; |
3345 edge_iterator ei; | 5467 edge_iterator ei; |
3346 void **elt; | |
3347 | 5468 |
3348 if (dump_file) | 5469 if (dump_file) |
3349 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", | 5470 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", |
3350 bb->index); | 5471 bb->index); |
3351 | 5472 |
3352 /* Note that since we are now transitioning from GIMPLE to RTL, we | 5473 /* Note that since we are now transitioning from GIMPLE to RTL, we |
3353 cannot use the gsi_*_bb() routines because they expect the basic | 5474 cannot use the gsi_*_bb() routines because they expect the basic |
3354 block to be in GIMPLE, instead of RTL. Therefore, we need to | 5475 block to be in GIMPLE, instead of RTL. Therefore, we need to |
3355 access the BB sequence directly. */ | 5476 access the BB sequence directly. */ |
5477 if (optimize) | |
5478 reorder_operands (bb); | |
3356 stmts = bb_seq (bb); | 5479 stmts = bb_seq (bb); |
3357 bb->il.gimple = NULL; | 5480 bb->il.gimple.seq = NULL; |
5481 bb->il.gimple.phi_nodes = NULL; | |
3358 rtl_profile_for_bb (bb); | 5482 rtl_profile_for_bb (bb); |
3359 init_rtl_bb_info (bb); | 5483 init_rtl_bb_info (bb); |
3360 bb->flags |= BB_RTL; | 5484 bb->flags |= BB_RTL; |
3361 | 5485 |
3362 /* Remove the RETURN_EXPR if we may fall through to the exit | 5486 /* Remove the RETURN_EXPR if we may fall through to the exit |
3363 instead. */ | 5487 instead. */ |
3364 gsi = gsi_last (stmts); | 5488 gsi = gsi_last (stmts); |
3365 if (!gsi_end_p (gsi) | 5489 if (!gsi_end_p (gsi) |
3366 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) | 5490 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) |
3367 { | 5491 { |
3368 gimple ret_stmt = gsi_stmt (gsi); | 5492 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi)); |
3369 | 5493 |
3370 gcc_assert (single_succ_p (bb)); | 5494 gcc_assert (single_succ_p (bb)); |
3371 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | 5495 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)); |
3372 | 5496 |
3373 if (bb->next_bb == EXIT_BLOCK_PTR | 5497 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun) |
3374 && !gimple_return_retval (ret_stmt)) | 5498 && !gimple_return_retval (ret_stmt)) |
3375 { | 5499 { |
3376 gsi_remove (&gsi, false); | 5500 gsi_remove (&gsi, false); |
3377 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; | 5501 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; |
3378 } | 5502 } |
3384 stmt = gsi_stmt (gsi); | 5508 stmt = gsi_stmt (gsi); |
3385 if (gimple_code (stmt) != GIMPLE_LABEL) | 5509 if (gimple_code (stmt) != GIMPLE_LABEL) |
3386 stmt = NULL; | 5510 stmt = NULL; |
3387 } | 5511 } |
3388 | 5512 |
3389 elt = pointer_map_contains (lab_rtx_for_bb, bb); | 5513 rtx_code_label **elt = lab_rtx_for_bb->get (bb); |
3390 | 5514 |
3391 if (stmt || elt) | 5515 if (stmt || elt) |
3392 { | 5516 { |
3393 last = get_last_insn (); | 5517 last = get_last_insn (); |
3394 | 5518 |
3397 expand_gimple_stmt (stmt); | 5521 expand_gimple_stmt (stmt); |
3398 gsi_next (&gsi); | 5522 gsi_next (&gsi); |
3399 } | 5523 } |
3400 | 5524 |
3401 if (elt) | 5525 if (elt) |
3402 emit_label ((rtx) *elt); | 5526 emit_label (*elt); |
3403 | 5527 |
3404 /* Java emits line number notes in the top of labels. | |
3405 ??? Make this go away once line number notes are obsoleted. */ | |
3406 BB_HEAD (bb) = NEXT_INSN (last); | 5528 BB_HEAD (bb) = NEXT_INSN (last); |
3407 if (NOTE_P (BB_HEAD (bb))) | 5529 if (NOTE_P (BB_HEAD (bb))) |
3408 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); | 5530 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); |
3409 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); | 5531 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); |
3410 | 5532 |
3411 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 5533 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
3412 } | 5534 } |
3413 else | 5535 else |
3414 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); | 5536 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK); |
3415 | 5537 |
3416 NOTE_BASIC_BLOCK (note) = bb; | 5538 NOTE_BASIC_BLOCK (note) = bb; |
3417 | 5539 |
3418 for (; !gsi_end_p (gsi); gsi_next (&gsi)) | 5540 for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
3419 { | 5541 { |
3449 && SA.values | 5571 && SA.values |
3450 && !is_gimple_debug (stmt)) | 5572 && !is_gimple_debug (stmt)) |
3451 { | 5573 { |
3452 ssa_op_iter iter; | 5574 ssa_op_iter iter; |
3453 tree op; | 5575 tree op; |
3454 gimple def; | 5576 gimple *def; |
3455 | 5577 |
3456 location_t sloc = get_curr_insn_source_location (); | 5578 location_t sloc = curr_insn_location (); |
3457 tree sblock = get_curr_insn_block (); | |
3458 | 5579 |
3459 /* Look for SSA names that have their last use here (TERed | 5580 /* Look for SSA names that have their last use here (TERed |
3460 names always have only one real use). */ | 5581 names always have only one real use). */ |
3461 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) | 5582 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) |
3462 if ((def = get_gimple_for_ssa_name (op))) | 5583 if ((def = get_gimple_for_ssa_name (op))) |
3474 } | 5595 } |
3475 } | 5596 } |
3476 | 5597 |
3477 if (have_debug_uses) | 5598 if (have_debug_uses) |
3478 { | 5599 { |
3479 /* OP is a TERed SSA name, with DEF it's defining | 5600 /* OP is a TERed SSA name, with DEF its defining |
3480 statement, and where OP is used in further debug | 5601 statement, and where OP is used in further debug |
3481 instructions. Generate a debug temporary, and | 5602 instructions. Generate a debug temporary, and |
3482 replace all uses of OP in debug insns with that | 5603 replace all uses of OP in debug insns with that |
3483 temporary. */ | 5604 temporary. */ |
3484 gimple debugstmt; | 5605 gimple *debugstmt; |
3485 tree value = gimple_assign_rhs_to_tree (def); | 5606 tree value = gimple_assign_rhs_to_tree (def); |
3486 tree vexpr = make_node (DEBUG_EXPR_DECL); | 5607 tree vexpr = make_node (DEBUG_EXPR_DECL); |
3487 rtx val; | 5608 rtx val; |
3488 enum machine_mode mode; | 5609 machine_mode mode; |
3489 | 5610 |
3490 set_curr_insn_source_location (gimple_location (def)); | 5611 set_curr_insn_location (gimple_location (def)); |
3491 set_curr_insn_block (gimple_block (def)); | |
3492 | 5612 |
3493 DECL_ARTIFICIAL (vexpr) = 1; | 5613 DECL_ARTIFICIAL (vexpr) = 1; |
3494 TREE_TYPE (vexpr) = TREE_TYPE (value); | 5614 TREE_TYPE (vexpr) = TREE_TYPE (value); |
3495 if (DECL_P (value)) | 5615 if (DECL_P (value)) |
3496 mode = DECL_MODE (value); | 5616 mode = DECL_MODE (value); |
3497 else | 5617 else |
3498 mode = TYPE_MODE (TREE_TYPE (value)); | 5618 mode = TYPE_MODE (TREE_TYPE (value)); |
3499 DECL_MODE (vexpr) = mode; | 5619 SET_DECL_MODE (vexpr, mode); |
3500 | 5620 |
3501 val = gen_rtx_VAR_LOCATION | 5621 val = gen_rtx_VAR_LOCATION |
3502 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | 5622 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED); |
3503 | 5623 |
3504 val = emit_debug_insn (val); | 5624 emit_debug_insn (val); |
3505 | 5625 |
3506 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op) | 5626 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op) |
3507 { | 5627 { |
3508 if (!gimple_debug_bind_p (debugstmt)) | 5628 if (!gimple_debug_bind_p (debugstmt)) |
3509 continue; | 5629 continue; |
3513 | 5633 |
3514 update_stmt (debugstmt); | 5634 update_stmt (debugstmt); |
3515 } | 5635 } |
3516 } | 5636 } |
3517 } | 5637 } |
3518 set_curr_insn_source_location (sloc); | 5638 set_curr_insn_location (sloc); |
3519 set_curr_insn_block (sblock); | |
3520 } | 5639 } |
3521 | 5640 |
3522 currently_expanding_gimple_stmt = stmt; | 5641 currently_expanding_gimple_stmt = stmt; |
3523 | 5642 |
3524 /* Expand this statement, then evaluate the resulting RTL and | 5643 /* Expand this statement, then evaluate the resulting RTL and |
3525 fixup the CFG accordingly. */ | 5644 fixup the CFG accordingly. */ |
3526 if (gimple_code (stmt) == GIMPLE_COND) | 5645 if (gimple_code (stmt) == GIMPLE_COND) |
3527 { | 5646 { |
3528 new_bb = expand_gimple_cond (bb, stmt); | 5647 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt)); |
3529 if (new_bb) | 5648 if (new_bb) |
3530 return new_bb; | 5649 return new_bb; |
3531 } | 5650 } |
3532 else if (gimple_debug_bind_p (stmt)) | 5651 else if (gimple_debug_bind_p (stmt)) |
3533 { | 5652 { |
3534 location_t sloc = get_curr_insn_source_location (); | 5653 location_t sloc = curr_insn_location (); |
3535 tree sblock = get_curr_insn_block (); | |
3536 gimple_stmt_iterator nsi = gsi; | 5654 gimple_stmt_iterator nsi = gsi; |
3537 | 5655 |
3538 for (;;) | 5656 for (;;) |
3539 { | 5657 { |
3540 tree var = gimple_debug_bind_get_var (stmt); | 5658 tree var = gimple_debug_bind_get_var (stmt); |
3541 tree value; | 5659 tree value; |
3542 rtx val; | 5660 rtx val; |
3543 enum machine_mode mode; | 5661 machine_mode mode; |
5662 | |
5663 if (TREE_CODE (var) != DEBUG_EXPR_DECL | |
5664 && TREE_CODE (var) != LABEL_DECL | |
5665 && !target_for_debug_bind (var)) | |
5666 goto delink_debug_stmt; | |
3544 | 5667 |
3545 if (gimple_debug_bind_has_value_p (stmt)) | 5668 if (gimple_debug_bind_has_value_p (stmt)) |
3546 value = gimple_debug_bind_get_value (stmt); | 5669 value = gimple_debug_bind_get_value (stmt); |
3547 else | 5670 else |
3548 value = NULL_TREE; | 5671 value = NULL_TREE; |
3549 | 5672 |
3550 last = get_last_insn (); | 5673 last = get_last_insn (); |
3551 | 5674 |
3552 set_curr_insn_source_location (gimple_location (stmt)); | 5675 set_curr_insn_location (gimple_location (stmt)); |
3553 set_curr_insn_block (gimple_block (stmt)); | |
3554 | 5676 |
3555 if (DECL_P (var)) | 5677 if (DECL_P (var)) |
3556 mode = DECL_MODE (var); | 5678 mode = DECL_MODE (var); |
3557 else | 5679 else |
3558 mode = TYPE_MODE (TREE_TYPE (var)); | 5680 mode = TYPE_MODE (TREE_TYPE (var)); |
3559 | 5681 |
3560 val = gen_rtx_VAR_LOCATION | 5682 val = gen_rtx_VAR_LOCATION |
3561 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | 5683 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); |
3562 | 5684 |
3563 val = emit_debug_insn (val); | 5685 emit_debug_insn (val); |
3564 | 5686 |
3565 if (dump_file && (dump_flags & TDF_DETAILS)) | 5687 if (dump_file && (dump_flags & TDF_DETAILS)) |
3566 { | 5688 { |
3567 /* We can't dump the insn with a TREE where an RTX | 5689 /* We can't dump the insn with a TREE where an RTX |
3568 is expected. */ | 5690 is expected. */ |
3569 INSN_VAR_LOCATION_LOC (val) = const0_rtx; | 5691 PAT_VAR_LOCATION_LOC (val) = const0_rtx; |
3570 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 5692 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
3571 INSN_VAR_LOCATION_LOC (val) = (rtx)value; | 5693 PAT_VAR_LOCATION_LOC (val) = (rtx)value; |
3572 } | 5694 } |
3573 | 5695 |
5696 delink_debug_stmt: | |
3574 /* In order not to generate too many debug temporaries, | 5697 /* In order not to generate too many debug temporaries, |
3575 we delink all uses of debug statements we already expanded. | 5698 we delink all uses of debug statements we already expanded. |
3576 Therefore debug statements between definition and real | 5699 Therefore debug statements between definition and real |
3577 use of TERed SSA names will continue to use the SSA name, | 5700 use of TERed SSA names will continue to use the SSA name, |
3578 and not be replaced with debug temps. */ | 5701 and not be replaced with debug temps. */ |
3585 stmt = gsi_stmt (nsi); | 5708 stmt = gsi_stmt (nsi); |
3586 if (!gimple_debug_bind_p (stmt)) | 5709 if (!gimple_debug_bind_p (stmt)) |
3587 break; | 5710 break; |
3588 } | 5711 } |
3589 | 5712 |
3590 set_curr_insn_source_location (sloc); | 5713 set_curr_insn_location (sloc); |
3591 set_curr_insn_block (sblock); | 5714 } |
5715 else if (gimple_debug_source_bind_p (stmt)) | |
5716 { | |
5717 location_t sloc = curr_insn_location (); | |
5718 tree var = gimple_debug_source_bind_get_var (stmt); | |
5719 tree value = gimple_debug_source_bind_get_value (stmt); | |
5720 rtx val; | |
5721 machine_mode mode; | |
5722 | |
5723 last = get_last_insn (); | |
5724 | |
5725 set_curr_insn_location (gimple_location (stmt)); | |
5726 | |
5727 mode = DECL_MODE (var); | |
5728 | |
5729 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value, | |
5730 VAR_INIT_STATUS_UNINITIALIZED); | |
5731 | |
5732 emit_debug_insn (val); | |
5733 | |
5734 if (dump_file && (dump_flags & TDF_DETAILS)) | |
5735 { | |
5736 /* We can't dump the insn with a TREE where an RTX | |
5737 is expected. */ | |
5738 PAT_VAR_LOCATION_LOC (val) = const0_rtx; | |
5739 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
5740 PAT_VAR_LOCATION_LOC (val) = (rtx)value; | |
5741 } | |
5742 | |
5743 set_curr_insn_location (sloc); | |
3592 } | 5744 } |
3593 else | 5745 else |
3594 { | 5746 { |
3595 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) | 5747 gcall *call_stmt = dyn_cast <gcall *> (stmt); |
5748 if (call_stmt | |
5749 && gimple_call_tail_p (call_stmt) | |
5750 && disable_tail_calls) | |
5751 gimple_call_set_tail (call_stmt, false); | |
5752 | |
5753 if (call_stmt && gimple_call_tail_p (call_stmt)) | |
3596 { | 5754 { |
3597 bool can_fallthru; | 5755 bool can_fallthru; |
3598 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | 5756 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru); |
3599 if (new_bb) | 5757 if (new_bb) |
3600 { | 5758 { |
3601 if (can_fallthru) | 5759 if (can_fallthru) |
3602 bb = new_bb; | 5760 bb = new_bb; |
3603 else | 5761 else |
3627 currently_expanding_gimple_stmt = NULL; | 5785 currently_expanding_gimple_stmt = NULL; |
3628 | 5786 |
3629 /* Expand implicit goto and convert goto_locus. */ | 5787 /* Expand implicit goto and convert goto_locus. */ |
3630 FOR_EACH_EDGE (e, ei, bb->succs) | 5788 FOR_EACH_EDGE (e, ei, bb->succs) |
3631 { | 5789 { |
3632 if (e->goto_locus && e->goto_block) | 5790 if (e->goto_locus != UNKNOWN_LOCATION) |
3633 { | 5791 set_curr_insn_location (e->goto_locus); |
3634 set_curr_insn_source_location (e->goto_locus); | |
3635 set_curr_insn_block (e->goto_block); | |
3636 e->goto_locus = curr_insn_locator (); | |
3637 } | |
3638 e->goto_block = NULL; | |
3639 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | 5792 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) |
3640 { | 5793 { |
3641 emit_jump (label_rtx_for_bb (e->dest)); | 5794 emit_jump (label_rtx_for_bb (e->dest)); |
3642 e->flags &= ~EDGE_FALLTHRU; | 5795 e->flags &= ~EDGE_FALLTHRU; |
3643 } | 5796 } |
3647 This later might be assumed to be a jump to the successor and break edge insertion. | 5800 This later might be assumed to be a jump to the successor and break edge insertion. |
3648 We need to insert a dummy move to prevent this. PR41440. */ | 5801 We need to insert a dummy move to prevent this. PR41440. */ |
3649 if (single_succ_p (bb) | 5802 if (single_succ_p (bb) |
3650 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU) | 5803 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU) |
3651 && (last = get_last_insn ()) | 5804 && (last = get_last_insn ()) |
3652 && JUMP_P (last)) | 5805 && (JUMP_P (last) |
5806 || (DEBUG_INSN_P (last) | |
5807 && JUMP_P (prev_nondebug_insn (last))))) | |
3653 { | 5808 { |
3654 rtx dummy = gen_reg_rtx (SImode); | 5809 rtx dummy = gen_reg_rtx (SImode); |
3655 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL); | 5810 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL); |
3656 } | 5811 } |
3657 | 5812 |
3680 basic_block init_block, first_block; | 5835 basic_block init_block, first_block; |
3681 edge e = NULL; | 5836 edge e = NULL; |
3682 int flags; | 5837 int flags; |
3683 | 5838 |
3684 /* Multiple entry points not supported yet. */ | 5839 /* Multiple entry points not supported yet. */ |
3685 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1); | 5840 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1); |
3686 init_rtl_bb_info (ENTRY_BLOCK_PTR); | 5841 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
3687 init_rtl_bb_info (EXIT_BLOCK_PTR); | 5842 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun)); |
3688 ENTRY_BLOCK_PTR->flags |= BB_RTL; | 5843 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL; |
3689 EXIT_BLOCK_PTR->flags |= BB_RTL; | 5844 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL; |
3690 | 5845 |
3691 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0); | 5846 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0); |
3692 | 5847 |
3693 /* When entry edge points to first basic block, we don't need jump, | 5848 /* When entry edge points to first basic block, we don't need jump, |
3694 otherwise we have to jump to the proper target. */ | 5849 otherwise we have to jump to the proper target. */ |
3695 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) | 5850 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) |
3696 { | 5851 { |
3697 tree label = gimple_block_label (e->dest); | 5852 tree label = gimple_block_label (e->dest); |
3698 | 5853 |
3699 emit_jump (label_rtx (label)); | 5854 emit_jump (jump_target_rtx (label)); |
3700 flags = 0; | 5855 flags = 0; |
3701 } | 5856 } |
3702 else | 5857 else |
3703 flags = EDGE_FALLTHRU; | 5858 flags = EDGE_FALLTHRU; |
3704 | 5859 |
3705 init_block = create_basic_block (NEXT_INSN (get_insns ()), | 5860 init_block = create_basic_block (NEXT_INSN (get_insns ()), |
3706 get_last_insn (), | 5861 get_last_insn (), |
3707 ENTRY_BLOCK_PTR); | 5862 ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
3708 init_block->frequency = ENTRY_BLOCK_PTR->frequency; | 5863 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency; |
3709 init_block->count = ENTRY_BLOCK_PTR->count; | 5864 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count; |
5865 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father); | |
3710 if (e) | 5866 if (e) |
3711 { | 5867 { |
3712 first_block = e->dest; | 5868 first_block = e->dest; |
3713 redirect_edge_succ (e, init_block); | 5869 redirect_edge_succ (e, init_block); |
3714 e = make_edge (init_block, first_block, flags); | 5870 e = make_single_succ_edge (init_block, first_block, flags); |
3715 } | 5871 } |
3716 else | 5872 else |
3717 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | 5873 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), |
3718 e->probability = REG_BR_PROB_BASE; | 5874 EDGE_FALLTHRU); |
3719 e->count = ENTRY_BLOCK_PTR->count; | |
3720 | 5875 |
3721 update_bb_for_insn (init_block); | 5876 update_bb_for_insn (init_block); |
3722 return init_block; | 5877 return init_block; |
3723 } | 5878 } |
3724 | 5879 |
3739 /* Create a block containing landing pads and similar stuff. */ | 5894 /* Create a block containing landing pads and similar stuff. */ |
3740 | 5895 |
3741 static void | 5896 static void |
3742 construct_exit_block (void) | 5897 construct_exit_block (void) |
3743 { | 5898 { |
3744 rtx head = get_last_insn (); | 5899 rtx_insn *head = get_last_insn (); |
3745 rtx end; | 5900 rtx_insn *end; |
3746 basic_block exit_block; | 5901 basic_block exit_block; |
3747 edge e, e2; | 5902 edge e, e2; |
3748 unsigned ix; | 5903 unsigned ix; |
3749 edge_iterator ei; | 5904 edge_iterator ei; |
3750 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb); | 5905 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb; |
3751 | 5906 rtx_insn *orig_end = BB_END (prev_bb); |
3752 rtl_profile_for_bb (EXIT_BLOCK_PTR); | 5907 |
5908 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)); | |
3753 | 5909 |
3754 /* Make sure the locus is set to the end of the function, so that | 5910 /* Make sure the locus is set to the end of the function, so that |
3755 epilogue line numbers and warnings are set properly. */ | 5911 epilogue line numbers and warnings are set properly. */ |
3756 if (cfun->function_end_locus != UNKNOWN_LOCATION) | 5912 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION) |
3757 input_location = cfun->function_end_locus; | 5913 input_location = cfun->function_end_locus; |
3758 | |
3759 /* The following insns belong to the top scope. */ | |
3760 set_curr_insn_block (DECL_INITIAL (current_function_decl)); | |
3761 | 5914 |
3762 /* Generate rtl for function exit. */ | 5915 /* Generate rtl for function exit. */ |
3763 expand_function_end (); | 5916 expand_function_end (); |
3764 | 5917 |
3765 end = get_last_insn (); | 5918 end = get_last_insn (); |
3766 if (head == end) | 5919 if (head == end) |
3767 return; | 5920 return; |
3768 /* While emitting the function end we could move the end of the last basic block. | 5921 /* While emitting the function end we could move the end of the last basic |
3769 */ | 5922 block. */ |
3770 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | 5923 BB_END (prev_bb) = orig_end; |
3771 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) | 5924 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) |
3772 head = NEXT_INSN (head); | 5925 head = NEXT_INSN (head); |
3773 exit_block = create_basic_block (NEXT_INSN (head), end, | 5926 /* But make sure exit_block starts with RETURN_LABEL, otherwise the |
3774 EXIT_BLOCK_PTR->prev_bb); | 5927 bb frequency counting will be confused. Any instructions before that |
3775 exit_block->frequency = EXIT_BLOCK_PTR->frequency; | 5928 label are emitted for the case where PREV_BB falls through into the |
3776 exit_block->count = EXIT_BLOCK_PTR->count; | 5929 exit block, so append those instructions to prev_bb in that case. */ |
5930 if (NEXT_INSN (head) != return_label) | |
5931 { | |
5932 while (NEXT_INSN (head) != return_label) | |
5933 { | |
5934 if (!NOTE_P (NEXT_INSN (head))) | |
5935 BB_END (prev_bb) = NEXT_INSN (head); | |
5936 head = NEXT_INSN (head); | |
5937 } | |
5938 } | |
5939 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb); | |
5940 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency; | |
5941 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count; | |
5942 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father); | |
3777 | 5943 |
3778 ix = 0; | 5944 ix = 0; |
3779 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | 5945 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)) |
3780 { | 5946 { |
3781 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); | 5947 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix); |
3782 if (!(e->flags & EDGE_ABNORMAL)) | 5948 if (!(e->flags & EDGE_ABNORMAL)) |
3783 redirect_edge_succ (e, exit_block); | 5949 redirect_edge_succ (e, exit_block); |
3784 else | 5950 else |
3785 ix++; | 5951 ix++; |
3786 } | 5952 } |
3787 | 5953 |
3788 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | 5954 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), |
3789 e->probability = REG_BR_PROB_BASE; | 5955 EDGE_FALLTHRU); |
3790 e->count = EXIT_BLOCK_PTR->count; | 5956 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
3791 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) | |
3792 if (e2 != e) | 5957 if (e2 != e) |
3793 { | 5958 { |
3794 e->count -= e2->count; | 5959 exit_block->count -= e2->count (); |
3795 exit_block->count -= e2->count; | |
3796 exit_block->frequency -= EDGE_FREQUENCY (e2); | 5960 exit_block->frequency -= EDGE_FREQUENCY (e2); |
3797 } | 5961 } |
3798 if (e->count < 0) | |
3799 e->count = 0; | |
3800 if (exit_block->count < 0) | |
3801 exit_block->count = 0; | |
3802 if (exit_block->frequency < 0) | 5962 if (exit_block->frequency < 0) |
3803 exit_block->frequency = 0; | 5963 exit_block->frequency = 0; |
3804 update_bb_for_insn (exit_block); | 5964 update_bb_for_insn (exit_block); |
3805 } | 5965 } |
3806 | 5966 |
3855 discover_nonconstant_array_refs (void) | 6015 discover_nonconstant_array_refs (void) |
3856 { | 6016 { |
3857 basic_block bb; | 6017 basic_block bb; |
3858 gimple_stmt_iterator gsi; | 6018 gimple_stmt_iterator gsi; |
3859 | 6019 |
3860 FOR_EACH_BB (bb) | 6020 FOR_EACH_BB_FN (bb, cfun) |
3861 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | 6021 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
3862 { | 6022 { |
3863 gimple stmt = gsi_stmt (gsi); | 6023 gimple *stmt = gsi_stmt (gsi); |
3864 if (!is_gimple_debug (stmt)) | 6024 if (!is_gimple_debug (stmt)) |
3865 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | 6025 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); |
3866 } | 6026 } |
3867 } | 6027 } |
3868 | 6028 |
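[Editor's note] The walk above flags array references whose index is not a compile-time constant; such arrays must stay addressable, since the access cannot be rewritten into an SSA register. A small example of the pattern it catches (a sketch, not taken from the compiler sources):

    /* A is indexed by a runtime value, so it is kept in memory
       (marked TREE_ADDRESSABLE) rather than promoted to registers.  */
    int
    pick (int i)
    {
      int a[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
      return a[i & 7];
    }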
3936 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is | 6096 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is |
3937 needed. */ | 6097 needed. */ |
3938 fixup_tail_calls (); | 6098 fixup_tail_calls (); |
3939 } | 6099 } |
3940 } | 6100 } |
6101 | |
6102 | |
6103 static void | |
6104 expand_main_function (void) | |
6105 { | |
6106 #if (defined(INVOKE__main) \ | |
6107 || (!defined(HAS_INIT_SECTION) \ | |
6108 && !defined(INIT_SECTION_ASM_OP) \ | |
6109 && !defined(INIT_ARRAY_SECTION_ASM_OP))) | |
6110 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode); | |
6111 #endif | |
6112 } | |
6113 | |
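[Editor's note] On targets without an init section, expand_main_function prepends a call to libgcc's __main, which runs global constructors before user code. In effect, main behaves as if written like this (a behavioral sketch only, not code to compile into a real program):

    extern void __main (void);   /* libgcc routine that runs global ctors */

    int
    main (void)
    {
      __main ();                 /* inserted by expand_main_function */
      /* ... user code follows ... */
      return 0;
    }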
6114 | |
6115 /* Expand code to initialize the stack_protect_guard. This is invoked at | |
6116 the beginning of a function to be protected. */ | |
6117 | |
6118 static void | |
6119 stack_protect_prologue (void) | |
6120 { | |
6121 tree guard_decl = targetm.stack_protect_guard (); | |
6122 rtx x, y; | |
6123 | |
6124 x = expand_normal (crtl->stack_protect_guard); | |
6125 if (guard_decl) | |
6126 y = expand_normal (guard_decl); | |
6127 else | |
6128 y = const0_rtx; | |
6129 | |
6130 /* Allow the target to copy from Y to X without leaking Y into a | |
6131 register. */ | |
6132 if (targetm.have_stack_protect_set ()) | |
6133 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y)) | |
6134 { | |
6135 emit_insn (insn); | |
6136 return; | |
6137 } | |
6138 | |
6139 /* Otherwise do a straight move. */ | |
6140 emit_move_insn (x, y); | |
6141 } | |
3941 | 6142 |
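[Editor's note] stack_protect_prologue copies the guard value into the frame slot, preferring a target-provided stack_protect_set pattern so the guard never passes through an ordinary register where it could leak, and falling back to a plain move. At the source level, the prologue plus the matching epilogue check amount to (a sketch with target-dependent names; the epilogue check itself is emitted elsewhere):

    extern long __stack_chk_guard;   /* guard symbol; name varies by target */

    void
    protected_fn (void)
    {
      volatile long canary = __stack_chk_guard;   /* prologue copy */
      /* ... function body ... */
      if (canary != __stack_chk_guard)            /* epilogue check */
	__builtin_trap ();
    }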
3942 /* Translate the intermediate representation contained in the CFG | 6143 /* Translate the intermediate representation contained in the CFG |
3943 from GIMPLE trees to RTL. | 6144 from GIMPLE trees to RTL. |
3944 | 6145 |
3945 We do conversion per basic block and preserve/update the tree CFG. | 6146 We do conversion per basic block and preserve/update the tree CFG. |
3946 This implies we have to do some magic as the CFG can simultaneously | 6147 This implies we have to do some magic as the CFG can simultaneously |
3947 consist of basic blocks containing RTL and GIMPLE trees. This can | 6148 consist of basic blocks containing RTL and GIMPLE trees. This can |
3948 confuse the CFG hooks, so be careful to not manipulate CFG during | 6149 confuse the CFG hooks, so be careful to not manipulate CFG during |
3949 the expansion. */ | 6150 the expansion. */ |
3950 | 6151 |
3951 static unsigned int | 6152 namespace { |
3952 gimple_expand_cfg (void) | 6153 |
6154 const pass_data pass_data_expand = | |
6155 { | |
6156 RTL_PASS, /* type */ | |
6157 "expand", /* name */ | |
6158 OPTGROUP_NONE, /* optinfo_flags */ | |
6159 TV_EXPAND, /* tv_id */ | |
6160 ( PROP_ssa | PROP_gimple_leh | PROP_cfg | |
6161 | PROP_gimple_lcx | |
6162 | PROP_gimple_lvec | |
6163 | PROP_gimple_lva), /* properties_required */ | |
6164 PROP_rtl, /* properties_provided */ | |
6165 ( PROP_ssa | PROP_trees ), /* properties_destroyed */ | |
6166 0, /* todo_flags_start */ | |
6167 0, /* todo_flags_finish */ | |
6168 }; | |
6169 | |
6170 class pass_expand : public rtl_opt_pass | |
6171 { | |
6172 public: | |
6173 pass_expand (gcc::context *ctxt) | |
6174 : rtl_opt_pass (pass_data_expand, ctxt) | |
6175 {} | |
6176 | |
6177 /* opt_pass methods: */ | |
6178 virtual unsigned int execute (function *); | |
6179 | |
6180 }; // class pass_expand | |
6181 | |
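[Editor's note] Passes declared this way are handed to the pass manager through a factory function; the conventional boilerplate for this class, defined outside this hunk, would look like:

    /* Sketch of the usual pass factory; the actual definition lies
       elsewhere in the file, after the anonymous namespace closes.  */
    rtl_opt_pass *
    make_pass_expand (gcc::context *ctxt)
    {
      return new pass_expand (ctxt);
    }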
6182 unsigned int | |
6183 pass_expand::execute (function *fun) | |
3953 { | 6184 { |
3954 basic_block bb, init_block; | 6185 basic_block bb, init_block; |
3955 sbitmap blocks; | |
3956 edge_iterator ei; | 6186 edge_iterator ei; |
3957 edge e; | 6187 edge e; |
3958 rtx var_seq; | 6188 rtx_insn *var_seq, *var_ret_seq; |
3959 unsigned i; | 6189 unsigned i; |
3960 | 6190 |
3961 timevar_push (TV_OUT_OF_SSA); | 6191 timevar_push (TV_OUT_OF_SSA); |
3962 rewrite_out_of_ssa (&SA); | 6192 rewrite_out_of_ssa (&SA); |
3963 timevar_pop (TV_OUT_OF_SSA); | 6193 timevar_pop (TV_OUT_OF_SSA); |
3964 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, | 6194 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions); |
3965 sizeof (rtx)); | 6195 |
6196 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter) | |
6197 { | |
6198 gimple_stmt_iterator gsi; | |
6199 FOR_EACH_BB_FN (bb, cfun) | |
6200 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
6201 if (gimple_debug_bind_p (gsi_stmt (gsi))) | |
6202 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0); | |
6203 } | |
6204 | |
6205 /* Make sure all values used by the optimization passes have sane | |
6206 defaults. */ | |
6207 reg_renumber = 0; | |
3966 | 6208 |
3967 /* Some backends want to know that we are expanding to RTL. */ | 6209 /* Some backends want to know that we are expanding to RTL. */ |
3968 currently_expanding_to_rtl = 1; | 6210 currently_expanding_to_rtl = 1; |
3969 | 6211 /* Dominators are not kept up-to-date as we may create new basic-blocks. */ |
3970 rtl_profile_for_bb (ENTRY_BLOCK_PTR); | 6212 free_dominance_info (CDI_DOMINATORS); |
3971 | 6213 |
3972 insn_locators_alloc (); | 6214 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)); |
6215 | |
6216 if (chkp_function_instrumented_p (current_function_decl)) | |
6217 chkp_reset_rtl_bounds (); | |
6218 | |
6219 insn_locations_init (); | |
3973 if (!DECL_IS_BUILTIN (current_function_decl)) | 6220 if (!DECL_IS_BUILTIN (current_function_decl)) |
3974 { | 6221 { |
3975 /* Eventually, all FEs should explicitly set function_start_locus. */ | 6222 /* Eventually, all FEs should explicitly set function_start_locus. */ |
3976 if (cfun->function_start_locus == UNKNOWN_LOCATION) | 6223 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION) |
3977 set_curr_insn_source_location | 6224 set_curr_insn_location |
3978 (DECL_SOURCE_LOCATION (current_function_decl)); | 6225 (DECL_SOURCE_LOCATION (current_function_decl)); |
3979 else | 6226 else |
3980 set_curr_insn_source_location (cfun->function_start_locus); | 6227 set_curr_insn_location (fun->function_start_locus); |
3981 } | 6228 } |
3982 else | 6229 else |
3983 set_curr_insn_source_location (UNKNOWN_LOCATION); | 6230 set_curr_insn_location (UNKNOWN_LOCATION); |
3984 set_curr_insn_block (DECL_INITIAL (current_function_decl)); | 6231 prologue_location = curr_insn_location (); |
3985 prologue_locator = curr_insn_locator (); | |
3986 | 6232 |
3987 #ifdef INSN_SCHEDULING | 6233 #ifdef INSN_SCHEDULING |
3988 init_sched_attrs (); | 6234 init_sched_attrs (); |
3989 #endif | 6235 #endif |
3990 | 6236 |
3995 | 6241 |
3996 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ | 6242 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ |
3997 discover_nonconstant_array_refs (); | 6243 discover_nonconstant_array_refs (); |
3998 | 6244 |
3999 targetm.expand_to_rtl_hook (); | 6245 targetm.expand_to_rtl_hook (); |
4000 crtl->stack_alignment_needed = STACK_BOUNDARY; | 6246 crtl->init_stack_alignment (); |
4001 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; | 6247 fun->cfg->max_jumptable_ents = 0; |
4002 crtl->stack_alignment_estimated = 0; | |
4003 crtl->preferred_stack_boundary = STACK_BOUNDARY; | |
4004 cfun->cfg->max_jumptable_ents = 0; | |
4005 | 6248 |
4006 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge | 6249 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge |
4007 of the function section at expansion time to predict the distance of calls. */ | 6250 of the function section at expansion time to predict the distance of calls. */ |
4008 resolve_unique_section (current_function_decl, 0, flag_function_sections); | 6251 resolve_unique_section (current_function_decl, 0, flag_function_sections); |
4009 | 6252 |
4010 /* Expand the variables recorded during gimple lowering. */ | 6253 /* Expand the variables recorded during gimple lowering. */ |
4011 timevar_push (TV_VAR_EXPAND); | 6254 timevar_push (TV_VAR_EXPAND); |
4012 start_sequence (); | 6255 start_sequence (); |
4013 | 6256 |
4014 expand_used_vars (); | 6257 var_ret_seq = expand_used_vars (); |
4015 | 6258 |
4016 var_seq = get_insns (); | 6259 var_seq = get_insns (); |
4017 end_sequence (); | 6260 end_sequence (); |
4018 timevar_pop (TV_VAR_EXPAND); | 6261 timevar_pop (TV_VAR_EXPAND); |
4019 | 6262 |
4020 /* Honor stack protection warnings. */ | 6263 /* Honor stack protection warnings. */ |
4021 if (warn_stack_protect) | 6264 if (warn_stack_protect) |
4022 { | 6265 { |
4023 if (cfun->calls_alloca) | 6266 if (fun->calls_alloca) |
4024 warning (OPT_Wstack_protector, | 6267 warning (OPT_Wstack_protector, |
4025 "stack protector not protecting local variables: " | 6268 "stack protector not protecting local variables: " |
4026 "variable length buffer"); | 6269 "variable length buffer"); |
4027 if (has_short_buffer && !crtl->stack_protect_guard) | 6270 if (has_short_buffer && !crtl->stack_protect_guard) |
4028 warning (OPT_Wstack_protector, | 6271 warning (OPT_Wstack_protector, |
4029 "stack protector not protecting function: " | 6272 "stack protector not protecting function: " |
4030 "all local arrays are less than %d bytes long", | 6273 "all local arrays are less than %d bytes long", |
4031 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); | 6274 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); |
4032 } | 6275 } |
4033 | 6276 |
4034 /* Set up parameters and prepare for return, for the function. */ | 6277 /* Set up parameters and prepare for return, for the function. */ |
4035 expand_function_start (current_function_decl); | 6278 expand_function_start (current_function_decl); |
4044 before parm_birth_insn. We've just inserted an alloca call. | 6287 before parm_birth_insn. We've just inserted an alloca call. |
4045 Adjust the pointer to match. */ | 6288 Adjust the pointer to match. */ |
4046 parm_birth_insn = var_seq; | 6289 parm_birth_insn = var_seq; |
4047 } | 6290 } |
4048 | 6291 |
4049 /* Now that we also have the parameter RTXs, copy them over to our | 6292 /* Now propagate the RTL assignment of each partition to the |
4050 partitions. */ | 6293 underlying var of each SSA_NAME. */ |
4051 for (i = 0; i < SA.map->num_partitions; i++) | 6294 tree name; |
4052 { | 6295 |
4053 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i)); | 6296 FOR_EACH_SSA_NAME (i, name, cfun) |
4054 | 6297 { |
4055 if (TREE_CODE (var) != VAR_DECL | 6298 /* We might have generated new SSA names in |
4056 && !SA.partition_to_pseudo[i]) | 6299 update_alias_info_with_stack_vars. They will have NULL |
4057 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var); | 6300 defining statements, and won't be part of the partitioning, |
4058 gcc_assert (SA.partition_to_pseudo[i]); | 6301 so ignore those. */ |
6302 if (!SSA_NAME_DEF_STMT (name)) | |
6303 continue; | |
6304 | |
6305 adjust_one_expanded_partition_var (name); | |
6306 } | |
6307 | |
6308 /* Clean up RTL of variables that straddle across multiple | |
6309 partitions, and check that the rtl of any PARM_DECLs that are not | |
6310 cleaned up is that of their default defs. */ | |
6311 FOR_EACH_SSA_NAME (i, name, cfun) | |
6312 { | |
6313 int part; | |
6314 | |
6315 /* We might have generated new SSA names in | |
6316 update_alias_info_with_stack_vars. They will have NULL |
6317 defining statements, and won't be part of the partitioning, | |
6318 so ignore those. */ | |
6319 if (!SSA_NAME_DEF_STMT (name)) | |
6320 continue; | |
6321 part = var_to_partition (SA.map, name); | |
6322 if (part == NO_PARTITION) | |
6323 continue; | |
4059 | 6324 |
4060 /* If this decl was marked as living in multiple places, reset | 6325 /* If this decl was marked as living in multiple places, reset |
4061 this now to NULL. */ | 6326 this now to NULL. */ |
4062 if (DECL_RTL_IF_SET (var) == pc_rtx) | 6327 tree var = SSA_NAME_VAR (name); |
6328 if (var && DECL_RTL_IF_SET (var) == pc_rtx) | |
4063 SET_DECL_RTL (var, NULL); | 6329 SET_DECL_RTL (var, NULL); |
4064 | 6330 /* Check that the pseudos chosen by assign_parms are those of |
4065 /* Some RTL parts really want to look at DECL_RTL(x) when x | 6331 the corresponding default defs. */ |
4066 was a decl marked in REG_ATTR or MEM_ATTR. We could use | 6332 else if (SSA_NAME_IS_DEFAULT_DEF (name) |
4067 SET_DECL_RTL here making this available, but that would mean | 6333 && (TREE_CODE (var) == PARM_DECL |
4068 to select one of the potentially many RTLs for one DECL. Instead | 6334 || TREE_CODE (var) == RESULT_DECL)) |
4069 of doing that we simply reset the MEM_EXPR of the RTL in question, | 6335 { |
4070 then nobody can get at it and hence nobody can call DECL_RTL on it. */ | 6336 rtx in = DECL_RTL_IF_SET (var); |
4071 if (!DECL_RTL_SET_P (var)) | 6337 gcc_assert (in); |
4072 { | 6338 rtx out = SA.partition_to_pseudo[part]; |
4073 if (MEM_P (SA.partition_to_pseudo[i])) | 6339 gcc_assert (in == out); |
4074 set_mem_expr (SA.partition_to_pseudo[i], NULL); | 6340 |
6341 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match | |
6342 those expected by debug backends for each parm and for | |
6343 the result. This is particularly important for stabs, | |
6344 whose register elimination from parm's DECL_RTL may cause | |
6345 -fcompare-debug differences as SET_DECL_RTL changes reg's | |
6346 attrs. So, make sure the RTL already has the parm as the | |
6347 EXPR, so that it won't change. */ | |
6348 SET_DECL_RTL (var, NULL_RTX); | |
6349 if (MEM_P (in)) | |
6350 set_mem_attributes (in, var, true); | |
6351 SET_DECL_RTL (var, in); | |
4075 } | 6352 } |
4076 } | 6353 } |
4077 | 6354 |
4078 /* If this function is `main', emit a call to `__main' | 6355 /* If this function is `main', emit a call to `__main' |
4079 to run global initializers, etc. */ | 6356 to run global initializers, etc. */ |
4082 && DECL_FILE_SCOPE_P (current_function_decl)) | 6359 && DECL_FILE_SCOPE_P (current_function_decl)) |
4083 expand_main_function (); | 6360 expand_main_function (); |
4084 | 6361 |
4085 /* Initialize the stack_protect_guard field. This must happen after the | 6362 /* Initialize the stack_protect_guard field. This must happen after the |
4086 call to __main (if any) so that the external decl is initialized. */ | 6363 call to __main (if any) so that the external decl is initialized. */ |
4087 if (crtl->stack_protect_guard) | 6364 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ()) |
4088 stack_protect_prologue (); | 6365 stack_protect_prologue (); |
4089 | 6366 |
4090 expand_phi_nodes (&SA); | 6367 expand_phi_nodes (&SA); |
6368 | |
6369 /* Release any stale SSA redirection data. */ | |
6370 redirect_edge_var_map_empty (); | |
4091 | 6371 |
4092 /* Register rtl specific functions for cfg. */ | 6372 /* Register rtl specific functions for cfg. */ |
4093 rtl_register_cfg_hooks (); | 6373 rtl_register_cfg_hooks (); |
4094 | 6374 |
4095 init_block = construct_init_block (); | 6375 init_block = construct_init_block (); |
4096 | 6376 |
4097 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the | 6377 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the |
4098 remaining edges later. */ | 6378 remaining edges later. */ |
4099 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) | 6379 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs) |
4100 e->flags &= ~EDGE_EXECUTABLE; | 6380 e->flags &= ~EDGE_EXECUTABLE; |
4101 | 6381 |
4102 lab_rtx_for_bb = pointer_map_create (); | 6382 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>; |
4103 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) | 6383 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun), |
4104 bb = expand_gimple_basic_block (bb); | 6384 next_bb) |
6385 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX); | |
4105 | 6386 |
4106 if (MAY_HAVE_DEBUG_INSNS) | 6387 if (MAY_HAVE_DEBUG_INSNS) |
4107 expand_debug_locations (); | 6388 expand_debug_locations (); |
4108 | 6389 |
4109 execute_free_datastructures (); | 6390 if (deep_ter_debug_map) |
6391 { | |
6392 delete deep_ter_debug_map; | |
6393 deep_ter_debug_map = NULL; | |
6394 } | |
6395 | |
6396 /* Free stuff we no longer need after GIMPLE optimizations. */ | |
6397 free_dominance_info (CDI_DOMINATORS); | |
6398 free_dominance_info (CDI_POST_DOMINATORS); | |
6399 delete_tree_cfg_annotations (fun); | |
6400 | |
4110 timevar_push (TV_OUT_OF_SSA); | 6401 timevar_push (TV_OUT_OF_SSA); |
4111 finish_out_of_ssa (&SA); | 6402 finish_out_of_ssa (&SA); |
4112 timevar_pop (TV_OUT_OF_SSA); | 6403 timevar_pop (TV_OUT_OF_SSA); |
4113 | 6404 |
4114 timevar_push (TV_POST_EXPAND); | 6405 timevar_push (TV_POST_EXPAND); |
4115 /* We are no longer in SSA form. */ | 6406 /* We are no longer in SSA form. */ |
4116 cfun->gimple_df->in_ssa_p = false; | 6407 fun->gimple_df->in_ssa_p = false; |
6408 loops_state_clear (LOOP_CLOSED_SSA); | |
4117 | 6409 |
4118 /* Expansion is used by optimization passes too, set maybe_hot_insn_p | 6410 /* Expansion is used by optimization passes too, set maybe_hot_insn_p |
4119 conservatively to true until they are all profile aware. */ | 6411 conservatively to true until they are all profile aware. */ |
4120 pointer_map_destroy (lab_rtx_for_bb); | 6412 delete lab_rtx_for_bb; |
4121 free_histograms (); | 6413 free_histograms (fun); |
4122 | 6414 |
4123 construct_exit_block (); | 6415 construct_exit_block (); |
4124 set_curr_insn_block (DECL_INITIAL (current_function_decl)); | 6416 insn_locations_finalize (); |
4125 insn_locators_finalize (); | 6417 |
6418 if (var_ret_seq) | |
6419 { | |
6420 rtx_insn *after = return_label; | |
6421 rtx_insn *next = NEXT_INSN (after); | |
6422 if (next && NOTE_INSN_BASIC_BLOCK_P (next)) | |
6423 after = next; | |
6424 emit_insn_after (var_ret_seq, after); | |
6425 } | |
4126 | 6426 |
4127 /* Zap the tree EH table. */ | 6427 /* Zap the tree EH table. */ |
4128 set_eh_throw_stmt_table (cfun, NULL); | 6428 set_eh_throw_stmt_table (fun, NULL); |
4129 | 6429 |
6430 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence |
6431 split edges which edge insertions might do. */ | |
4130 rebuild_jump_labels (get_insns ()); | 6432 rebuild_jump_labels (get_insns ()); |
4131 | 6433 |
4132 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) | 6434 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun), |
6435 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb) | |
4133 { | 6436 { |
4134 edge e; | 6437 edge e; |
4135 edge_iterator ei; | 6438 edge_iterator ei; |
4136 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 6439 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
4137 { | 6440 { |
4138 if (e->insns.r) | 6441 if (e->insns.r) |
4139 { | 6442 { |
4140 /* Avoid putting insns before parm_birth_insn. */ | 6443 rebuild_jump_labels_chain (e->insns.r); |
4141 if (e->src == ENTRY_BLOCK_PTR | 6444 /* Put insns after parm birth, but before |
4142 && single_succ_p (ENTRY_BLOCK_PTR) | 6445 NOTE_INSNS_FUNCTION_BEG. */ |
4143 && parm_birth_insn) | 6446 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun) |
6447 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun))) | |
4144 { | 6448 { |
4145 rtx insns = e->insns.r; | 6449 rtx_insn *insns = e->insns.r; |
4146 e->insns.r = NULL_RTX; | 6450 e->insns.r = NULL; |
4147 emit_insn_after_noloc (insns, parm_birth_insn, e->dest); | 6451 if (NOTE_P (parm_birth_insn) |
6452 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG) | |
6453 emit_insn_before_noloc (insns, parm_birth_insn, e->dest); | |
6454 else | |
6455 emit_insn_after_noloc (insns, parm_birth_insn, e->dest); | |
4148 } | 6456 } |
4149 else | 6457 else |
4150 commit_one_edge_insertion (e); | 6458 commit_one_edge_insertion (e); |
4151 } | 6459 } |
4152 else | 6460 else |
4155 } | 6463 } |
4156 | 6464 |
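The loop above uses GCC's safe edge-iteration idiom: ei_safe_edge re-fetches the edge on every test, and the iterator only advances when nothing was committed, since commit_one_edge_insertion may split the edge and reshuffle bb->succs. The idiom in isolation, assuming BB is in scope:

    /* Sketch of the iteration idiom; do not advance the iterator when
       an insertion was committed, as that may have split E.  */
    edge e;
    edge_iterator ei;
    for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
      {
        if (e->insns.r)
          commit_one_edge_insertion (e);  /* may split E.  */
        else
          ei_next (&ei);
      }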
4157 /* We're done expanding trees to RTL. */ | 6465 /* We're done expanding trees to RTL. */ |
4158 currently_expanding_to_rtl = 0; | 6466 currently_expanding_to_rtl = 0; |
4159 | 6467 |
4160 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb) | 6468 flush_mark_addressable_queue (); |
6469 | |
6470 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb, | |
6471 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb) | |
4161 { | 6472 { |
4162 edge e; | 6473 edge e; |
4163 edge_iterator ei; | 6474 edge_iterator ei; |
4164 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 6475 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
4165 { | 6476 { |
4176 else | 6487 else |
4177 ei_next (&ei); | 6488 ei_next (&ei); |
4178 } | 6489 } |
4179 } | 6490 } |
4180 | 6491 |
4181 blocks = sbitmap_alloc (last_basic_block); | 6492 auto_sbitmap blocks (last_basic_block_for_fn (fun)); |
4182 sbitmap_ones (blocks); | 6493 bitmap_ones (blocks); |
4183 find_many_sub_basic_blocks (blocks); | 6494 find_many_sub_basic_blocks (blocks); |
4184 sbitmap_free (blocks); | |
4185 purge_all_dead_edges (); | 6495 purge_all_dead_edges (); |
4186 | 6496 |
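Note also the move on the right-hand side from manual sbitmap_alloc/sbitmap_free to auto_sbitmap, an RAII wrapper whose storage is released when it leaves scope. The same change in isolation, as a sketch:

    /* Sketch: auto_sbitmap owns its bits for the enclosing scope, so
       the explicit sbitmap_free of the old code is no longer needed.  */
    {
      auto_sbitmap blocks (last_basic_block_for_fn (fun));
      bitmap_ones (blocks);               /* start from all-ones.  */
      find_many_sub_basic_blocks (blocks);
    }                                     /* storage released here.  */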
4187 compact_blocks (); | |
4188 | |
4189 expand_stack_alignment (); | 6497 expand_stack_alignment (); |
4190 | 6498 |
4191 #ifdef ENABLE_CHECKING | 6499 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this |
4192 verify_flow_info (); | 6500 function. */ |
4193 #endif | 6501 if (crtl->tail_call_emit) |
6502 fixup_tail_calls (); | |
6503 | |
6504 /* After initial rtl generation, call back to finish generating | |
6505 exception support code. We need to do this before cleaning up | |
6506 the CFG as the code does not expect dead landing pads. */ | |
6507 if (fun->eh->region_tree != NULL) | |
6508 finish_eh_generation (); | |
6509 | |
6510 /* BB subdivision may have created basic blocks that are only reachable |
6511 from unlikely bbs but not marked as such in the profile. */ | |
6512 if (optimize) | |
6513 propagate_unlikely_bbs_forward (); | |
6514 | |
6515 /* Remove unreachable blocks; otherwise we cannot compute dominators |
6516 which are needed for loop state verification. As a side-effect | |
6517 this also compacts blocks. | |
6518 ??? We cannot remove trivially dead insns here as for example | |
6519 the DRAP reg on i?86 is not magically live at this point. | |
6520 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */ | |
6521 cleanup_cfg (CLEANUP_NO_INSN_DEL); | |
6522 | |
6523 checking_verify_flow_info (); | |
6524 | |
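checking_verify_flow_info on the right replaces the old #ifdef ENABLE_CHECKING block on the left: the compile-time guard became a run-time flag_checking test. A sketch of the wrapper's shape, assuming the definition that lives in cfghooks.h:

    /* Hedged sketch; the real wrapper is defined in cfghooks.h and may
       differ in detail.  */
    static inline void
    checking_verify_flow_info (void)
    {
      if (flag_checking)
        verify_flow_info ();
    }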
6525 /* Initialize pseudos allocated for hard registers. */ | |
6526 emit_initial_value_sets (); | |
6527 | |
6528 /* And finally unshare all RTL. */ | |
6529 unshare_all_rtl (); | |
4194 | 6530 |
4195 /* There's no need to defer outputting this function any more; we | 6531 /* There's no need to defer outputting this function any more; we |
4196 know we want to output it. */ | 6532 know we want to output it. */ |
4197 DECL_DEFER_OUTPUT (current_function_decl) = 0; | 6533 DECL_DEFER_OUTPUT (current_function_decl) = 0; |
4198 | 6534 |
4207 /* And the pass manager will dump RTL for us. */ | 6543 /* And the pass manager will dump RTL for us. */ |
4208 } | 6544 } |
4209 | 6545 |
4210 /* If we're emitting a nested function, make sure its parent gets | 6546 /* If we're emitting a nested function, make sure its parent gets |
4211 emitted as well. Doing otherwise confuses debug info. */ | 6547 emitted as well. Doing otherwise confuses debug info. */ |
4212 { | 6548 { |
4213 tree parent; | 6549 tree parent; |
4214 for (parent = DECL_CONTEXT (current_function_decl); | 6550 for (parent = DECL_CONTEXT (current_function_decl); |
4215 parent != NULL_TREE; | 6551 parent != NULL_TREE; |
4216 parent = get_containing_scope (parent)) | 6552 parent = get_containing_scope (parent)) |
4217 if (TREE_CODE (parent) == FUNCTION_DECL) | 6553 if (TREE_CODE (parent) == FUNCTION_DECL) |
4218 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; | 6554 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; |
4219 } | 6555 } |
4220 | |
4221 /* We are now committed to emitting code for this function. Do any | |
4222 preparation, such as emitting abstract debug info for the inline | |
4223 before it gets mangled by optimization. */ | |
4224 if (cgraph_function_possibly_inlined_p (current_function_decl)) | |
4225 (*debug_hooks->outlining_inline_function) (current_function_decl); | |
4226 | 6556 |
4227 TREE_ASM_WRITTEN (current_function_decl) = 1; | 6557 TREE_ASM_WRITTEN (current_function_decl) = 1; |
4228 | 6558 |
4229 /* After expanding, the return labels are no longer needed. */ | 6559 /* After expanding, the return labels are no longer needed. */ |
4230 return_label = NULL; | 6560 return_label = NULL; |
4231 naked_return_label = NULL; | 6561 naked_return_label = NULL; |
6562 | |
6563 /* After expanding, the tm_restart map is no longer needed. */ | |
6564 if (fun->gimple_df->tm_restart) | |
6565 fun->gimple_df->tm_restart = NULL; | |
6566 | |
4232 /* Tag the blocks with a depth number so that change_scope can find | 6567 /* Tag the blocks with a depth number so that change_scope can find |
4233 the common parent easily. */ | 6568 the common parent easily. */ |
4234 set_block_levels (DECL_INITIAL (cfun->decl), 0); | 6569 set_block_levels (DECL_INITIAL (fun->decl), 0); |
4235 default_rtl_profile (); | 6570 default_rtl_profile (); |
6571 | |
6572 /* For -dx discard loops now, otherwise IL verify in clean_state will | |
6573 ICE. */ | |
6574 if (rtl_dump_and_exit) | |
6575 { | |
6576 cfun->curr_properties &= ~PROP_loops; | |
6577 loop_optimizer_finalize (); | |
6578 } | |
6579 | |
4236 timevar_pop (TV_POST_EXPAND); | 6580 timevar_pop (TV_POST_EXPAND); |
6581 | |
4237 return 0; | 6582 return 0; |
4238 } | 6583 } |
4239 | 6584 |
4240 struct rtl_opt_pass pass_expand = | 6585 } // anon namespace |
4241 { | 6586 |
4242 { | 6587 rtl_opt_pass * |
4243 RTL_PASS, | 6588 make_pass_expand (gcc::context *ctxt) |
4244 "expand", /* name */ | 6589 { |
4245 NULL, /* gate */ | 6590 return new pass_expand (ctxt); |
4246 gimple_expand_cfg, /* execute */ | 6591 } |
4247 NULL, /* sub */ | |
4248 NULL, /* next */ | |
4249 0, /* static_pass_number */ | |
4250 TV_EXPAND, /* tv_id */ | |
4251 PROP_ssa | PROP_gimple_leh | PROP_cfg | |
4252 | PROP_gimple_lcx, /* properties_required */ | |
4253 PROP_rtl, /* properties_provided */ | |
4254 PROP_ssa | PROP_trees, /* properties_destroyed */ | |
4255 TODO_verify_ssa | TODO_verify_flow | |
4256 | TODO_verify_stmts, /* todo_flags_start */ | |
4257 TODO_dump_func | |
4258 | TODO_ggc_collect /* todo_flags_finish */ | |
4259 } | |
4260 }; |
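The old-side struct rtl_opt_pass initializer above was replaced in the new revision by a C++ pass class plus the make_pass_expand factory shown on the right; the class itself is defined earlier in cfgexpand.c. A hedged sketch of that shape, with field values echoing the old initializer rather than guaranteed to match the file:

    /* Sketch of the GCC 7 pass scheme; pass_data fields here mirror
       the old struct on the left and may not match cfgexpand.c
       exactly.  */
    namespace {

    const pass_data pass_data_expand =
    {
      RTL_PASS, /* type */
      "expand", /* name */
      OPTGROUP_NONE, /* optinfo_flags */
      TV_EXPAND, /* tv_id */
      PROP_ssa | PROP_gimple_leh | PROP_cfg
        | PROP_gimple_lcx, /* properties_required */
      PROP_rtl, /* properties_provided */
      PROP_ssa | PROP_trees, /* properties_destroyed */
      0, /* todo_flags_start */
      0, /* todo_flags_finish */
    };

    class pass_expand : public rtl_opt_pass
    {
    public:
      pass_expand (gcc::context *ctxt)
        : rtl_opt_pass (pass_data_expand, ctxt)
      {}

      /* opt_pass methods: */
      virtual unsigned int execute (function *);
    };

    } // anon namespace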