comparison gcc/cfgexpand.c.orig @ 57:326d9e06c2e3

modify c-parser.c

author: ryoma <e075725@ie.u-ryukyu.ac.jp>
date:   Mon, 15 Feb 2010 00:54:17 +0900
comparing 54:f62c169bbc24 with 57:326d9e06c2e3

/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#ifndef noCbC
#include "cbc-tree.h"
#endif
#include "ssaexpand.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  return t;
}
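
/* Example: for a GIMPLE assignment "a = b + c" the RHS class is
   GIMPLE_BINARY_RHS and the build2 call above reconstructs
   PLUS_EXPR <b, c>; for "a = -b" it is GIMPLE_UNARY_RHS; and for a
   plain copy "a = b" it is GIMPLE_SINGLE_RHS, where rhs1 is returned
   directly (possibly copied to avoid clobbering its location).  */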

/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note specifying the probability.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
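
/* A sketch of the condjump-around-jump shape the checks above accept,
   with the note attached to the inverted condition:

     (cond_jump L_around)    ; gets REG_BR_PROB_BASE - probability
     (jump L_target)
     (barrier)
     L_around:

   Anything else either falls through to the single-condjump case
   below or fails with the dump message.  */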

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
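
/* Illustrative note: a partition is a singly linked list threaded
   through the NEXT fields.  E.g. if variables 3, 5 and 7 end up in
   one partition with 3 as representative, the list might read
   stack_vars[3].next == 5, stack_vars[5].next == 7 and
   stack_vars[7].next == EOC, while all three have
   representative == 3.  */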

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
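
/* Worked example (downward-growing frame, frame_phase == 0):
   with frame_offset == 0, a request for size == 12 at align == 8
   computes (0 - 12) & -8 == -16, so the slot starts at offset -16
   (bytes -16..-5 hold the variable, -4..-1 become padding) and
   frame_offset is updated to -16.  */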

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
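
/* Sketch of the list splice above: if A's partition is A -> x -> EOC
   and B's is B -> y -> EOC, then after the merge A's list reads
   A -> B -> y -> x -> EOC, with every member's representative set
   to A and the members of B shifted by OFFSET.  */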

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}
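
/* Example of the alignment refinement above: if OFFSET is 5 and a
   candidate J needs jalign == 4, then toff = (5 + 3) & -4 == 8, so
   J is placed at offset 8 and the 3 padding bytes are charged
   against the remaining space ISIZE before J's own size is.  */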

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
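
/* Note on "align = offset & -offset" above: in two's complement this
   isolates the lowest set bit of OFFSET, i.e. the largest power of
   two dividing it, which is the alignment the slot actually has.
   For example, offset == -24 gives -24 & 24 == 8, so the decl is
   marked as 8-byte (64-bit) aligned.  */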

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision is made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
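
/* Example classification (assuming the default ssp-buffer-size of 8):
   "char buf[64]" yields SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[4]" yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   and "struct { int a[4]; }" yields SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY
   via the recursion over its fields.  */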

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree accounting for the sizes of variables that would be
   expanded, without actually expanding them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
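
  /* Worked example: with a 16-byte preferred stack boundary
     (align == 16) and STARTING_FRAME_OFFSET == 8, off == 8 and
     frame_phase == 8, matching the invariant documented at
     frame_phase above that (frame_offset + frame_phase) is a
     multiple of the boundary.  */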

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          ggc_free (t);
          continue;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        {
          expand_one_var (var, true, true);
          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
            {
              rtx rtl = DECL_RTL_IF_SET (var);

              /* Keep artificial non-ignored vars in cfun->local_decls
                 chain until instantiate_decls.  */
              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
                {
                  TREE_CHAIN (t) = cfun->local_decls;
                  cfun->local_decls = t;
                  continue;
                }
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}


/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}
1576 | |
1577 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. | |
1578 Returns a new basic block if we've terminated the current basic | |
1579 block and created a new one. */ | |
1580 | |
1581 static basic_block | |
1582 expand_gimple_cond (basic_block bb, gimple stmt) | |
1583 { | |
1584 basic_block new_bb, dest; | |
1585 edge new_edge; | |
1586 edge true_edge; | |
1587 edge false_edge; | |
1588 rtx last2, last; | |
1589 enum tree_code code; | |
1590 tree op0, op1; | |
1591 | |
1592 code = gimple_cond_code (stmt); | |
1593 op0 = gimple_cond_lhs (stmt); | |
1594 op1 = gimple_cond_rhs (stmt); | |
1595 /* We're sometimes presented with such code: | |
1596 D.123_1 = x < y; | |
1597 if (D.123_1 != 0) | |
1598 ... | |
1599 This would expand to two comparisons which then later might | |
1600 be cleaned up by combine. But some pattern matchers like if-conversion | |
1601 work better when there's only one compare, so make up for this | |
1602 here as a special exception if TER would have made the same change. */ | |
1603 if (gimple_cond_single_var_p (stmt) | |
1604 && SA.values | |
1605 && TREE_CODE (op0) == SSA_NAME | |
1606 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) | |
1607 { | |
1608 gimple second = SSA_NAME_DEF_STMT (op0); | |
1609 if (gimple_code (second) == GIMPLE_ASSIGN) | |
1610 { | |
1611 enum tree_code code2 = gimple_assign_rhs_code (second); | |
1612 if (TREE_CODE_CLASS (code2) == tcc_comparison) | |
1613 { | |
1614 code = code2; | |
1615 op0 = gimple_assign_rhs1 (second); | |
1616 op1 = gimple_assign_rhs2 (second); | |
1617 } | |
1618 /* If jumps are cheap turn some more codes into | |
1619 jumpy sequences. */ | |
1620 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) | |
1621 { | |
1622 if ((code2 == BIT_AND_EXPR | |
1623 && TYPE_PRECISION (TREE_TYPE (op0)) == 1 | |
1624 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) | |
1625 || code2 == TRUTH_AND_EXPR) | |
1626 { | |
1627 code = TRUTH_ANDIF_EXPR; | |
1628 op0 = gimple_assign_rhs1 (second); | |
1629 op1 = gimple_assign_rhs2 (second); | |
1630 } | |
1631 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR) | |
1632 { | |
1633 code = TRUTH_ORIF_EXPR; | |
1634 op0 = gimple_assign_rhs1 (second); | |
1635 op1 = gimple_assign_rhs2 (second); | |
1636 } | |
1637 } | |
1638 } | |
1639 } | |
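/* Illustrative example (not from this file): with cheap branches,
   source code such as

       if (a && b) ...

   reaches this point as D.1 = a & b; if (D.1 != 0) and is rewritten
   back into TRUTH_ANDIF_EXPR, i.e. into two conditional jumps.  */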
1640 | |
1641 last2 = last = get_last_insn (); | |
1642 | |
1643 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
1644 if (gimple_has_location (stmt)) | |
1645 { | |
1646 set_curr_insn_source_location (gimple_location (stmt)); | |
1647 set_curr_insn_block (gimple_block (stmt)); | |
1648 } | |
1649 | |
1650 /* These flags have no purpose in RTL land. */ | |
1651 true_edge->flags &= ~EDGE_TRUE_VALUE; | |
1652 false_edge->flags &= ~EDGE_FALSE_VALUE; | |
1653 | |
1654 /* We can either have a pure conditional jump with one fallthru edge or | |
1655 two-way jump that needs to be decomposed into two basic blocks. */ | |
1656 if (false_edge->dest == bb->next_bb) | |
1657 { | |
1658 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); | |
1659 add_reg_br_prob_note (last, true_edge->probability); | |
1660 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
1661 if (true_edge->goto_locus) | |
1662 { | |
1663 set_curr_insn_source_location (true_edge->goto_locus); | |
1664 set_curr_insn_block (true_edge->goto_block); | |
1665 true_edge->goto_locus = curr_insn_locator (); | |
1666 } | |
1667 true_edge->goto_block = NULL; | |
1668 false_edge->flags |= EDGE_FALLTHRU; | |
1669 maybe_cleanup_end_of_block (false_edge, last); | |
1670 return NULL; | |
1671 } | |
1672 if (true_edge->dest == bb->next_bb) | |
1673 { | |
1674 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest)); | |
1675 add_reg_br_prob_note (last, false_edge->probability); | |
1676 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
1677 if (false_edge->goto_locus) | |
1678 { | |
1679 set_curr_insn_source_location (false_edge->goto_locus); | |
1680 set_curr_insn_block (false_edge->goto_block); | |
1681 false_edge->goto_locus = curr_insn_locator (); | |
1682 } | |
1683 false_edge->goto_block = NULL; | |
1684 true_edge->flags |= EDGE_FALLTHRU; | |
1685 maybe_cleanup_end_of_block (true_edge, last); | |
1686 return NULL; | |
1687 } | |
1688 | |
1689 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); | |
1690 add_reg_br_prob_note (last, true_edge->probability); | |
1691 last = get_last_insn (); | |
1692 if (false_edge->goto_locus) | |
1693 { | |
1694 set_curr_insn_source_location (false_edge->goto_locus); | |
1695 set_curr_insn_block (false_edge->goto_block); | |
1696 false_edge->goto_locus = curr_insn_locator (); | |
1697 } | |
1698 false_edge->goto_block = NULL; | |
1699 emit_jump (label_rtx_for_bb (false_edge->dest)); | |
1700 | |
1701 BB_END (bb) = last; | |
1702 if (BARRIER_P (BB_END (bb))) | |
1703 BB_END (bb) = PREV_INSN (BB_END (bb)); | |
1704 update_bb_for_insn (bb); | |
1705 | |
1706 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
1707 dest = false_edge->dest; | |
1708 redirect_edge_succ (false_edge, new_bb); | |
1709 false_edge->flags |= EDGE_FALLTHRU; | |
1710 new_bb->count = false_edge->count; | |
1711 new_bb->frequency = EDGE_FREQUENCY (false_edge); | |
1712 new_edge = make_edge (new_bb, dest, 0); | |
1713 new_edge->probability = REG_BR_PROB_BASE; | |
1714 new_edge->count = new_bb->count; | |
1715 if (BARRIER_P (BB_END (new_bb))) | |
1716 BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); | |
1717 update_bb_for_insn (new_bb); | |
1718 | |
1719 maybe_dump_rtl_for_gimple_stmt (stmt, last2); | |
1720 | |
1721 if (true_edge->goto_locus) | |
1722 { | |
1723 set_curr_insn_source_location (true_edge->goto_locus); | |
1724 set_curr_insn_block (true_edge->goto_block); | |
1725 true_edge->goto_locus = curr_insn_locator (); | |
1726 } | |
1727 true_edge->goto_block = NULL; | |
1728 | |
1729 return new_bb; | |
1730 } | |
1731 | |
1732 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL | |
1733 statement STMT. */ | |
1734 | |
1735 static void | |
1736 expand_call_stmt (gimple stmt) | |
1737 { | |
1738 tree exp; | |
1739 tree lhs = gimple_call_lhs (stmt); | |
1740 size_t i; | |
1741 | |
1742 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | |
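/* A CALL_EXPR is a vl_exp: operand 0 holds the operand count, operand 1
   the callee and operand 2 the static chain, hence the three extra
   slots; the actual arguments start at index 3.  */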
1743 | |
1744 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); | |
1745 TREE_TYPE (exp) = gimple_call_return_type (stmt); | |
1746 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); | |
1747 | |
1748 for (i = 0; i < gimple_call_num_args (stmt); i++) | |
1749 CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i); | |
1750 | |
1751 if (gimple_has_side_effects (stmt)) | |
1752 TREE_SIDE_EFFECTS (exp) = 1; | |
1753 | |
1754 if (gimple_call_nothrow_p (stmt)) | |
1755 TREE_NOTHROW (exp) = 1; | |
1756 | |
1757 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); | |
1758 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); | |
1759 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); | |
1760 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt); | |
1761 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); | |
1762 SET_EXPR_LOCATION (exp, gimple_location (stmt)); | |
1763 TREE_BLOCK (exp) = gimple_block (stmt); | |
1764 | |
1765 if (lhs) | |
1766 expand_assignment (lhs, exp, false); | |
1767 else | |
1768 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL); | |
1769 } | |
1770 | |
1771 /* A subroutine of expand_gimple_stmt, expanding one gimple statement | |
1772 STMT that doesn't require special handling for outgoing edges; that | |
1773 is, no tail calls and no GIMPLE_COND. */ | |
1774 | |
1775 static void | |
1776 expand_gimple_stmt_1 (gimple stmt) | |
1777 { | |
1778 tree op0; | |
1779 switch (gimple_code (stmt)) | |
1780 { | |
1781 case GIMPLE_GOTO: | |
1782 op0 = gimple_goto_dest (stmt); | |
1783 if (TREE_CODE (op0) == LABEL_DECL) | |
1784 expand_goto (op0); | |
1785 else | |
1786 expand_computed_goto (op0); | |
1787 break; | |
1788 case GIMPLE_LABEL: | |
1789 expand_label (gimple_label_label (stmt)); | |
1790 break; | |
1791 case GIMPLE_NOP: | |
1792 case GIMPLE_PREDICT: | |
1793 break; | |
1794 case GIMPLE_SWITCH: | |
1795 expand_case (stmt); | |
1796 break; | |
1797 case GIMPLE_ASM: | |
1798 expand_asm_stmt (stmt); | |
1799 break; | |
1800 case GIMPLE_CALL: | |
1801 expand_call_stmt (stmt); | |
1802 break; | |
1803 | |
1804 case GIMPLE_RETURN: | |
1805 op0 = gimple_return_retval (stmt); | |
1806 | |
1807 if (op0 && op0 != error_mark_node) | |
1808 { | |
1809 tree result = DECL_RESULT (current_function_decl); | |
1810 | |
1811 /* If we are not returning the current function's RESULT_DECL, | |
1812 build an assignment to it. */ | |
1813 if (op0 != result) | |
1814 { | |
1815 /* I believe that a function's RESULT_DECL is unique. */ | |
1816 gcc_assert (TREE_CODE (op0) != RESULT_DECL); | |
1817 | |
1818 /* ??? We'd like to use simply expand_assignment here, | |
1819 but this fails if the value is of BLKmode but the return | |
1820 decl is a register. expand_return has special handling | |
1821 for this combination, which eventually should move | |
1822 to common code. See comments there. Until then, let's | |
1823 build a modify expression :-/ */ | |
1824 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | |
1825 result, op0); | |
1826 } | |
1827 } | |
1828 if (!op0) | |
1829 expand_null_return (); | |
1830 else | |
1831 expand_return (op0); | |
1832 break; | |
1833 | |
1834 case GIMPLE_ASSIGN: | |
1835 { | |
1836 tree lhs = gimple_assign_lhs (stmt); | |
1837 | |
1838 /* Tree expand used to fiddle with |= and &= of two bitfield | |
1839 COMPONENT_REFs here. This can't happen with gimple, the LHS | |
1840 of binary assigns must be a gimple reg. */ | |
1841 | |
1842 if (TREE_CODE (lhs) != SSA_NAME | |
1843 || get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1844 == GIMPLE_SINGLE_RHS) | |
1845 { | |
1846 tree rhs = gimple_assign_rhs1 (stmt); | |
1847 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1848 == GIMPLE_SINGLE_RHS); | |
1849 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) | |
1850 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); | |
1851 expand_assignment (lhs, rhs, | |
1852 gimple_assign_nontemporal_move_p (stmt)); | |
1853 } | |
1854 else | |
1855 { | |
1856 rtx target, temp; | |
1857 bool nontemporal = gimple_assign_nontemporal_move_p (stmt); | |
1858 struct separate_ops ops; | |
1859 bool promoted = false; | |
1860 | |
1861 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); | |
1862 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) | |
1863 promoted = true; | |
1864 | |
1865 ops.code = gimple_assign_rhs_code (stmt); | |
1866 ops.type = TREE_TYPE (lhs); | |
1867 switch (get_gimple_rhs_class (gimple_expr_code (stmt))) | |
1868 { | |
1869 case GIMPLE_BINARY_RHS: | |
1870 ops.op1 = gimple_assign_rhs2 (stmt); | |
1871 /* Fallthru */ | |
1872 case GIMPLE_UNARY_RHS: | |
1873 ops.op0 = gimple_assign_rhs1 (stmt); | |
1874 break; | |
1875 default: | |
1876 gcc_unreachable (); | |
1877 } | |
1878 ops.location = gimple_location (stmt); | |
1879 | |
1880 /* If we want to use a nontemporal store, force the value to | |
1881 register first. If we store into a promoted register, | |
1882 don't directly expand to target. */ | |
1883 temp = nontemporal || promoted ? NULL_RTX : target; | |
1884 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target), | |
1885 EXPAND_NORMAL); | |
1886 | |
1887 if (temp == target) | |
1888 ; | |
1889 else if (promoted) | |
1890 { | |
1891 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); | |
1892 /* If TEMP is a VOIDmode constant, use convert_modes to make | |
1893 sure that we properly convert it. */ | |
1894 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
1895 { | |
1896 temp = convert_modes (GET_MODE (target), | |
1897 TYPE_MODE (ops.type), | |
1898 temp, unsignedp); | |
1899 temp = convert_modes (GET_MODE (SUBREG_REG (target)), | |
1900 GET_MODE (target), temp, unsignedp); | |
1901 } | |
1902 | |
1903 convert_move (SUBREG_REG (target), temp, unsignedp); | |
1904 } | |
1905 else if (nontemporal && emit_storent_insn (target, temp)) | |
1906 ; | |
1907 else | |
1908 { | |
1909 temp = force_operand (temp, target); | |
1910 if (temp != target) | |
1911 emit_move_insn (target, temp); | |
1912 } | |
1913 } | |
1914 } | |
1915 break; | |
1916 | |
1917 default: | |
1918 gcc_unreachable (); | |
1919 } | |
1920 } | |
1921 | |
1922 /* Expand one gimple statement STMT and return the last RTL instruction | |
1923 before any of the newly generated ones. | |
1924 | |
1925 In addition to generating the necessary RTL instructions this also | |
1926 sets REG_EH_REGION notes if necessary and sets the current source | |
1927 location for diagnostics. */ | |
1928 | |
1929 static rtx | |
1930 expand_gimple_stmt (gimple stmt) | |
1931 { | |
1932 int lp_nr = 0; | |
1933 rtx last = NULL; | |
1934 location_t saved_location = input_location; | |
1935 | |
1936 last = get_last_insn (); | |
1937 | |
1938 /* If this is an expression of some kind and it has an associated line | |
1939 number, then emit the line number before expanding the expression. | |
1940 | |
1941 We need to save and restore the file and line information so that | |
1942 errors discovered during expansion are emitted with the right | |
1943 information. It would be better if the diagnostic routines | |
1944 used the file/line information embedded in the tree nodes rather | |
1945 than globals. */ | |
1946 gcc_assert (cfun); | |
1947 | |
1948 if (gimple_has_location (stmt)) | |
1949 { | |
1950 input_location = gimple_location (stmt); | |
1951 set_curr_insn_source_location (input_location); | |
1952 | |
1953 /* Record where the insns produced belong. */ | |
1954 set_curr_insn_block (gimple_block (stmt)); | |
1955 } | |
1956 | |
1957 expand_gimple_stmt_1 (stmt); | |
1958 /* Free any temporaries used to evaluate this statement. */ | |
1959 free_temp_slots (); | |
1960 | |
1961 input_location = saved_location; | |
1962 | |
1963 /* Mark all insns that may trap. */ | |
1964 lp_nr = lookup_stmt_eh_lp (stmt); | |
1965 if (lp_nr) | |
1966 { | |
1967 rtx insn; | |
1968 for (insn = next_real_insn (last); insn; | |
1969 insn = next_real_insn (insn)) | |
1970 { | |
1971 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
1972 /* If we want exceptions for non-call insns, any | |
1973 may_trap_p instruction may throw. */ | |
1974 && GET_CODE (PATTERN (insn)) != CLOBBER | |
1975 && GET_CODE (PATTERN (insn)) != USE | |
1976 && insn_could_throw_p (insn)) | |
1977 make_reg_eh_region_note (insn, 0, lp_nr); | |
1978 } | |
1979 } | |
1980 | |
1981 return last; | |
1982 } | |
1983 | |
1984 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL | |
1985 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually | |
1986 generated a tail call (something that might be denied by the ABI | |
1987 rules governing the call; see calls.c). | |
1988 | |
1989 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and | |
1990 can still reach the rest of BB. The case here is __builtin_sqrt, | |
1991 where the NaN result goes through the external function (with a | |
1992 tailcall) and the normal result happens via a sqrt instruction. */ | |
1993 | |
1994 static basic_block | |
1995 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru) | |
1996 { | |
1997 rtx last2, last; | |
1998 edge e; | |
1999 edge_iterator ei; | |
2000 int probability; | |
2001 gcov_type count; | |
2002 | |
2003 last2 = last = expand_gimple_stmt (stmt); | |
2004 | |
2005 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) | |
2006 if (CALL_P (last) && SIBLING_CALL_P (last)) | |
2007 goto found; | |
2008 | |
2009 maybe_dump_rtl_for_gimple_stmt (stmt, last2); | |
2010 | |
2011 *can_fallthru = true; | |
2012 return NULL; | |
2013 | |
2014 found: | |
2015 /* ??? Wouldn't it be better to just reset any pending stack adjust? | |
2016 Any instructions emitted here are about to be deleted. */ | |
2017 do_pending_stack_adjust (); | |
2018 | |
2019 /* Remove any non-eh, non-abnormal edges that don't go to exit. */ | |
2020 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be | |
2021 EH or abnormal edges, we shouldn't have created a tail call in | |
2022 the first place. So it seems to me we should just be removing | |
2023 all edges here, or redirecting the existing fallthru edge to | |
2024 the exit block. */ | |
2025 | |
2026 probability = 0; | |
2027 count = 0; | |
2028 | |
2029 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | |
2030 { | |
2031 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) | |
2032 { | |
2033 if (e->dest != EXIT_BLOCK_PTR) | |
2034 { | |
2035 e->dest->count -= e->count; | |
2036 e->dest->frequency -= EDGE_FREQUENCY (e); | |
2037 if (e->dest->count < 0) | |
2038 e->dest->count = 0; | |
2039 if (e->dest->frequency < 0) | |
2040 e->dest->frequency = 0; | |
2041 } | |
2042 count += e->count; | |
2043 probability += e->probability; | |
2044 remove_edge (e); | |
2045 } | |
2046 else | |
2047 ei_next (&ei); | |
2048 } | |
2049 | |
2050 /* This is somewhat ugly: the call_expr expander often emits instructions | |
2051 after the sibcall (to perform the function return). These confuse the | |
2052 find_many_sub_basic_blocks code, so we need to get rid of them. */ | |
2053 last = NEXT_INSN (last); | |
2054 gcc_assert (BARRIER_P (last)); | |
2055 | |
2056 *can_fallthru = false; | |
2057 while (NEXT_INSN (last)) | |
2058 { | |
2059 /* For instance, an sqrt builtin expander expands an if with a | |
2060 sibcall in the then-branch and a label for the else-branch. */ | |
2061 if (LABEL_P (NEXT_INSN (last))) | |
2062 { | |
2063 *can_fallthru = true; | |
2064 break; | |
2065 } | |
2066 delete_insn (NEXT_INSN (last)); | |
2067 } | |
2068 | |
2069 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | |
2070 e->probability += probability; | |
2071 e->count += count; | |
2072 BB_END (bb) = last; | |
2073 update_bb_for_insn (bb); | |
2074 | |
2075 if (NEXT_INSN (last)) | |
2076 { | |
2077 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
2078 | |
2079 last = BB_END (bb); | |
2080 if (BARRIER_P (last)) | |
2081 BB_END (bb) = PREV_INSN (last); | |
2082 } | |
2083 | |
2084 maybe_dump_rtl_for_gimple_stmt (stmt, last2); | |
2085 | |
2086 return bb; | |
2087 } | |
2088 | |
2089 /* Return the difference between the floor and the truncated result of | |
2090 a signed division by OP1 with remainder MOD. */ | |
2091 static rtx | |
2092 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2093 { | |
2094 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ | |
2095 return gen_rtx_IF_THEN_ELSE | |
2096 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2097 gen_rtx_IF_THEN_ELSE | |
2098 (mode, gen_rtx_LT (BImode, | |
2099 gen_rtx_DIV (mode, op1, mod), | |
2100 const0_rtx), | |
2101 constm1_rtx, const0_rtx), | |
2102 const0_rtx); | |
2103 } | |
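/* Worked example: for op0 = -7, op1 = 2 the truncated division gives -3
   with mod = -1; op1 / mod = -2 < 0, so the adjustment is -1 and
   -3 + (-1) = -4 = floor (-7 / 2).  */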
2104 | |
2105 /* Return the difference between the ceil and the truncated result of | |
2106 a signed division by OP1 with remainder MOD. */ | |
2107 static rtx | |
2108 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2109 { | |
2110 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ | |
2111 return gen_rtx_IF_THEN_ELSE | |
2112 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2113 gen_rtx_IF_THEN_ELSE | |
2114 (mode, gen_rtx_GT (BImode, | |
2115 gen_rtx_DIV (mode, op1, mod), | |
2116 const0_rtx), | |
2117 const1_rtx, const0_rtx), | |
2118 const0_rtx); | |
2119 } | |
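/* Worked example: for op0 = 7, op1 = 2 the truncated division gives 3
   with mod = 1; op1 / mod = 2 > 0, so the adjustment is 1 and
   3 + 1 = 4 = ceil (7 / 2).  */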
2120 | |
2121 /* Return the difference between the ceil and the truncated result of | |
2122 an unsigned division by OP1 with remainder MOD. */ | |
2123 static rtx | |
2124 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) | |
2125 { | |
2126 /* (mod != 0 ? 1 : 0) */ | |
2127 return gen_rtx_IF_THEN_ELSE | |
2128 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2129 const1_rtx, const0_rtx); | |
2130 } | |
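/* Worked example: for op0 = 7, op1 = 2 (unsigned) mod = 1 != 0, so the
   adjustment is 1 and 3 + 1 = 4 = ceil (7 / 2).  */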
2131 | |
2132 /* Return the difference between the rounded and the truncated result | |
2133 of a signed division by OP1 with remainder MOD. Halfway cases are | |
2134 rounded away from zero, rather than to the nearest even number. */ | |
2135 static rtx | |
2136 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2137 { | |
2138 /* (abs (mod) >= abs (op1) - abs (mod) | |
2139 ? (op1 / mod > 0 ? 1 : -1) | |
2140 : 0) */ | |
2141 return gen_rtx_IF_THEN_ELSE | |
2142 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), | |
2143 gen_rtx_MINUS (mode, | |
2144 gen_rtx_ABS (mode, op1), | |
2145 gen_rtx_ABS (mode, mod))), | |
2146 gen_rtx_IF_THEN_ELSE | |
2147 (mode, gen_rtx_GT (BImode, | |
2148 gen_rtx_DIV (mode, op1, mod), | |
2149 const0_rtx), | |
2150 const1_rtx, constm1_rtx), | |
2151 const0_rtx); | |
2152 } | |
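/* Worked example: for op0 = -7, op1 = 2 the truncated division gives -3
   with mod = -1; abs (mod) = 1 >= abs (op1) - abs (mod) = 1 and
   op1 / mod = -2 < 0, so the adjustment is -1 and -3 + (-1) = -4,
   i.e. -3.5 rounded away from zero.  */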
2153 | |
2154 /* Return the difference between the rounded and the truncated result | |
2155 of an unsigned division by OP1 with remainder MOD. Halfway cases | |
2156 are rounded away from zero, rather than to the nearest even | |
2157 number. */ | |
2158 static rtx | |
2159 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2160 { | |
2161 /* (mod >= op1 - mod ? 1 : 0) */ | |
2162 return gen_rtx_IF_THEN_ELSE | |
2163 (mode, gen_rtx_GE (BImode, mod, | |
2164 gen_rtx_MINUS (mode, op1, mod)), | |
2165 const1_rtx, const0_rtx); | |
2166 } | |
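/* Worked example: for op0 = 7, op1 = 2 (unsigned) mod = 1 >=
   op1 - mod = 1, so the adjustment is 1 and 3 + 1 = 4, i.e. 3.5
   rounded away from zero.  */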
2167 | |
2168 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting | |
2169 any rtl. */ | |
2170 | |
2171 static rtx | |
2172 convert_debug_memory_address (enum machine_mode mode, rtx x) | |
2173 { | |
2174 enum machine_mode xmode = GET_MODE (x); | |
2175 | |
2176 #ifndef POINTERS_EXTEND_UNSIGNED | |
2177 gcc_assert (mode == Pmode); | |
2178 gcc_assert (xmode == mode || xmode == VOIDmode); | |
2179 #else | |
2180 gcc_assert (mode == Pmode || mode == ptr_mode); | |
2181 | |
2182 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | |
2183 return x; | |
2184 | |
2185 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | |
2186 x = simplify_gen_subreg (mode, x, xmode, | |
2187 subreg_lowpart_offset | |
2188 (mode, xmode)); | |
2189 else if (POINTERS_EXTEND_UNSIGNED > 0) | |
2190 x = gen_rtx_ZERO_EXTEND (mode, x); | |
2191 else if (!POINTERS_EXTEND_UNSIGNED) | |
2192 x = gen_rtx_SIGN_EXTEND (mode, x); | |
2193 else | |
2194 gcc_unreachable (); | |
2195 #endif /* POINTERS_EXTEND_UNSIGNED */ | |
2196 | |
2197 return x; | |
2198 } | |
2199 | |
2200 /* Return an RTX equivalent to the value of the tree expression | |
2201 EXP. */ | |
2202 | |
2203 static rtx | |
2204 expand_debug_expr (tree exp) | |
2205 { | |
2206 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; | |
2207 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
2208 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); | |
2209 addr_space_t as; | |
2210 enum machine_mode address_mode; | |
2211 | |
2212 switch (TREE_CODE_CLASS (TREE_CODE (exp))) | |
2213 { | |
2214 case tcc_expression: | |
2215 switch (TREE_CODE (exp)) | |
2216 { | |
2217 case COND_EXPR: | |
2218 goto ternary; | |
2219 | |
2220 case TRUTH_ANDIF_EXPR: | |
2221 case TRUTH_ORIF_EXPR: | |
2222 case TRUTH_AND_EXPR: | |
2223 case TRUTH_OR_EXPR: | |
2224 case TRUTH_XOR_EXPR: | |
2225 goto binary; | |
2226 | |
2227 case TRUTH_NOT_EXPR: | |
2228 goto unary; | |
2229 | |
2230 default: | |
2231 break; | |
2232 } | |
2233 break; | |
2234 | |
2235 ternary: | |
2236 op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); | |
2237 if (!op2) | |
2238 return NULL_RTX; | |
2239 /* Fall through. */ | |
2240 | |
2241 binary: | |
2242 case tcc_binary: | |
2243 case tcc_comparison: | |
2244 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | |
2245 if (!op1) | |
2246 return NULL_RTX; | |
2247 /* Fall through. */ | |
2248 | |
2249 unary: | |
2250 case tcc_unary: | |
2251 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2252 if (!op0) | |
2253 return NULL_RTX; | |
2254 break; | |
2255 | |
2256 case tcc_type: | |
2257 case tcc_statement: | |
2258 gcc_unreachable (); | |
2259 | |
2260 case tcc_constant: | |
2261 case tcc_exceptional: | |
2262 case tcc_declaration: | |
2263 case tcc_reference: | |
2264 case tcc_vl_exp: | |
2265 break; | |
2266 } | |
2267 | |
2268 switch (TREE_CODE (exp)) | |
2269 { | |
2270 case STRING_CST: | |
2271 if (!lookup_constant_def (exp)) | |
2272 { | |
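/* gen_rtx_CONST_STRING assumes a plain NUL-terminated string, so
   punt unless TREE_STRING_LENGTH matches that exactly, e.g. on
   strings with embedded NULs.  */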
2273 if (strlen (TREE_STRING_POINTER (exp)) + 1 | |
2274 != (size_t) TREE_STRING_LENGTH (exp)) | |
2275 return NULL_RTX; | |
2276 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); | |
2277 op0 = gen_rtx_MEM (BLKmode, op0); | |
2278 set_mem_attributes (op0, exp, 0); | |
2279 return op0; | |
2280 } | |
2281 /* Fall through... */ | |
2282 | |
2283 case INTEGER_CST: | |
2284 case REAL_CST: | |
2285 case FIXED_CST: | |
2286 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); | |
2287 return op0; | |
2288 | |
2289 case COMPLEX_CST: | |
2290 gcc_assert (COMPLEX_MODE_P (mode)); | |
2291 op0 = expand_debug_expr (TREE_REALPART (exp)); | |
2292 op1 = expand_debug_expr (TREE_IMAGPART (exp)); | |
2293 return gen_rtx_CONCAT (mode, op0, op1); | |
2294 | |
2295 case DEBUG_EXPR_DECL: | |
2296 op0 = DECL_RTL_IF_SET (exp); | |
2297 | |
2298 if (op0) | |
2299 return op0; | |
2300 | |
2301 op0 = gen_rtx_DEBUG_EXPR (mode); | |
2302 DEBUG_EXPR_TREE_DECL (op0) = exp; | |
2303 SET_DECL_RTL (exp, op0); | |
2304 | |
2305 return op0; | |
2306 | |
2307 case VAR_DECL: | |
2308 case PARM_DECL: | |
2309 case FUNCTION_DECL: | |
2310 case LABEL_DECL: | |
2311 case CONST_DECL: | |
2312 case RESULT_DECL: | |
2313 op0 = DECL_RTL_IF_SET (exp); | |
2314 | |
2315 /* This decl was probably optimized away. */ | |
2316 if (!op0) | |
2317 { | |
2318 if (TREE_CODE (exp) != VAR_DECL | |
2319 || DECL_EXTERNAL (exp) | |
2320 || !TREE_STATIC (exp) | |
2321 || !DECL_NAME (exp) | |
2322 || DECL_HARD_REGISTER (exp) | |
2323 || mode == VOIDmode) | |
2324 return NULL; | |
2325 | |
2326 op0 = DECL_RTL (exp); | |
2327 SET_DECL_RTL (exp, NULL); | |
2328 if (!MEM_P (op0) | |
2329 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF | |
2330 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) | |
2331 return NULL; | |
2332 } | |
2333 else | |
2334 op0 = copy_rtx (op0); | |
2335 | |
2336 if (GET_MODE (op0) == BLKmode) | |
2337 { | |
2338 gcc_assert (MEM_P (op0)); | |
2339 op0 = adjust_address_nv (op0, mode, 0); | |
2340 return op0; | |
2341 } | |
2342 | |
2343 /* Fall through. */ | |
2344 | |
2345 adjust_mode: | |
2346 case PAREN_EXPR: | |
2347 case NOP_EXPR: | |
2348 case CONVERT_EXPR: | |
2349 { | |
2350 enum machine_mode inner_mode = GET_MODE (op0); | |
2351 | |
2352 if (mode == inner_mode) | |
2353 return op0; | |
2354 | |
2355 if (inner_mode == VOIDmode) | |
2356 { | |
2357 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
2358 if (mode == inner_mode) | |
2359 return op0; | |
2360 } | |
2361 | |
2362 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | |
2363 { | |
2364 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | |
2365 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | |
2366 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | |
2367 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | |
2368 else | |
2369 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | |
2370 } | |
2371 else if (FLOAT_MODE_P (mode)) | |
2372 { | |
2373 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
2374 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode); | |
2375 else | |
2376 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode); | |
2377 } | |
2378 else if (FLOAT_MODE_P (inner_mode)) | |
2379 { | |
2380 if (unsignedp) | |
2381 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
2382 else | |
2383 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
2384 } | |
2385 else if (CONSTANT_P (op0) | |
2386 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | |
2387 op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
2388 subreg_lowpart_offset (mode, | |
2389 inner_mode)); | |
2390 else if (unsignedp) | |
2391 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
2392 else | |
2393 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
2394 | |
2395 return op0; | |
2396 } | |
2397 | |
2398 case INDIRECT_REF: | |
2399 case ALIGN_INDIRECT_REF: | |
2400 case MISALIGNED_INDIRECT_REF: | |
2401 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2402 if (!op0) | |
2403 return NULL; | |
2404 | |
2405 if (POINTER_TYPE_P (TREE_TYPE (exp))) | |
2406 { | |
2407 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2408 address_mode = targetm.addr_space.address_mode (as); | |
2409 } | |
2410 else | |
2411 { | |
2412 as = ADDR_SPACE_GENERIC; | |
2413 address_mode = Pmode; | |
2414 } | |
2415 | |
2416 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF) | |
2417 { | |
2418 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp)); | |
2419 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); | |
2420 } | |
2421 | |
2422 op0 = gen_rtx_MEM (mode, op0); | |
2423 | |
2424 set_mem_attributes (op0, exp, 0); | |
2425 set_mem_addr_space (op0, as); | |
2426 | |
2427 return op0; | |
2428 | |
2429 case TARGET_MEM_REF: | |
2430 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp))) | |
2431 return NULL; | |
2432 | |
2433 op0 = expand_debug_expr | |
2434 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); | |
2435 if (!op0) | |
2436 return NULL; | |
2437 | |
2438 as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); | |
2439 | |
2440 op0 = gen_rtx_MEM (mode, op0); | |
2441 | |
2442 set_mem_attributes (op0, exp, 0); | |
2443 set_mem_addr_space (op0, as); | |
2444 | |
2445 return op0; | |
2446 | |
2447 case ARRAY_REF: | |
2448 case ARRAY_RANGE_REF: | |
2449 case COMPONENT_REF: | |
2450 case BIT_FIELD_REF: | |
2451 case REALPART_EXPR: | |
2452 case IMAGPART_EXPR: | |
2453 case VIEW_CONVERT_EXPR: | |
2454 { | |
2455 enum machine_mode mode1; | |
2456 HOST_WIDE_INT bitsize, bitpos; | |
2457 tree offset; | |
2458 int volatilep = 0; | |
2459 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
2460 &mode1, &unsignedp, &volatilep, false); | |
2461 rtx orig_op0; | |
2462 | |
2463 if (bitsize == 0) | |
2464 return NULL; | |
2465 | |
2466 orig_op0 = op0 = expand_debug_expr (tem); | |
2467 | |
2468 if (!op0) | |
2469 return NULL; | |
2470 | |
2471 if (offset) | |
2472 { | |
2473 enum machine_mode addrmode, offmode; | |
2474 | |
2475 gcc_assert (MEM_P (op0)); | |
2476 | |
2477 op0 = XEXP (op0, 0); | |
2478 addrmode = GET_MODE (op0); | |
2479 if (addrmode == VOIDmode) | |
2480 addrmode = Pmode; | |
2481 | |
2482 op1 = expand_debug_expr (offset); | |
2483 if (!op1) | |
2484 return NULL; | |
2485 | |
2486 offmode = GET_MODE (op1); | |
2487 if (offmode == VOIDmode) | |
2488 offmode = TYPE_MODE (TREE_TYPE (offset)); | |
2489 | |
2490 if (addrmode != offmode) | |
2491 op1 = simplify_gen_subreg (addrmode, op1, offmode, | |
2492 subreg_lowpart_offset (addrmode, | |
2493 offmode)); | |
2494 | |
2495 /* Don't use offset_address here; we don't need a | |
2496 recognizable address, and we don't want to generate | |
2497 code. */ | |
2498 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); | |
2499 } | |
2500 | |
2501 if (MEM_P (op0)) | |
2502 { | |
2503 if (mode1 == VOIDmode) | |
2504 /* Bitfield. */ | |
2505 mode1 = smallest_mode_for_size (bitsize, MODE_INT); | |
2506 if (bitpos >= BITS_PER_UNIT) | |
2507 { | |
2508 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | |
2509 bitpos %= BITS_PER_UNIT; | |
2510 } | |
2511 else if (bitpos < 0) | |
2512 { | |
2513 HOST_WIDE_INT units | |
2514 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; | |
2515 op0 = adjust_address_nv (op0, mode1, units); | |
2516 bitpos += units * BITS_PER_UNIT; | |
2517 } | |
2518 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) | |
2519 op0 = adjust_address_nv (op0, mode, 0); | |
2520 else if (GET_MODE (op0) != mode1) | |
2521 op0 = adjust_address_nv (op0, mode1, 0); | |
2522 else | |
2523 op0 = copy_rtx (op0); | |
2524 if (op0 == orig_op0) | |
2525 op0 = shallow_copy_rtx (op0); | |
2526 set_mem_attributes (op0, exp, 0); | |
2527 } | |
2528 | |
2529 if (bitpos == 0 && mode == GET_MODE (op0)) | |
2530 return op0; | |
2531 | |
2532 if (bitpos < 0) | |
2533 return NULL; | |
2534 | |
2535 if ((bitpos % BITS_PER_UNIT) == 0 | |
2536 && bitsize == GET_MODE_BITSIZE (mode1)) | |
2537 { | |
2538 enum machine_mode opmode = GET_MODE (op0); | |
2539 | |
2540 gcc_assert (opmode != BLKmode); | |
2541 | |
2542 if (opmode == VOIDmode) | |
2543 opmode = mode1; | |
2544 | |
2545 /* This condition may hold if we're expanding the address | |
2546 right past the end of an array that turned out not to | |
2547 be addressable (i.e., the address was only computed in | |
2548 debug stmts). The gen_subreg below would rightfully | |
2549 crash, and the address doesn't really exist, so just | |
2550 drop it. */ | |
2551 if (bitpos >= GET_MODE_BITSIZE (opmode)) | |
2552 return NULL; | |
2553 | |
2554 return simplify_gen_subreg (mode, op0, opmode, | |
2555 bitpos / BITS_PER_UNIT); | |
2556 } | |
2557 | |
2558 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
2559 && TYPE_UNSIGNED (TREE_TYPE (exp)) | |
2560 ? SIGN_EXTRACT | |
2561 : ZERO_EXTRACT, mode, | |
2562 GET_MODE (op0) != VOIDmode | |
2563 ? GET_MODE (op0) : mode1, | |
2564 op0, GEN_INT (bitsize), GEN_INT (bitpos)); | |
2565 } | |
2566 | |
2567 case ABS_EXPR: | |
2568 return gen_rtx_ABS (mode, op0); | |
2569 | |
2570 case NEGATE_EXPR: | |
2571 return gen_rtx_NEG (mode, op0); | |
2572 | |
2573 case BIT_NOT_EXPR: | |
2574 return gen_rtx_NOT (mode, op0); | |
2575 | |
2576 case FLOAT_EXPR: | |
2577 if (unsignedp) | |
2578 return gen_rtx_UNSIGNED_FLOAT (mode, op0); | |
2579 else | |
2580 return gen_rtx_FLOAT (mode, op0); | |
2581 | |
2582 case FIX_TRUNC_EXPR: | |
2583 if (unsignedp) | |
2584 return gen_rtx_UNSIGNED_FIX (mode, op0); | |
2585 else | |
2586 return gen_rtx_FIX (mode, op0); | |
2587 | |
2588 case POINTER_PLUS_EXPR: | |
2589 case PLUS_EXPR: | |
2590 return gen_rtx_PLUS (mode, op0, op1); | |
2591 | |
2592 case MINUS_EXPR: | |
2593 return gen_rtx_MINUS (mode, op0, op1); | |
2594 | |
2595 case MULT_EXPR: | |
2596 return gen_rtx_MULT (mode, op0, op1); | |
2597 | |
2598 case RDIV_EXPR: | |
2599 case TRUNC_DIV_EXPR: | |
2600 case EXACT_DIV_EXPR: | |
2601 if (unsignedp) | |
2602 return gen_rtx_UDIV (mode, op0, op1); | |
2603 else | |
2604 return gen_rtx_DIV (mode, op0, op1); | |
2605 | |
2606 case TRUNC_MOD_EXPR: | |
2607 if (unsignedp) | |
2608 return gen_rtx_UMOD (mode, op0, op1); | |
2609 else | |
2610 return gen_rtx_MOD (mode, op0, op1); | |
2611 | |
2612 case FLOOR_DIV_EXPR: | |
2613 if (unsignedp) | |
2614 return gen_rtx_UDIV (mode, op0, op1); | |
2615 else | |
2616 { | |
2617 rtx div = gen_rtx_DIV (mode, op0, op1); | |
2618 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2619 rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2620 return gen_rtx_PLUS (mode, div, adj); | |
2621 } | |
2622 | |
2623 case FLOOR_MOD_EXPR: | |
2624 if (unsignedp) | |
2625 return gen_rtx_UMOD (mode, op0, op1); | |
2626 else | |
2627 { | |
2628 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2629 rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2630 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2631 return gen_rtx_PLUS (mode, mod, adj); | |
2632 } | |
2633 | |
2634 case CEIL_DIV_EXPR: | |
2635 if (unsignedp) | |
2636 { | |
2637 rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2638 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2639 rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2640 return gen_rtx_PLUS (mode, div, adj); | |
2641 } | |
2642 else | |
2643 { | |
2644 rtx div = gen_rtx_DIV (mode, op0, op1); | |
2645 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2646 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2647 return gen_rtx_PLUS (mode, div, adj); | |
2648 } | |
2649 | |
2650 case CEIL_MOD_EXPR: | |
2651 if (unsignedp) | |
2652 { | |
2653 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2654 rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2655 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2656 return gen_rtx_PLUS (mode, mod, adj); | |
2657 } | |
2658 else | |
2659 { | |
2660 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2661 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2662 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2663 return gen_rtx_PLUS (mode, mod, adj); | |
2664 } | |
2665 | |
2666 case ROUND_DIV_EXPR: | |
2667 if (unsignedp) | |
2668 { | |
2669 rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2670 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2671 rtx adj = round_udiv_adjust (mode, mod, op1); | |
2672 return gen_rtx_PLUS (mode, div, adj); | |
2673 } | |
2674 else | |
2675 { | |
2676 rtx div = gen_rtx_DIV (mode, op0, op1); | |
2677 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2678 rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2679 return gen_rtx_PLUS (mode, div, adj); | |
2680 } | |
2681 | |
2682 case ROUND_MOD_EXPR: | |
2683 if (unsignedp) | |
2684 { | |
2685 rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2686 rtx adj = round_udiv_adjust (mode, mod, op1); | |
2687 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2688 return gen_rtx_PLUS (mode, mod, adj); | |
2689 } | |
2690 else | |
2691 { | |
2692 rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2693 rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2694 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2695 return gen_rtx_PLUS (mode, mod, adj); | |
2696 } | |
2697 | |
2698 case LSHIFT_EXPR: | |
2699 return gen_rtx_ASHIFT (mode, op0, op1); | |
2700 | |
2701 case RSHIFT_EXPR: | |
2702 if (unsignedp) | |
2703 return gen_rtx_LSHIFTRT (mode, op0, op1); | |
2704 else | |
2705 return gen_rtx_ASHIFTRT (mode, op0, op1); | |
2706 | |
2707 case LROTATE_EXPR: | |
2708 return gen_rtx_ROTATE (mode, op0, op1); | |
2709 | |
2710 case RROTATE_EXPR: | |
2711 return gen_rtx_ROTATERT (mode, op0, op1); | |
2712 | |
2713 case MIN_EXPR: | |
2714 if (unsignedp) | |
2715 return gen_rtx_UMIN (mode, op0, op1); | |
2716 else | |
2717 return gen_rtx_SMIN (mode, op0, op1); | |
2718 | |
2719 case MAX_EXPR: | |
2720 if (unsignedp) | |
2721 return gen_rtx_UMAX (mode, op0, op1); | |
2722 else | |
2723 return gen_rtx_SMAX (mode, op0, op1); | |
2724 | |
2725 case BIT_AND_EXPR: | |
2726 case TRUTH_AND_EXPR: | |
2727 return gen_rtx_AND (mode, op0, op1); | |
2728 | |
2729 case BIT_IOR_EXPR: | |
2730 case TRUTH_OR_EXPR: | |
2731 return gen_rtx_IOR (mode, op0, op1); | |
2732 | |
2733 case BIT_XOR_EXPR: | |
2734 case TRUTH_XOR_EXPR: | |
2735 return gen_rtx_XOR (mode, op0, op1); | |
2736 | |
2737 case TRUTH_ANDIF_EXPR: | |
2738 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); | |
2739 | |
2740 case TRUTH_ORIF_EXPR: | |
2741 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); | |
2742 | |
2743 case TRUTH_NOT_EXPR: | |
2744 return gen_rtx_EQ (mode, op0, const0_rtx); | |
2745 | |
2746 case LT_EXPR: | |
2747 if (unsignedp) | |
2748 return gen_rtx_LTU (mode, op0, op1); | |
2749 else | |
2750 return gen_rtx_LT (mode, op0, op1); | |
2751 | |
2752 case LE_EXPR: | |
2753 if (unsignedp) | |
2754 return gen_rtx_LEU (mode, op0, op1); | |
2755 else | |
2756 return gen_rtx_LE (mode, op0, op1); | |
2757 | |
2758 case GT_EXPR: | |
2759 if (unsignedp) | |
2760 return gen_rtx_GTU (mode, op0, op1); | |
2761 else | |
2762 return gen_rtx_GT (mode, op0, op1); | |
2763 | |
2764 case GE_EXPR: | |
2765 if (unsignedp) | |
2766 return gen_rtx_GEU (mode, op0, op1); | |
2767 else | |
2768 return gen_rtx_GE (mode, op0, op1); | |
2769 | |
2770 case EQ_EXPR: | |
2771 return gen_rtx_EQ (mode, op0, op1); | |
2772 | |
2773 case NE_EXPR: | |
2774 return gen_rtx_NE (mode, op0, op1); | |
2775 | |
2776 case UNORDERED_EXPR: | |
2777 return gen_rtx_UNORDERED (mode, op0, op1); | |
2778 | |
2779 case ORDERED_EXPR: | |
2780 return gen_rtx_ORDERED (mode, op0, op1); | |
2781 | |
2782 case UNLT_EXPR: | |
2783 return gen_rtx_UNLT (mode, op0, op1); | |
2784 | |
2785 case UNLE_EXPR: | |
2786 return gen_rtx_UNLE (mode, op0, op1); | |
2787 | |
2788 case UNGT_EXPR: | |
2789 return gen_rtx_UNGT (mode, op0, op1); | |
2790 | |
2791 case UNGE_EXPR: | |
2792 return gen_rtx_UNGE (mode, op0, op1); | |
2793 | |
2794 case UNEQ_EXPR: | |
2795 return gen_rtx_UNEQ (mode, op0, op1); | |
2796 | |
2797 case LTGT_EXPR: | |
2798 return gen_rtx_LTGT (mode, op0, op1); | |
2799 | |
2800 case COND_EXPR: | |
2801 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); | |
2802 | |
2803 case COMPLEX_EXPR: | |
2804 gcc_assert (COMPLEX_MODE_P (mode)); | |
2805 if (GET_MODE (op0) == VOIDmode) | |
2806 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); | |
2807 if (GET_MODE (op1) == VOIDmode) | |
2808 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); | |
2809 return gen_rtx_CONCAT (mode, op0, op1); | |
2810 | |
2811 case CONJ_EXPR: | |
2812 if (GET_CODE (op0) == CONCAT) | |
2813 return gen_rtx_CONCAT (mode, XEXP (op0, 0), | |
2814 gen_rtx_NEG (GET_MODE_INNER (mode), | |
2815 XEXP (op0, 1))); | |
2816 else | |
2817 { | |
2818 enum machine_mode imode = GET_MODE_INNER (mode); | |
2819 rtx re, im; | |
2820 | |
2821 if (MEM_P (op0)) | |
2822 { | |
2823 re = adjust_address_nv (op0, imode, 0); | |
2824 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); | |
2825 } | |
2826 else | |
2827 { | |
2828 enum machine_mode ifmode = int_mode_for_mode (mode); | |
2829 enum machine_mode ihmode = int_mode_for_mode (imode); | |
2830 rtx halfsize; | |
2831 if (ifmode == BLKmode || ihmode == BLKmode) | |
2832 return NULL; | |
2833 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); | |
2834 re = op0; | |
2835 if (mode != ifmode) | |
2836 re = gen_rtx_SUBREG (ifmode, re, 0); | |
2837 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx); | |
2838 if (imode != ihmode) | |
2839 re = gen_rtx_SUBREG (imode, re, 0); | |
2840 im = copy_rtx (op0); | |
2841 if (mode != ifmode) | |
2842 im = gen_rtx_SUBREG (ifmode, im, 0); | |
2843 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize); | |
2844 if (imode != ihmode) | |
2845 im = gen_rtx_SUBREG (imode, im, 0); | |
2846 } | |
2847 im = gen_rtx_NEG (imode, im); | |
2848 return gen_rtx_CONCAT (mode, re, im); | |
2849 } | |
2850 | |
2851 case ADDR_EXPR: | |
2852 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2853 if (!op0 || !MEM_P (op0)) | |
2854 return NULL; | |
2855 | |
2856 op0 = convert_debug_memory_address (mode, XEXP (op0, 0)); | |
2857 | |
2858 return op0; | |
2859 | |
2860 case VECTOR_CST: | |
2861 exp = build_constructor_from_list (TREE_TYPE (exp), | |
2862 TREE_VECTOR_CST_ELTS (exp)); | |
2863 /* Fall through. */ | |
2864 | |
2865 case CONSTRUCTOR: | |
2866 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | |
2867 { | |
2868 unsigned i; | |
2869 tree val; | |
2870 | |
2871 op0 = gen_rtx_CONCATN | |
2872 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); | |
2873 | |
2874 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) | |
2875 { | |
2876 op1 = expand_debug_expr (val); | |
2877 if (!op1) | |
2878 return NULL; | |
2879 XVECEXP (op0, 0, i) = op1; | |
2880 } | |
2881 | |
2882 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) | |
2883 { | |
2884 op1 = expand_debug_expr | |
2885 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node)); | |
2886 | |
2887 if (!op1) | |
2888 return NULL; | |
2889 | |
2890 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) | |
2891 XVECEXP (op0, 0, i) = op1; | |
2892 } | |
2893 | |
2894 return op0; | |
2895 } | |
2896 else | |
2897 goto flag_unsupported; | |
2898 | |
2899 case CALL_EXPR: | |
2900 /* ??? Maybe handle some builtins? */ | |
2901 return NULL; | |
2902 | |
2903 case SSA_NAME: | |
2904 { | |
2905 int part = var_to_partition (SA.map, exp); | |
2906 | |
2907 if (part == NO_PARTITION) | |
2908 return NULL; | |
2909 | |
2910 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions); | |
2911 | |
2912 op0 = SA.partition_to_pseudo[part]; | |
2913 goto adjust_mode; | |
2914 } | |
2915 | |
2916 case ERROR_MARK: | |
2917 return NULL; | |
2918 | |
2919 default: | |
2920 flag_unsupported: | |
2921 #ifdef ENABLE_CHECKING | |
2922 debug_tree (exp); | |
2923 gcc_unreachable (); | |
2924 #else | |
2925 return NULL; | |
2926 #endif | |
2927 } | |
2928 } | |
2929 | |
2930 /* Expand the _LOCs in debug insns. We run this after expanding all | |
2931 regular insns, so that any variables referenced in the function | |
2932 will have their DECL_RTLs set. */ | |
2933 | |
2934 static void | |
2935 expand_debug_locations (void) | |
2936 { | |
2937 rtx insn; | |
2938 rtx last = get_last_insn (); | |
2939 int save_strict_alias = flag_strict_aliasing; | |
2940 | |
2941 /* New alias sets while setting up memory attributes cause | |
2942 -fcompare-debug failures, even though they don't bring about any | |
2943 codegen changes. */ | |
2944 flag_strict_aliasing = 0; | |
2945 | |
2946 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2947 if (DEBUG_INSN_P (insn)) | |
2948 { | |
2949 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); | |
2950 rtx val; | |
2951 enum machine_mode mode; | |
2952 | |
2953 if (value == NULL_TREE) | |
2954 val = NULL_RTX; | |
2955 else | |
2956 { | |
2957 val = expand_debug_expr (value); | |
2958 gcc_assert (last == get_last_insn ()); | |
2959 } | |
2960 | |
2961 if (!val) | |
2962 val = gen_rtx_UNKNOWN_VAR_LOC (); | |
2963 else | |
2964 { | |
2965 mode = GET_MODE (INSN_VAR_LOCATION (insn)); | |
2966 | |
2967 gcc_assert (mode == GET_MODE (val) | |
2968 || (GET_MODE (val) == VOIDmode | |
2969 && (CONST_INT_P (val) | |
2970 || GET_CODE (val) == CONST_FIXED | |
2971 || GET_CODE (val) == CONST_DOUBLE | |
2972 || GET_CODE (val) == LABEL_REF))); | |
2973 } | |
2974 | |
2975 INSN_VAR_LOCATION_LOC (insn) = val; | |
2976 } | |
2977 | |
2978 flag_strict_aliasing = save_strict_alias; | |
2979 } | |
2980 | |
2981 /* Expand basic block BB from GIMPLE trees to RTL. */ | |
2982 | |
2983 static basic_block | |
2984 expand_gimple_basic_block (basic_block bb) | |
2985 { | |
2986 gimple_stmt_iterator gsi; | |
2987 gimple_seq stmts; | |
2988 gimple stmt = NULL; | |
2989 rtx note, last; | |
2990 edge e; | |
2991 edge_iterator ei; | |
2992 void **elt; | |
2993 | |
2994 if (dump_file) | |
2995 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", | |
2996 bb->index); | |
2997 | |
2998 /* Note that since we are now transitioning from GIMPLE to RTL, we | |
2999 cannot use the gsi_*_bb() routines because they expect the basic | |
3000 block to be in GIMPLE, instead of RTL. Therefore, we need to | |
3001 access the BB sequence directly. */ | |
3002 stmts = bb_seq (bb); | |
3003 bb->il.gimple = NULL; | |
3004 rtl_profile_for_bb (bb); | |
3005 init_rtl_bb_info (bb); | |
3006 bb->flags |= BB_RTL; | |
3007 | |
3008 /* Remove the RETURN_EXPR if we may fall through to the exit | |
3009 instead. */ | |
3010 gsi = gsi_last (stmts); | |
3011 if (!gsi_end_p (gsi) | |
3012 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) | |
3013 { | |
3014 gimple ret_stmt = gsi_stmt (gsi); | |
3015 | |
3016 gcc_assert (single_succ_p (bb)); | |
3017 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | |
3018 | |
3019 if (bb->next_bb == EXIT_BLOCK_PTR | |
3020 && !gimple_return_retval (ret_stmt)) | |
3021 { | |
3022 gsi_remove (&gsi, false); | |
3023 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; | |
3024 } | |
3025 } | |
3026 | |
3027 gsi = gsi_start (stmts); | |
3028 if (!gsi_end_p (gsi)) | |
3029 { | |
3030 stmt = gsi_stmt (gsi); | |
3031 if (gimple_code (stmt) != GIMPLE_LABEL) | |
3032 stmt = NULL; | |
3033 } | |
3034 | |
3035 elt = pointer_map_contains (lab_rtx_for_bb, bb); | |
3036 | |
3037 if (stmt || elt) | |
3038 { | |
3039 last = get_last_insn (); | |
3040 | |
3041 if (stmt) | |
3042 { | |
3043 expand_gimple_stmt (stmt); | |
3044 gsi_next (&gsi); | |
3045 } | |
3046 | |
3047 if (elt) | |
3048 emit_label ((rtx) *elt); | |
3049 | |
3050 /* Java emits line number notes at the top of labels. | |
3051 ??? Make this go away once line number notes are obsoleted. */ | |
3052 BB_HEAD (bb) = NEXT_INSN (last); | |
3053 if (NOTE_P (BB_HEAD (bb))) | |
3054 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); | |
3055 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); | |
3056 | |
3057 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
3058 } | |
3059 else | |
3060 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); | |
3061 | |
3062 NOTE_BASIC_BLOCK (note) = bb; | |
3063 | |
3064 for (; !gsi_end_p (gsi); gsi_next (&gsi)) | |
3065 { | |
3066 basic_block new_bb; | |
3067 | |
3068 stmt = gsi_stmt (gsi); | |
3069 currently_expanding_gimple_stmt = stmt; | |
3070 | |
3071 /* Expand this statement, then evaluate the resulting RTL and | |
3072 fixup the CFG accordingly. */ | |
3073 if (gimple_code (stmt) == GIMPLE_COND) | |
3074 { | |
3075 new_bb = expand_gimple_cond (bb, stmt); | |
3076 if (new_bb) | |
3077 return new_bb; | |
3078 } | |
3079 else if (gimple_debug_bind_p (stmt)) | |
3080 { | |
3081 location_t sloc = get_curr_insn_source_location (); | |
3082 tree sblock = get_curr_insn_block (); | |
3083 gimple_stmt_iterator nsi = gsi; | |
3084 | |
3085 for (;;) | |
3086 { | |
3087 tree var = gimple_debug_bind_get_var (stmt); | |
3088 tree value; | |
3089 rtx val; | |
3090 enum machine_mode mode; | |
3091 | |
3092 if (gimple_debug_bind_has_value_p (stmt)) | |
3093 value = gimple_debug_bind_get_value (stmt); | |
3094 else | |
3095 value = NULL_TREE; | |
3096 | |
3097 last = get_last_insn (); | |
3098 | |
3099 set_curr_insn_source_location (gimple_location (stmt)); | |
3100 set_curr_insn_block (gimple_block (stmt)); | |
3101 | |
3102 if (DECL_P (var)) | |
3103 mode = DECL_MODE (var); | |
3104 else | |
3105 mode = TYPE_MODE (TREE_TYPE (var)); | |
3106 | |
3107 val = gen_rtx_VAR_LOCATION | |
3108 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | |
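/* VALUE is still a tree at this point; the bare cast stashes it in
   the VAR_LOCATION, and expand_debug_locations replaces it with a
   real RTX once all DECL_RTLs are known.  */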
3109 | |
3110 val = emit_debug_insn (val); | |
3111 | |
3112 if (dump_file && (dump_flags & TDF_DETAILS)) | |
3113 { | |
3114 /* We can't dump the insn with a TREE where an RTX | |
3115 is expected. */ | |
3116 INSN_VAR_LOCATION_LOC (val) = const0_rtx; | |
3117 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
3118 INSN_VAR_LOCATION_LOC (val) = (rtx)value; | |
3119 } | |
3120 | |
3121 gsi = nsi; | |
3122 gsi_next (&nsi); | |
3123 if (gsi_end_p (nsi)) | |
3124 break; | |
3125 stmt = gsi_stmt (nsi); | |
3126 if (!gimple_debug_bind_p (stmt)) | |
3127 break; | |
3128 } | |
3129 | |
3130 set_curr_insn_source_location (sloc); | |
3131 set_curr_insn_block (sblock); | |
3132 } | |
3133 else | |
3134 { | |
3135 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) | |
3136 { | |
3137 bool can_fallthru; | |
3138 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | |
3139 if (new_bb) | |
3140 { | |
3141 if (can_fallthru) | |
3142 bb = new_bb; | |
3143 else | |
3144 return new_bb; | |
3145 } | |
3146 } | |
3147 else | |
3148 { | |
3149 def_operand_p def_p; | |
3150 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); | |
3151 | |
3152 if (def_p != NULL) | |
3153 { | |
3154 /* Ignore this stmt if it is in the list of | |
3155 replaceable expressions. */ | |
3156 if (SA.values | |
3157 && bitmap_bit_p (SA.values, | |
3158 SSA_NAME_VERSION (DEF_FROM_PTR (def_p)))) | |
3159 continue; | |
3160 } | |
3161 last = expand_gimple_stmt (stmt); | |
3162 maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
3163 } | |
3164 } | |
3165 } | |
3166 | |
3167 currently_expanding_gimple_stmt = NULL; | |
3168 | |
3169 /* Expand implicit goto and convert goto_locus. */ | |
3170 FOR_EACH_EDGE (e, ei, bb->succs) | |
3171 { | |
3172 if (e->goto_locus && e->goto_block) | |
3173 { | |
3174 set_curr_insn_source_location (e->goto_locus); | |
3175 set_curr_insn_block (e->goto_block); | |
3176 e->goto_locus = curr_insn_locator (); | |
3177 } | |
3178 e->goto_block = NULL; | |
3179 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | |
3180 { | |
3181 emit_jump (label_rtx_for_bb (e->dest)); | |
3182 e->flags &= ~EDGE_FALLTHRU; | |
3183 } | |
3184 } | |
3185 | |
3186 /* Expanded RTL can create a jump as the last instruction of the block. | |
3187 It might later be assumed to be a jump to the successor and break edge insertion. | |
3188 We need to insert a dummy move to prevent this. PR41440. */ | |
3189 if (single_succ_p (bb) | |
3190 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU) | |
3191 && (last = get_last_insn ()) | |
3192 && JUMP_P (last)) | |
3193 { | |
3194 rtx dummy = gen_reg_rtx (SImode); | |
3195 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL); | |
3196 } | |
3197 | |
3198 do_pending_stack_adjust (); | |
3199 | |
3200 /* Find the block tail. The last insn in the block is the insn | |
3201 before a barrier and/or table jump insn. */ | |
3202 last = get_last_insn (); | |
3203 if (BARRIER_P (last)) | |
3204 last = PREV_INSN (last); | |
3205 if (JUMP_TABLE_DATA_P (last)) | |
3206 last = PREV_INSN (PREV_INSN (last)); | |
3207 BB_END (bb) = last; | |
3208 | |
3209 update_bb_for_insn (bb); | |
3210 | |
3211 return bb; | |
3212 } | |
3213 | |
3214 | |
3215 /* Create a basic block for initialization code. */ | |
3216 | |
3217 static basic_block | |
3218 construct_init_block (void) | |
3219 { | |
3220 basic_block init_block, first_block; | |
3221 edge e = NULL; | |
3222 int flags; | |
3223 | |
3224 /* Multiple entry points not supported yet. */ | |
3225 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1); | |
3226 init_rtl_bb_info (ENTRY_BLOCK_PTR); | |
3227 init_rtl_bb_info (EXIT_BLOCK_PTR); | |
3228 ENTRY_BLOCK_PTR->flags |= BB_RTL; | |
3229 EXIT_BLOCK_PTR->flags |= BB_RTL; | |
3230 | |
3231 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0); | |
3232 | |
3233 /* When the entry edge points to the first basic block, we don't need a | |
3234 jump; otherwise we have to jump to the proper target. */ | |
3235 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) | |
3236 { | |
3237 tree label = gimple_block_label (e->dest); | |
3238 | |
3239 emit_jump (label_rtx (label)); | |
3240 flags = 0; | |
3241 } | |
3242 else | |
3243 flags = EDGE_FALLTHRU; | |
3244 | |
3245 init_block = create_basic_block (NEXT_INSN (get_insns ()), | |
3246 get_last_insn (), | |
3247 ENTRY_BLOCK_PTR); | |
3248 init_block->frequency = ENTRY_BLOCK_PTR->frequency; | |
3249 init_block->count = ENTRY_BLOCK_PTR->count; | |
3250 if (e) | |
3251 { | |
3252 first_block = e->dest; | |
3253 redirect_edge_succ (e, init_block); | |
3254 e = make_edge (init_block, first_block, flags); | |
3255 } | |
3256 else | |
3257 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | |
3258 e->probability = REG_BR_PROB_BASE; | |
3259 e->count = ENTRY_BLOCK_PTR->count; | |
3260 | |
3261 update_bb_for_insn (init_block); | |
3262 return init_block; | |
3263 } | |
3264 | |
3265 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is | |
3266 found in the block tree. */ | |
3267 | |
3268 static void | |
3269 set_block_levels (tree block, int level) | |
3270 { | |
3271 while (block) | |
3272 { | |
3273 BLOCK_NUMBER (block) = level; | |
3274 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1); | |
3275 block = BLOCK_CHAIN (block); | |
3276 } | |
3277 } | |
3278 | |
3279 /* Create a block containing landing pads and similar stuff. */ | |
3280 | |
3281 static void | |
3282 construct_exit_block (void) | |
3283 { | |
3284 rtx head = get_last_insn (); | |
3285 rtx end; | |
3286 basic_block exit_block; | |
3287 edge e, e2; | |
3288 unsigned ix; | |
3289 edge_iterator ei; | |
3290 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb); | |
3291 | |
3292 rtl_profile_for_bb (EXIT_BLOCK_PTR); | |
3293 | |
3294 /* Make sure the locus is set to the end of the function, so that | |
3295 epilogue line numbers and warnings are set properly. */ | |
3296 if (cfun->function_end_locus != UNKNOWN_LOCATION) | |
3297 input_location = cfun->function_end_locus; | |
3298 | |
3299 /* The following insns belong to the top scope. */ | |
3300 set_curr_insn_block (DECL_INITIAL (current_function_decl)); | |
3301 | |
3302 /* Generate rtl for function exit. */ | |
3303 expand_function_end (); | |
3304 | |
3305 end = get_last_insn (); | |
3306 if (head == end) | |
3307 return; | |
3308 /* While emitting the function end we could have moved the end of the | |
3309 last basic block. */ | |
3310 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | |
3311 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) | |
3312 head = NEXT_INSN (head); | |
3313 exit_block = create_basic_block (NEXT_INSN (head), end, | |
3314 EXIT_BLOCK_PTR->prev_bb); | |
3315 exit_block->frequency = EXIT_BLOCK_PTR->frequency; | |
3316 exit_block->count = EXIT_BLOCK_PTR->count; | |
3317 | |
3318 ix = 0; | |
3319 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | |
3320 { | |
3321 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); | |
3322 if (!(e->flags & EDGE_ABNORMAL)) | |
3323 redirect_edge_succ (e, exit_block); | |
3324 else | |
3325 ix++; | |
3326 } | |
3327 | |
3328 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | |
3329 e->probability = REG_BR_PROB_BASE; | |
3330 e->count = EXIT_BLOCK_PTR->count; | |
3331 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) | |
3332 if (e2 != e) | |
3333 { | |
3334 e->count -= e2->count; | |
3335 exit_block->count -= e2->count; | |
3336 exit_block->frequency -= EDGE_FREQUENCY (e2); | |
3337 } | |
3338 if (e->count < 0) | |
3339 e->count = 0; | |
3340 if (exit_block->count < 0) | |
3341 exit_block->count = 0; | |
3342 if (exit_block->frequency < 0) | |
3343 exit_block->frequency = 0; | |
3344 update_bb_for_insn (exit_block); | |
3345 } | |

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes, and mark the base
   variable of each one addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
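
/* Hypothetical example (an addition, not in the upstream source): given

       int v[2];
       ...
       return v[i];

   with a non-constant index I, V might otherwise be promoted to a
   single register, which RTL expansion cannot index with a variable
   offset; the walk above sets TREE_ADDRESSABLE (v) so V gets a stack
   slot instead.  */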

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will then
   replace virtual_incoming_args_rtx with that virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update the incoming stack
     boundary.  It may set the incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization, since sibcall optimization will
     only align the outgoing stack to the incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     the stack.  We check PREFERRED_STACK_BOUNDARY if there may be
     non-call exceptions, since the callgraph doesn't collect the
     incoming stack alignment in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up the REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
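
/* Explanatory note (an addition, not in the upstream source): DRAP is
   the dynamic realign argument pointer, used when a function must
   realign its own stack (say, for a 32-byte-aligned local on a target
   whose incoming boundary is only 16 bytes) and still needs a stable
   base for addressing its incoming arguments after realignment.  */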

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
                                            sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;


  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
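
  /* Explanatory note (an addition, not in the upstream source): with
     -Wstack-protector these warn when the guard could not be emitted,
     either because the frame has a variable-length buffer (alloca/VLA)
     or because no local buffer reaches the --param ssp-buffer-size
     threshold (8 bytes by default).  */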

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here to make this available, but that would mean
         selecting one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         so nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }
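
  /* Illustrative note (an addition, not in the upstream source): each
     out-of-SSA partition maps to exactly one pseudo; if the SSA names
     x_1 and x_3 were coalesced into partition 2, then
     SA.partition_to_pseudo[2] is the pseudo both of them expand to.
     The loop above only fills in the partitions of parameters and
     results, whose RTL was just created by expand_function_start.  */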

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }
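
  /* Explanatory note (an addition, not in the upstream source): insns
     queued on an edge during out-of-SSA (copies that fit in neither
     endpoint of the edge) are flushed here.  commit_one_edge_insertion
     may split the edge, so the iterator is advanced manually only when
     nothing was committed.  */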

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect                  /* todo_flags_finish */
 }
};
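
/* Explanatory note (an addition, not in the upstream source): this pass
   descriptor is how the pass manager sees expansion.  It has no gate,
   so it always runs; it requires SSA-form GIMPLE with lowered EH and a
   CFG, provides PROP_rtl, and destroys the tree/SSA properties, so no
   GIMPLE pass can be scheduled after it.  */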