Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-ssa-live.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | 58ad6c70ea60 |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* Liveness for SSA trees. | |
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009 Free Software Foundation, | |
3 Inc. | |
4 Contributed by Andrew MacLeod <amacleod@redhat.com> | |
5 | |
6 This file is part of GCC. | |
7 | |
8 GCC is free software; you can redistribute it and/or modify | |
9 it under the terms of the GNU General Public License as published by | |
10 the Free Software Foundation; either version 3, or (at your option) | |
11 any later version. | |
12 | |
13 GCC is distributed in the hope that it will be useful, | |
14 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
16 GNU General Public License for more details. | |
17 | |
18 You should have received a copy of the GNU General Public License | |
19 along with GCC; see the file COPYING3. If not see | |
20 <http://www.gnu.org/licenses/>. */ | |
21 | |
22 #include "config.h" | |
23 #include "system.h" | |
24 #include "coretypes.h" | |
25 #include "tm.h" | |
26 #include "tree.h" | |
27 #include "diagnostic.h" | |
28 #include "bitmap.h" | |
29 #include "tree-flow.h" | |
30 #include "tree-dump.h" | |
31 #include "tree-ssa-live.h" | |
32 #include "toplev.h" | |
33 #include "debug.h" | |
34 #include "flags.h" | |
35 | |
36 #ifdef ENABLE_CHECKING | |
37 static void verify_live_on_entry (tree_live_info_p); | |
38 #endif | |
39 | |
40 | |
41 /* VARMAP maintains a mapping from SSA version number to real variables. | |
42 | |
43 All SSA_NAMES are divided into partitions. Initially each ssa_name is the | |
44 only member of it's own partition. Coalescing will attempt to group any | |
45 ssa_names which occur in a copy or in a PHI node into the same partition. | |
46 | |
47 At the end of out-of-ssa, each partition becomes a "real" variable and is | |
48 rewritten as a compiler variable. | |
49 | |
50 The var_map data structure is used to manage these partitions. It allows | |
51 partitions to be combined, and determines which partition belongs to what | |
52 ssa_name or variable, and vice versa. */ | |
53 | |
54 | |
55 /* This routine will initialize the basevar fields of MAP. */ | |
56 | |
static void
var_map_base_init (var_map map)
{
  int x, num_part, num;
  tree var;
  var_ann_t ann;

  num = 0;
  num_part = num_var_partitions (map);

  /* If a base table already exists, clear it, otherwise create it.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      VEC_truncate (tree, map->basevars, 0);
    }
  else
    /* Reserve roughly one slot per ten partitions, with a small floor;
       VEC_safe_push below grows the vector on demand anyway.  */
    map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10)));

  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      var = partition_to_var (map, x);
      /* The base of an SSA name is its underlying variable decl.  */
      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);
      ann = var_ann (var);
      /* If base variable hasn't been seen, set it up.  The processed bit
	 on the annotation marks decls already assigned a base index, so
	 each base decl is pushed exactly once.  */
      if (!ann->base_var_processed)
	{
	  ann->base_var_processed = 1;
	  VAR_ANN_BASE_INDEX (ann) = num++;
	  VEC_safe_push (tree, heap, map->basevars, var);
	}
      map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann);
    }

  map->num_basevars = num;

  /* Now clear the processed bit so the annotations are clean for the
     next time a base table is built.  */
  for (x = 0; x < num; x++)
    {
      var = VEC_index (tree, map->basevars, x);
      var_ann (var)->base_var_processed = 0;
    }

#ifdef ENABLE_CHECKING
  /* Verify that every partition's recorded base index maps back to the
     partition's own underlying decl.  */
  for (x = 0; x < num_part; x++)
    {
      tree var2;
      var = SSA_NAME_VAR (partition_to_var (map, x));
      var2 = VEC_index (tree, map->basevars, basevar_index (map, x));
      gcc_assert (var == var2);
    }
#endif
}
114 | |
115 | |
116 /* Remove the base table in MAP. */ | |
117 | |
118 static void | |
119 var_map_base_fini (var_map map) | |
120 { | |
121 /* Free the basevar info if it is present. */ | |
122 if (map->partition_to_base_index != NULL) | |
123 { | |
124 VEC_free (tree, heap, map->basevars); | |
125 free (map->partition_to_base_index); | |
126 map->partition_to_base_index = NULL; | |
127 map->num_basevars = 0; | |
128 } | |
129 } | |
130 /* Create a variable partition map of SIZE, initialize and return it. */ | |
131 | |
132 var_map | |
133 init_var_map (int size) | |
134 { | |
135 var_map map; | |
136 | |
137 map = (var_map) xmalloc (sizeof (struct _var_map)); | |
138 map->var_partition = partition_new (size); | |
139 map->partition_to_var | |
140 = (tree *)xmalloc (size * sizeof (tree)); | |
141 memset (map->partition_to_var, 0, size * sizeof (tree)); | |
142 | |
143 map->partition_to_view = NULL; | |
144 map->view_to_partition = NULL; | |
145 map->num_partitions = size; | |
146 map->partition_size = size; | |
147 map->num_basevars = 0; | |
148 map->partition_to_base_index = NULL; | |
149 map->basevars = NULL; | |
150 return map; | |
151 } | |
152 | |
153 | |
154 /* Free memory associated with MAP. */ | |
155 | |
156 void | |
157 delete_var_map (var_map map) | |
158 { | |
159 var_map_base_fini (map); | |
160 free (map->partition_to_var); | |
161 partition_delete (map->var_partition); | |
162 if (map->partition_to_view) | |
163 free (map->partition_to_view); | |
164 if (map->view_to_partition) | |
165 free (map->view_to_partition); | |
166 free (map); | |
167 } | |
168 | |
169 | |
170 /* This function will combine the partitions in MAP for VAR1 and VAR2. It | |
171 Returns the partition which represents the new partition. If the two | |
172 partitions cannot be combined, NO_PARTITION is returned. */ | |
173 | |
int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;
  tree root_var = NULL_TREE;	/* Decl that should represent the union.  */
  tree other_var = NULL_TREE;	/* Decl displaced from representative.  */

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  if (TREE_CODE (var1) == SSA_NAME)
    p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  else
    {
      p1 = var_to_partition (map, var1);
      /* With a view active, translate back to the real partition.  */
      if (map->view_to_partition)
	p1 = map->view_to_partition[p1];
      root_var = var1;
    }

  if (TREE_CODE (var2) == SSA_NAME)
    p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
  else
    {
      p2 = var_to_partition (map, var2);
      if (map->view_to_partition)
	p2 = map->view_to_partition[p2];

      /* If there is no root_var set, or it's not a user variable, set the
	 root_var to this one.  User variables are preferred as partition
	 representatives over ignored (artificial) decls.  */
      if (!root_var || (DECL_P (root_var) && DECL_IGNORED_P (root_var)))
	{
	  other_var = root_var;
	  root_var = var2;
	}
      else
	other_var = var2;
    }

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  /* Merge the two partitions unless they are already the same.  */
  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  /* Return (and record representatives against) the view index when a
     view is active.  */
  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  /* Both decls (if any) must point at the merged partition.  */
  if (root_var)
    change_partition_var (map, root_var, p3);
  if (other_var)
    change_partition_var (map, other_var, p3);

  return p3;
}
232 | |
233 | |
234 /* Compress the partition numbers in MAP such that they fall in the range | |
235 0..(num_partitions-1) instead of wherever they turned out during | |
236 the partitioning exercise. This removes any references to unused | |
237 partitions, thereby allowing bitmaps and other vectors to be much | |
238 denser. | |
239 | |
240 This is implemented such that compaction doesn't affect partitioning. | |
241 Ie., once partitions are created and possibly merged, running one | |
242 or more different kind of compaction will not affect the partitions | |
243 themselves. Their index might change, but all the same variables will | |
244 still be members of the same partition group. This allows work on reduced | |
245 sets, and no loss of information when a larger set is later desired. | |
246 | |
247 In particular, coalescing can work on partitions which have 2 or more | |
248 definitions, and then 'recompact' later to include all the single | |
249 definitions for assignment to program variables. */ | |
250 | |
251 | |
252 /* Set MAP back to the initial state of having no partition view. Return a | |
253 bitmap which has a bit set for each partition number which is in use in the | |
254 varmap. */ | |
255 | |
256 static bitmap | |
257 partition_view_init (var_map map) | |
258 { | |
259 bitmap used; | |
260 int tmp; | |
261 unsigned int x; | |
262 | |
263 used = BITMAP_ALLOC (NULL); | |
264 | |
265 /* Already in a view? Abandon the old one. */ | |
266 if (map->partition_to_view) | |
267 { | |
268 free (map->partition_to_view); | |
269 map->partition_to_view = NULL; | |
270 } | |
271 if (map->view_to_partition) | |
272 { | |
273 free (map->view_to_partition); | |
274 map->view_to_partition = NULL; | |
275 } | |
276 | |
277 /* Find out which partitions are actually referenced. */ | |
278 for (x = 0; x < map->partition_size; x++) | |
279 { | |
280 tmp = partition_find (map->var_partition, x); | |
281 if (map->partition_to_var[tmp] != NULL_TREE && !bitmap_bit_p (used, tmp)) | |
282 bitmap_set_bit (used, tmp); | |
283 } | |
284 | |
285 map->num_partitions = map->partition_size; | |
286 return used; | |
287 } | |
288 | |
289 | |
290 /* This routine will finalize the view data for MAP based on the partitions | |
291 set in SELECTED. This is either the same bitmap returned from | |
292 partition_view_init, or a trimmed down version if some of those partitions | |
293 were not desired in this view. SELECTED is freed before returning. */ | |
294 | |
static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;
  tree var;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If its a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      /* 0xff fill marks every partition as "not in view" (-1) until
	 explicitly assigned below.  */
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  var = map->partition_to_var[x];
	  /* If any one of the members of a partition is not an SSA_NAME, make
	     sure it is the representative.  */
	  if (TREE_CODE (var) != SSA_NAME)
	    change_partition_var (map, var, i);
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  /* This function owns SELECTED; release it for the caller.  */
  BITMAP_FREE (selected);
}
333 | |
334 | |
335 /* Create a partition view which includes all the used partitions in MAP. If | |
336 WANT_BASES is true, create the base variable map as well. */ | |
337 | |
338 extern void | |
339 partition_view_normal (var_map map, bool want_bases) | |
340 { | |
341 bitmap used; | |
342 | |
343 used = partition_view_init (map); | |
344 partition_view_fini (map, used); | |
345 | |
346 if (want_bases) | |
347 var_map_base_init (map); | |
348 else | |
349 var_map_base_fini (map); | |
350 } | |
351 | |
352 | |
353 /* Create a partition view in MAP which includes just partitions which occur in | |
354 the bitmap ONLY. If WANT_BASES is true, create the base variable map | |
355 as well. */ | |
356 | |
357 extern void | |
358 partition_view_bitmap (var_map map, bitmap only, bool want_bases) | |
359 { | |
360 bitmap used; | |
361 bitmap new_partitions = BITMAP_ALLOC (NULL); | |
362 unsigned x, p; | |
363 bitmap_iterator bi; | |
364 | |
365 used = partition_view_init (map); | |
366 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi) | |
367 { | |
368 p = partition_find (map->var_partition, x); | |
369 gcc_assert (bitmap_bit_p (used, p)); | |
370 bitmap_set_bit (new_partitions, p); | |
371 } | |
372 partition_view_fini (map, new_partitions); | |
373 | |
374 BITMAP_FREE (used); | |
375 if (want_bases) | |
376 var_map_base_init (map); | |
377 else | |
378 var_map_base_fini (map); | |
379 } | |
380 | |
381 | |
382 /* This function is used to change the representative variable in MAP for VAR's | |
383 partition to a regular non-ssa variable. This allows partitions to be | |
384 mapped back to real variables. */ | |
385 | |
void
change_partition_var (var_map map, tree var, int part)
{
  var_ann_t ann;

  /* Only a real decl, never an SSA name, may become the representative.  */
  gcc_assert (TREE_CODE (var) != SSA_NAME);

  ann = var_ann (var);
  /* Tag the decl as having left SSA form and record its partition.  */
  ann->out_of_ssa_tag = 1;
  VAR_ANN_PARTITION (ann) = part;
  /* When a view is active, PART is a view index; translate it back to
     the real partition before updating the representative slot.  */
  if (map->view_to_partition)
    map->partition_to_var[map->view_to_partition[part]] = var;
}
399 | |
400 | |
401 static inline void mark_all_vars_used (tree *, void *data); | |
402 | |
403 /* Helper function for mark_all_vars_used, called via walk_tree. */ | |
404 | |
static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  /* Look through SSA names to their underlying decl.  */
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  /* Expressions carry a scope block; mark it live.  */
  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TREE_ORIGINAL for TARGET_MEM_REFS, as well as other
     fields that do not contain vars.  Walk only the var-bearing
     operands explicitly and skip the generic subtree walk.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_SYMBOL (t), data);
      mark_all_vars_used (&TMR_BASE (t), data);
      mark_all_vars_used (&TMR_INDEX (t), data);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* DATA, when non-null, is a bitmap of still-unused global decls;
	 the first reference to one clears its bit and recursively marks
	 vars used by its initializer.  */
      if (data != NULL && bitmap_bit_p ((bitmap) data, DECL_UID (t)))
	{
	  bitmap_clear_bit ((bitmap) data, DECL_UID (t));
	  mark_all_vars_used (&DECL_INITIAL (t), data);
	}
      set_is_used (t);
    }

  /* No vars hide inside types or other decls; prune the walk.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
447 | |
448 /* Mark the scope block SCOPE and its subblocks unused when they can be | |
449 possibly eliminated if dead. */ | |
450 | |
451 static void | |
452 mark_scope_block_unused (tree scope) | |
453 { | |
454 tree t; | |
455 TREE_USED (scope) = false; | |
456 if (!(*debug_hooks->ignore_block) (scope)) | |
457 TREE_USED (scope) = true; | |
458 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t)) | |
459 mark_scope_block_unused (t); | |
460 } | |
461 | |
462 /* Look if the block is dead (by possibly eliminating its dead subblocks) | |
463 and return true if so. | |
464 Block is declared dead if: | |
465 1) No statements are associated with it. | |
466 2) Declares no live variables | |
467 3) All subblocks are dead | |
468 or there is precisely one subblocks and the block | |
469 has same abstract origin as outer block and declares | |
470 no variables, so it is pure wrapper. | |
471 When we are not outputting full debug info, we also eliminate dead variables | |
472 out of scope blocks to let them to be recycled by GGC and to save copying work | |
473 done by the inliner. */ | |
474 | |
static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  var_ann_t ann;
  int nsubblocks = 0;	/* Count of live subblocks kept under SCOPE.  */

  /* First prune SCOPE's variable list in place.  T always points at the
     chain cell holding the current decl so deletion is a single store.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &TREE_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;
      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = TREE_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if ((ann = var_ann (*t)) != NULL
	       && ann->used)
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all variables of
	 all nested blocks are gone.  We track no information on whether given
	 type is used or not.  */

      else if (debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE
	       /* Removing declarations before inlining is going to affect
		  DECL_UID that in turn is going to affect hashtables and
		  code generation.  */
	       || !cfun->after_inlining)
	;
      else
	{
	  *t = TREE_CHAIN (*t);
	  next = t;
	}
    }

  /* Recurse into subblocks; dead ones are removed, and a dead block
     with live children is spliced out by promoting its children into
     this block's subblock chain (reparenting their SUPERCONTEXT).  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


  /* Now decide whether SCOPE itself is dead.  */
  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* If there are live subblocks and we still have some unused variables
     or types declared, we must keep them.
     Before inlining we must not depend on debug info verbosity to keep
     DECL_UIDs stable.  */
  else if (!cfun->after_inlining && BLOCK_VARS (scope))
    unused = false;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    ;
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);
  return unused;
}
600 | |
601 /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be | |
602 eliminated during the tree->rtl conversion process. */ | |
603 | |
static inline void
mark_all_vars_used (tree *expr_p, void *data)
{
  /* DATA is forwarded to mark_all_vars_used_1 (a bitmap of unused
     globals, or NULL); no post-order callback is needed.  */
  walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
}
609 | |
610 /* Dump scope blocks. */ | |
611 | |
static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  /* Header line: block number plus used/abstract status.  */
  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  /* Report the decl or block this one was inlined/cloned from, if any.  */
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  /* List the block's own variables with their used status.  */
  for (var = BLOCK_VARS (scope); var; var = TREE_CHAIN (var))
    {
      bool used = false;
      var_ann_t ann;

      if ((ann = var_ann (var))
	  && ann->used)
	used = true;

      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "%s\n", used ? "" : " (unused)");
    }
  /* Then the variables whose scope block was optimized away.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  /* Recurse into subblocks with increased indentation.  */
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
663 | |
void
dump_scope_blocks (FILE *file, int flags)
{
  /* Dump the whole scope tree starting from the current function's
     outermost block.  */
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
669 | |
670 /* Remove local variables that are not referenced in the IL. */ | |
671 | |
void
remove_unused_locals (void)
{
  basic_block bb;
  tree t, *cell;
  referenced_var_iterator rvi;
  var_ann_t ann;
  bitmap global_unused_vars = NULL;

  /* Initially assume every scope block is dead too.  */
  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  /* Assume all locals are unused.  */
  FOR_EACH_REFERENCED_VAR (t, rvi)
    var_ann (t)->used = false;

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (b)
	    TREE_USED (b) = true;

	  /* Mark every variable referenced by any operand.  */
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

	  /* No point processing globals.  */
	  if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def, NULL);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      mark_all_vars_used (&arg, NULL);
	    }
	}

      /* Keep alive any block a goto on an outgoing edge refers to.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus)
	  TREE_USED (e->goto_block) = true;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local vars from local_decls.  CELL points at the
     list cell holding the current decl, so removal is one store;
     unused globals are only recorded (in GLOBAL_UNUSED_VARS) because
     a later-seen use in an initializer may still revive them.  */
  for (cell = &cfun->local_decls; *cell; )
    {
      tree var = TREE_VALUE (*cell);

      if (TREE_CODE (var) != FUNCTION_DECL
	  && (!(ann = var_ann (var))
	      || !ann->used)
	  && (optimize || DECL_ARTIFICIAL (var)))
	{
	  if (is_global_var (var))
	    {
	      if (global_unused_vars == NULL)
		global_unused_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_unused_vars, DECL_UID (var));
	    }
	  else
	    {
	      *cell = TREE_CHAIN (*cell);
	      continue;
	    }
	}
      else if (TREE_CODE (var) == VAR_DECL
	       && DECL_HARD_REGISTER (var)
	       && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;
      cell = &TREE_CHAIN (*cell);
    }

  /* Remove unmarked global vars from local_decls.  First revive any
     candidate that a used global's initializer references, then drop
     the ones still marked unused.  */
  if (global_unused_vars != NULL)
    {
      for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
	{
	  tree var = TREE_VALUE (t);

	  if (TREE_CODE (var) == VAR_DECL
	      && is_global_var (var)
	      && (ann = var_ann (var)) != NULL
	      && ann->used)
	    mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);
	}

      for (cell = &cfun->local_decls; *cell; )
	{
	  tree var = TREE_VALUE (*cell);

	  if (TREE_CODE (var) == VAR_DECL
	      && is_global_var (var)
	      && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
	    *cell = TREE_CHAIN (*cell);
	  else
	    cell = &TREE_CHAIN (*cell);
	}
      BITMAP_FREE (global_unused_vars);
    }

  /* Remove unused variables from REFERENCED_VARs.  As a special
     exception keep the variables that are believed to be aliased.
     Those can't be easily removed from the alias sets and operand
     caches.  They will be removed shortly after the next may_alias
     pass is performed.  */
  FOR_EACH_REFERENCED_VAR (t, rvi)
    if (!is_global_var (t)
	&& !MTAG_P (t)
	&& TREE_CODE (t) != PARM_DECL
	&& TREE_CODE (t) != RESULT_DECL
	&& !(ann = var_ann (t))->used
	&& !ann->symbol_mem_tag
	&& !TREE_ADDRESSABLE (t)
	&& (optimize || DECL_ARTIFICIAL (t)))
      remove_referenced_var (t);
  /* Finally prune the scope-block tree of now-dead blocks.  */
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
}
815 | |
816 | |
817 /* Allocate and return a new live range information object base on MAP. */ | |
818 | |
819 static tree_live_info_p | |
820 new_tree_live_info (var_map map) | |
821 { | |
822 tree_live_info_p live; | |
823 unsigned x; | |
824 | |
825 live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d)); | |
826 live->map = map; | |
827 live->num_blocks = last_basic_block; | |
828 | |
829 live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap)); | |
830 for (x = 0; x < (unsigned)last_basic_block; x++) | |
831 live->livein[x] = BITMAP_ALLOC (NULL); | |
832 | |
833 live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap)); | |
834 for (x = 0; x < (unsigned)last_basic_block; x++) | |
835 live->liveout[x] = BITMAP_ALLOC (NULL); | |
836 | |
837 live->work_stack = XNEWVEC (int, last_basic_block); | |
838 live->stack_top = live->work_stack; | |
839 | |
840 live->global = BITMAP_ALLOC (NULL); | |
841 return live; | |
842 } | |
843 | |
844 | |
845 /* Free storage for live range info object LIVE. */ | |
846 | |
847 void | |
848 delete_tree_live_info (tree_live_info_p live) | |
849 { | |
850 int x; | |
851 | |
852 BITMAP_FREE (live->global); | |
853 free (live->work_stack); | |
854 | |
855 for (x = live->num_blocks - 1; x >= 0; x--) | |
856 BITMAP_FREE (live->liveout[x]); | |
857 free (live->liveout); | |
858 | |
859 for (x = live->num_blocks - 1; x >= 0; x--) | |
860 BITMAP_FREE (live->livein[x]); | |
861 free (live->livein); | |
862 | |
863 free (live); | |
864 } | |
865 | |
866 | |
867 /* Visit basic block BB and propagate any required live on entry bits from | |
868 LIVE into the predecessors. VISITED is the bitmap of visited blocks. | |
869 TMP is a temporary work bitmap which is passed in to avoid reallocating | |
870 it each time. */ | |
871 | |
static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;
  /* A block must not be on the worklist while marked visited.  */
  gcc_assert (!TEST_BIT (visited, bb->index));

  SET_BIT (visited, bb->index);
  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
	continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.  */
      bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred. if there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  Clearing its visited bit keeps it off the stack
	 twice (see the assert above).  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (TEST_BIT (visited, pred_bb->index) && change)
	{
	  RESET_BIT (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
908 | |
909 | |
910 /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses | |
911 of all the variables. */ | |
912 | |
913 static void | |
914 live_worklist (tree_live_info_p live) | |
915 { | |
916 unsigned b; | |
917 basic_block bb; | |
918 sbitmap visited = sbitmap_alloc (last_basic_block + 1); | |
919 bitmap tmp = BITMAP_ALLOC (NULL); | |
920 | |
921 sbitmap_zero (visited); | |
922 | |
923 /* Visit all the blocks in reverse order and propagate live on entry values | |
924 into the predecessors blocks. */ | |
925 FOR_EACH_BB_REVERSE (bb) | |
926 loe_visit_block (live, bb, visited, tmp); | |
927 | |
928 /* Process any blocks which require further iteration. */ | |
929 while (live->stack_top != live->work_stack) | |
930 { | |
931 b = *--(live->stack_top); | |
932 loe_visit_block (live, BASIC_BLOCK (b), visited, tmp); | |
933 } | |
934 | |
935 BITMAP_FREE (tmp); | |
936 sbitmap_free (visited); | |
937 } | |
938 | |
939 | |
940 /* Calculate the initial live on entry vector for SSA_NAME using immediate_use | |
941 links. Set the live on entry fields in LIVE. Def's are marked temporarily | |
942 in the liveout vector. */ | |
943 | |
static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;	/* True once any live-on-entry use is found.  */

  p = var_to_partition (live->map, ssa_name);
  /* Names outside the current partition view need no liveness.  */
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether its live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (use_stmt, index);
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else
	{
	  /* If its not defined in this block, its live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
1010 | |
1011 | |
1012 /* Calculate the live on exit vectors based on the entry info in LIVEINFO. */ | |
1013 | |
1014 void | |
1015 calculate_live_on_exit (tree_live_info_p liveinfo) | |
1016 { | |
1017 basic_block bb; | |
1018 edge e; | |
1019 edge_iterator ei; | |
1020 | |
1021 /* live on entry calculations used liveout vectors for defs, clear them. */ | |
1022 FOR_EACH_BB (bb) | |
1023 bitmap_clear (liveinfo->liveout[bb->index]); | |
1024 | |
1025 /* Set all the live-on-exit bits for uses in PHIs. */ | |
1026 FOR_EACH_BB (bb) | |
1027 { | |
1028 gimple_stmt_iterator gsi; | |
1029 size_t i; | |
1030 | |
1031 /* Mark the PHI arguments which are live on exit to the pred block. */ | |
1032 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1033 { | |
1034 gimple phi = gsi_stmt (gsi); | |
1035 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
1036 { | |
1037 tree t = PHI_ARG_DEF (phi, i); | |
1038 int p; | |
1039 | |
1040 if (TREE_CODE (t) != SSA_NAME) | |
1041 continue; | |
1042 | |
1043 p = var_to_partition (liveinfo->map, t); | |
1044 if (p == NO_PARTITION) | |
1045 continue; | |
1046 e = gimple_phi_arg_edge (phi, i); | |
1047 if (e->src != ENTRY_BLOCK_PTR) | |
1048 bitmap_set_bit (liveinfo->liveout[e->src->index], p); | |
1049 } | |
1050 } | |
1051 | |
1052 /* Add each successors live on entry to this bock live on exit. */ | |
1053 FOR_EACH_EDGE (e, ei, bb->succs) | |
1054 if (e->dest != EXIT_BLOCK_PTR) | |
1055 bitmap_ior_into (liveinfo->liveout[bb->index], | |
1056 live_on_entry (liveinfo, e->dest)); | |
1057 } | |
1058 } | |
1059 | |
1060 | |
1061 /* Given partition map MAP, calculate all the live on entry bitmaps for | |
1062 each partition. Return a new live info object. */ | |
1063 | |
1064 tree_live_info_p | |
1065 calculate_live_ranges (var_map map) | |
1066 { | |
1067 tree var; | |
1068 unsigned i; | |
1069 tree_live_info_p live; | |
1070 | |
1071 live = new_tree_live_info (map); | |
1072 for (i = 0; i < num_var_partitions (map); i++) | |
1073 { | |
1074 var = partition_to_var (map, i); | |
1075 if (var != NULL_TREE) | |
1076 set_var_live_on_entry (var, live); | |
1077 } | |
1078 | |
1079 live_worklist (live); | |
1080 | |
1081 #ifdef ENABLE_CHECKING | |
1082 verify_live_on_entry (live); | |
1083 #endif | |
1084 | |
1085 calculate_live_on_exit (live); | |
1086 return live; | |
1087 } | |
1088 | |
1089 | |
1090 /* Output partition map MAP to file F. */ | |
1091 | |
1092 void | |
1093 dump_var_map (FILE *f, var_map map) | |
1094 { | |
1095 int t; | |
1096 unsigned x, y; | |
1097 int p; | |
1098 | |
1099 fprintf (f, "\nPartition map \n\n"); | |
1100 | |
1101 for (x = 0; x < map->num_partitions; x++) | |
1102 { | |
1103 if (map->view_to_partition != NULL) | |
1104 p = map->view_to_partition[x]; | |
1105 else | |
1106 p = x; | |
1107 | |
1108 if (map->partition_to_var[p] == NULL_TREE) | |
1109 continue; | |
1110 | |
1111 t = 0; | |
1112 for (y = 1; y < num_ssa_names; y++) | |
1113 { | |
1114 p = partition_find (map->var_partition, y); | |
1115 if (map->partition_to_view) | |
1116 p = map->partition_to_view[p]; | |
1117 if (p == (int)x) | |
1118 { | |
1119 if (t++ == 0) | |
1120 { | |
1121 fprintf(f, "Partition %d (", x); | |
1122 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM); | |
1123 fprintf (f, " - "); | |
1124 } | |
1125 fprintf (f, "%d ", y); | |
1126 } | |
1127 } | |
1128 if (t != 0) | |
1129 fprintf (f, ")\n"); | |
1130 } | |
1131 fprintf (f, "\n"); | |
1132 } | |
1133 | |
1134 | |
1135 /* Output live range info LIVE to file F, controlled by FLAG. */ | |
1136 | |
1137 void | |
1138 dump_live_info (FILE *f, tree_live_info_p live, int flag) | |
1139 { | |
1140 basic_block bb; | |
1141 unsigned i; | |
1142 var_map map = live->map; | |
1143 bitmap_iterator bi; | |
1144 | |
1145 if ((flag & LIVEDUMP_ENTRY) && live->livein) | |
1146 { | |
1147 FOR_EACH_BB (bb) | |
1148 { | |
1149 fprintf (f, "\nLive on entry to BB%d : ", bb->index); | |
1150 EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi) | |
1151 { | |
1152 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM); | |
1153 fprintf (f, " "); | |
1154 } | |
1155 fprintf (f, "\n"); | |
1156 } | |
1157 } | |
1158 | |
1159 if ((flag & LIVEDUMP_EXIT) && live->liveout) | |
1160 { | |
1161 FOR_EACH_BB (bb) | |
1162 { | |
1163 fprintf (f, "\nLive on exit from BB%d : ", bb->index); | |
1164 EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi) | |
1165 { | |
1166 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM); | |
1167 fprintf (f, " "); | |
1168 } | |
1169 fprintf (f, "\n"); | |
1170 } | |
1171 } | |
1172 } | |
1173 | |
1174 | |
1175 #ifdef ENABLE_CHECKING | |
1176 /* Verify that SSA_VAR is a non-virtual SSA_NAME. */ | |
1177 | |
1178 void | |
1179 register_ssa_partition_check (tree ssa_var) | |
1180 { | |
1181 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME); | |
1182 if (!is_gimple_reg (SSA_NAME_VAR (ssa_var))) | |
1183 { | |
1184 fprintf (stderr, "Illegally registering a virtual SSA name :"); | |
1185 print_generic_expr (stderr, ssa_var, TDF_SLIM); | |
1186 fprintf (stderr, " in the SSA->Normal phase.\n"); | |
1187 internal_error ("SSA corruption"); | |
1188 } | |
1189 } | |
1190 | |
1191 | |
1192 /* Verify that the info in LIVE matches the current cfg. */ | |
1193 | |
static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;              /* Count of inconsistencies found so far.  */
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program. This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
        continue;
      /* Cross-check every partition against this entry successor.  */
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  /* D is the default definition (if any) of VAR's underlying decl;
	     used below to decide whether live-on-entry is legitimate.  */
	  d = gimple_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Partition is live on entry to the function's first block.
		 That is only legal for a default def (a GIMPLE_NOP def).  */
	      if (!gimple_nop_p (stmt))
		{
		  /* A real defining statement exists, so the name appears
		     to have multiple defs.  Report it.  */
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
	        {
		  /* The def is a nop, so VAR should itself be the default
		     def of its decl; anything else is corruption.  */
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
		        {
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		/* A default def that is neither live-on-entry nor a PHI
		   argument of the entry successor is an error.  */
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  /* Abort compilation if any inconsistency was diagnosed above.  */
  gcc_assert (num <= 0);
}
1293 #endif |