annotate gcc/tree-ssa-operands.c @ 19:58ad6c70ea60

update gcc from 4.4.0 to 4.4.1.

author    kent@firefly.cr.ie.u-ryukyu.ac.jp
date      Thu, 24 Sep 2009 13:21:57 +0900
parents   a06113de4d67
children  9de9dad105d4 77e2b8dfacca
/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */

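/* Editor's sketch (not part of GCC): how the routines described above fit
   together when one statement is scanned.  start_ssa_stmt_operands and
   finalize_ssa_stmt_operands are defined later in this file;
   walk_stmt_collect_operands is a hypothetical stand-in for the get_*
   walk (driven by get_expr_operands) that calls the append_* routines.  */
#if 0
static void
scan_stmt_operands_sketch (gimple stmt)
{
  start_ssa_stmt_operands ();           /* Build vectors must start empty.  */
  walk_stmt_collect_operands (stmt);    /* Hypothetical get_*/append_* walk.  */
  finalize_ssa_stmt_operands (stmt);    /* Match old cache, emit new vectors.  */
}
#endif
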
/* Helper functions from gimple.c.  These are GIMPLE manipulation
   routines that only the operand scanner should need.  */
void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

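/* Editor's sketch (not part of GCC): how the flags above are typically
   combined when scanning an assignment.  gimple_assign_lhs_ptr and
   gimple_assign_rhs1_ptr are the usual GIMPLE accessors; addr_expr is a
   hypothetical ADDR_EXPR node illustrating the opf_no_vops case above.  */
#if 0
  get_expr_operands (stmt, gimple_assign_lhs_ptr (stmt), opf_def);
  get_expr_operands (stmt, gimple_assign_rhs1_ptr (stmt), opf_use);
  /* Inside &a.b[i], collect only the real USE of 'i', no VUSEs.  */
  get_expr_operands (stmt, &TREE_OPERAND (addr_expr, 0),
		     opf_use | opf_no_vops);
#endif
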
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, needs to be folded via fold_stmt, or undergoes any other
   change to its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */

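/* Editor's sketch (not part of GCC): the three-step protocol above as a
   pass would use it.  my_pass_rewrite_stmt is a hypothetical modification;
   push_stmt_changes, pop_stmt_changes and discard_stmt_changes are the
   real entry points documented above.  */
#if 0
  push_stmt_changes (&stmt);            /* 1- record the current state.  */
  if (my_pass_rewrite_stmt (stmt))      /* 2- modify the statement.  */
    pop_stmt_changes (&stmt);           /* 3- compute needed SSA updates.  */
  else
    discard_stmt_changes (&stmt);       /* Unmodified: skip the re-scan.  */
#endif
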
struct scb_d
{
  /* Pointer to the statement being modified.  */
  gimple *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);

/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

int
operand_build_cmp (const void *p, const void *q)
{
  const_tree const e1 = *((const_tree const *)p);
  const_tree const e2 = *((const_tree const *)q);
  const unsigned int u1 = get_name_decl (e1);
  const unsigned int u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}

/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}

/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}

/* VOPs are of variable size, so the free list maps "free buckets" to the
   following table:

   bucket   # operands
   ------   ----------
      0            1
      1            2
    ...
     15           16
     16        17-24
     17        25-32
     18        33-40
    ...
     29      121-128

   Any VOPs larger than this are simply added to the largest bucket when they
   are freed.  */

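/* Editor's note (not part of GCC): the table above matches the arithmetic
   in vop_free_bucket_size and vop_free_bucket_index below, e.g.:

     vop_free_bucket_index (16) = 16 - 1            = 15  (exact bucket)
     vop_free_bucket_index (17) = 14 + (17 - 1) / 8 = 16  (17-24 bucket)
     vop_free_bucket_index (40) = 14 + (40 - 1) / 8 = 18  (33-40 bucket)
     vop_free_bucket_size (18)  = (18 - 13) * 8     = 40  (largest size held)  */
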
/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}


/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}


/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}


/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throwaway.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}

/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that
   is allocated every time after that.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1    30
#define OP_SIZE_2    110
#define OP_SIZE_3    511

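/* Editor's note (not part of GCC): with the values above, ssa_operand_alloc
   below grows each per-function buffer through
   30 * sizeof (struct voptype_d) -> 110 * ... -> 511 * ... bytes and then
   stays at the 511-chunk size, except that an oversized request
   (see PR34093) gets a buffer of exactly its own size.  */
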
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
	BITMAP_FREE (MPT_SYMBOLS (mpt));
    }

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}

/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	       == OP_SIZE_1 * sizeof (struct voptype_d))
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_2 * sizeof (struct voptype_d);
      else
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* We can reliably trigger the case that we need arbitrarily many
	 operands (see PR34093), so allocate a buffer just for this
	 request.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size = size;

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (struct ssa_operand_memory_d)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}

/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *)ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, gimple stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->loc.stmt != stmt)
    ptr->loc.stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}

/* Return a virtual op pointer with NUM elements which are all
   initialized to OP and are linked into the immediate uses for STMT.
   The new vop is appended after PREV.  */

static inline voptype_p
add_vop (gimple stmt, tree op, int num, voptype_p prev)
{
  voptype_p new_vop;
  int x;

  new_vop = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new_vop, x)->prev = NULL;
      SET_VUSE_OP (new_vop, x, op);
      VUSE_OP_PTR (new_vop, x)->use = &new_vop->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new_vop, x),
			 new_vop->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new_vop;
  new_vop->next = NULL;
  return new_vop;
}


/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (gimple stmt, tree op, int num, voptype_p last)
{
  voptype_p new_vop = add_vop (stmt, op, num, last);
  VDEF_RESULT (new_vop) = NULL_TREE;
  return new_vop;
}


/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (gimple stmt, tree op, int num, voptype_p last)
{
  voptype_p new_vop = add_vop (stmt, op, num, last);
  VDEF_RESULT (new_vop) = op;
  return new_vop;
}

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = gimple_def_ops (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}

/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  */

static inline void
finalize_ssa_vdefs (gimple stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;

  /* Set the symbols referenced by STMT.  */
  gimple_set_stored_syms (stmt, build_stores, &operands_bitmap_obstack);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_vdef_ops (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in its RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  gimple_set_vdef_ops (stmt, new_list.next);

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = gimple_vdef_ops (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}

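/* Editor's note (not part of GCC): the while loop above is a two-pointer
   merge.  It assumes both the old VDEF list and build_vdefs are ordered by
   DECL_UID (compare operand_build_sort_virtual earlier in this file):
   matching UIDs reuse the old operand, smaller old UIDs are freed as stale,
   and larger new UIDs become new operands, all in one linear pass.  */
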
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (gimple stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;

  /* Set the symbols referenced by STMT.  */
  gimple_set_loaded_syms (stmt, build_loads, &operands_bitmap_obstack);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = gimple_vuse_ops (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      gimple_set_vuse_ops (stmt, NULL);
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      gimple_set_vuse_ops (stmt, last);
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (gimple_vuse_ops (stmt))
      {
	gcc_assert (gimple_vuse_ops (stmt)->next == NULL);
	x = VUSE_NUM (gimple_vuse_ops (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Finalize the VUSE operand vector for STMT.  */

static void
finalize_ssa_vuses (gimple stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}

/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}


/* Finalize all the build vectors and fill them into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  if (gimple_has_mem_ops (stmt))
    {
      finalize_ssa_vdefs (stmt);
      finalize_ssa_vuses (stmt);
    }
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list)
	return;
      else if (ann->in_vdef_list)
	{
	  /* We don't want a vuse if we already have a vdef, but we must
	     still put this in build_loads.  */
	  bitmap_set_bit (build_loads, DECL_UID (var));
	  return;
	}

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}

/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.

   XXX: We should handle the NO_ALIAS attributes here.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ref is a TARGET_MEM_REF, just return true, as we can't really
     disambiguate them right now.  */
  if (ref && TREE_CODE (ref) == TARGET_MEM_REF)
    return true;

  /* With strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.

     Another case we have to allow is accessing a variable
     through an array access at offset zero.  This happens from
     code generated by the fortran frontend like

     char[1:1] & my_char_ref;
     char my_char;
     my_char_ref_1 = (char[1:1] &) &my_char;
     D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1};
  */
  if (ref
      && flag_strict_aliasing
      && TREE_CODE (ref) != INDIRECT_REF
      && !MTAG_P (alias)
      && base
      && (TREE_CODE (base) != INDIRECT_REF
	  || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
      && (TREE_CODE (base) != INDIRECT_REF
	  || TREE_CODE (ref) != ARRAY_REF
	  || offset != 0
	  || (DECL_SIZE (alias)
	      && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	      && size != -1
	      && (unsigned HOST_WIDE_INT)size
		 != TREE_INT_CST_LOW (DECL_SIZE (alias))))
      && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
      && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
      && !var_ann (alias)->is_heapvar
      /* When the struct has the may_alias attribute, its alias set is
	 zero, so we must not conclude here that the access cannot
	 touch it.  */
      && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !var_ann (alias)->is_heapvar
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset >= TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}

/* Add VAR to the virtual operands for STMT.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  IS_CALL_SITE is true if the
   affected statement is a call site.  */

static void
add_virtual_operand (tree var, gimple stmt, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool is_call_site)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  gimple_set_references_memory (stmt, true);

  /* If the variable cannot be modified and this is a VDEF, change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     suppress the VDEF for non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     MODIFY_EXPR, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);

  if (aliases == NULL)
    {
      if (!gimple_aliases_computed_p (cfun) && (flags & opf_def))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      bool none_added = true;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	{
	  tree al = referenced_var (i);

	  /* Call-clobbered tags may have non-call-clobbered
	     symbols in their alias sets.  Ignore them if we are
	     adding VOPs for a call site.  */
	  if (is_call_site && !is_call_clobbered (al))
	    continue;

	  /* If we do not know the full reference tree or if the access is
	     unspecified [0, -1], we cannot prune it.  Otherwise try doing
	     so using access_can_touch_variable.  */
	  if (full_ref
	      && !access_can_touch_variable (full_ref, al, offset, size))
	    continue;

	  if (flags & opf_def)
	    append_vdef (al);
	  else
	    append_vuse (al);
	  none_added = false;
	}

      if (flags & opf_def)
	{
	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.  In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && is_call_site))
	    append_vdef (var);
	}
      else
	{
	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vuse (var);
	}
    }
}

/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, stmt, flags, NULL_TREE, 0, -1, false);
}

/* Subroutine of get_indirect_ref_operands.  ADDR is the address
   that is dereferenced, the meaning of the rest of the arguments
   is the same as in get_indirect_ref_operands.  */

static void
get_addr_dereference_operands (gimple stmt, tree *addr, int flags,
			       tree full_ref, HOST_WIDE_INT offset,
			       HOST_WIDE_INT size, bool recurse_on_base)
{
  tree ptr = *addr;

  /* Mark the statement as having memory operands.  */
  gimple_set_references_memory (stmt, true);

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, stmt, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again or that a propagation pass forgot to
	     update the alias information on the pointers.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && (pi == NULL
		  || (pi->name_mem_tag == NULL_TREE
		      && !pi->pt_anything))
	      && gimple_aliases_computed_p (cfun))
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  /* If we don't know what this pointer points to then we have
	     to make sure to not prune virtual operands based on offset
	     and size.  */
	  if (v_ann->symbol_mem_tag)
	    {
	      add_virtual_operand (v_ann->symbol_mem_tag, stmt, flags,
				   full_ref, 0, -1, false);
	      /* Make sure we add the SMT itself.  */
	      if (!(flags & opf_no_vops))
		{
		  if (flags & opf_def)
		    append_vdef (v_ann->symbol_mem_tag);
		  else
		    append_vuse (v_ann->symbol_mem_tag);
		}
	    }

	  /* Aliasing information is missing; mark statement as
	     volatile so we won't optimize it out too actively.  */
	  else if (!gimple_aliases_computed_p (cfun)
		   && (flags & opf_def))
	    gimple_set_has_volatile_ops (stmt, true);
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      gimple_set_has_volatile_ops (stmt, true);
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, addr, opf_use);
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size,
				 recurse_on_base);
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  tree tag;

  /* Mark the statement as having memory operands.  */
  gimple_set_references_memory (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr));

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      gimple_set_has_volatile_ops (stmt, true);
      return;
    }
  if (!MTAG_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  add_virtual_operand (tag, stmt, flags, expr, 0, -1, false);
}

1633 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call | |
1634 clobbered variables in the function. */ | |
1635 | |
1636 static void | |
1637 add_call_clobber_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED) | |
1638 { | |
1639 unsigned u; | |
1640 bitmap_iterator bi; | |
1641 bitmap not_read_b, not_written_b; | |
1642 | |
1643 gcc_assert (!(gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))); | |
1644 | |
1645 /* If we created .GLOBAL_VAR earlier, just use it. */ | |
1646 if (gimple_global_var (cfun)) | |
1647 { | |
1648 tree var = gimple_global_var (cfun); | |
1649 add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true); | |
1650 return; | |
1651 } | |
1652 | |
1653 /* Get info for local and module level statics. There is a bit | |
1654 set for each static if the call being processed does not read | |
1655 or write that variable. */ | |
1656 not_read_b = callee ? ipa_reference_get_not_read_global (cgraph_node (callee)) : NULL; | |
1657 not_written_b = callee ? ipa_reference_get_not_written_global (cgraph_node (callee)) : NULL; | |
1658 | |
1659 /* Add a VDEF operand for every call clobbered variable. */ | |
1660 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi) | |
1661 { | |
1662 tree var = referenced_var_lookup (u); | |
1663 tree real_var = var; | |
1664 bool not_read; | |
1665 bool not_written; | |
1666 | |
1667 not_read = not_read_b | |
1668 ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) | |
1669 : false; | |
1670 | |
1671 not_written = not_written_b | |
1672 ? bitmap_bit_p (not_written_b, DECL_UID (real_var)) | |
1673 : false; | |
1674 gcc_assert (!unmodifiable_var_p (var)); | |
1675 | |
1676 clobber_stats.clobbered_vars++; | |
1677 | |
1678 /* See if this variable is really clobbered by this function. */ | |
1679 | |
1680 if (not_written) | |
1681 { | |
1682 clobber_stats.static_write_clobbers_avoided++; | |
1683 if (!not_read) | |
1684 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1685 else | |
1686 clobber_stats.static_read_clobbers_avoided++; | |
1687 } | |
1688 else | |
1689 add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true); | |
1690 } | |
1691 } | |
1692 | |
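/* A hedged illustration of the ipa-reference refinement above (the
   callee FOO and the global G are made-up names): if the analysis
   proved that FOO never writes G but may read it, the clobber is
   downgraded to a read:

        # VUSE <G_4>
        foo ();

   while a global that FOO may write keeps the full clobber:

        # G_5 = VDEF <G_4>
        foo (); */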
1693 | |
1694 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the | |
1695 function. */ | |
1696 | |
1697 static void | |
1698 add_call_read_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED) | |
1699 { | |
1700 unsigned u; | |
1701 bitmap_iterator bi; | |
1702 bitmap not_read_b; | |
1703 | |
1704 /* Const functions do not reference memory. */ | |
1705 if (gimple_call_flags (stmt) & ECF_CONST) | |
1706 return; | |
1707 | |
1708 not_read_b = callee ? ipa_reference_get_not_read_global (cgraph_node (callee)) : NULL; | |
1709 | |
1710 /* For pure functions we compute non-escaped uses separately. */ | |
1711 if (gimple_call_flags (stmt) & ECF_PURE) | |
1712 EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun), 0, u, bi) | |
1713 { | |
1714 tree var = referenced_var_lookup (u); | |
1715 tree real_var = var; | |
1716 bool not_read; | |
1717 | |
1718 if (unmodifiable_var_p (var)) | |
1719 continue; | |
1720 | |
1721 not_read = not_read_b | |
1722 ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) | |
1723 : false; | |
1724 | |
1725 clobber_stats.readonly_clobbers++; | |
1726 | |
1727 /* See if this variable is really used by this function. */ | |
1728 if (!not_read) | |
1729 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1730 else | |
1731 clobber_stats.static_readonly_clobbers_avoided++; | |
1732 } | |
1733 | |
1734 /* Add a VUSE for .GLOBAL_VAR if it has been created. See | |
1735 add_referenced_var for the heuristic used to decide whether to | |
1736 create .GLOBAL_VAR. */ | |
1737 if (gimple_global_var (cfun)) | |
1738 { | |
1739 tree var = gimple_global_var (cfun); | |
1740 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1741 return; | |
1742 } | |
1743 | |
1744 /* Add a VUSE for each call-clobbered variable. */ | |
1745 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi) | |
1746 { | |
1747 tree var = referenced_var (u); | |
1748 tree real_var = var; | |
1749 bool not_read; | |
1750 | |
1751 clobber_stats.readonly_clobbers++; | |
1752 | |
1753 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) | |
1754 : false; | |
1755 | |
1756 if (not_read) | |
1757 { | |
1758 clobber_stats.static_readonly_clobbers_avoided++; | |
1759 continue; | |
1760 } | |
1761 | |
1762 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1763 } | |
1764 } | |
1765 | |
1766 | |
1767 /* If STMT is a call that may clobber globals and other symbols that | |
1768 escape, add them to the VDEF/VUSE lists for it. */ | |
1769 | |
1770 static void | |
1771 maybe_add_call_clobbered_vops (gimple stmt) | |
1772 { | |
1773 int call_flags = gimple_call_flags (stmt); | |
1774 | |
1775 /* Mark the statement as having memory operands. */ | |
1776 gimple_set_references_memory (stmt, true); | |
1777 | |
1778 /* If aliases have been computed already, add VDEF or VUSE | |
1779 operands for all the symbols that have been found to be | |
1780 call-clobbered. */ | |
1781 if (gimple_aliases_computed_p (cfun) && !(call_flags & ECF_NOVOPS)) | |
1782 { | |
1783 /* A 'pure' or a 'const' function never call-clobbers anything. | |
1784 A 'noreturn' function might, but since we don't return anyway | |
1785 there is no point in recording that. */ | |
1786 if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) | |
1787 add_call_clobber_ops (stmt, gimple_call_fndecl (stmt)); | |
1788 else if (!(call_flags & ECF_CONST)) | |
1789 add_call_read_ops (stmt, gimple_call_fndecl (stmt)); | |
1790 } | |
1791 } | |
1792 | |
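/* A summary of the dispatch above:

        ECF_NOVOPS or no alias info  -> no virtual operands added here
        ECF_CONST                    -> nothing (no memory references)
        ECF_PURE or ECF_NORETURN     -> VUSEs only (add_call_read_ops)
        anything else                -> clobbers (add_call_clobber_ops) */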
1793 | |
1794 /* Scan all the operands of the GIMPLE_ASM statement STMT. */ | |
1795 | |
1796 static void | |
1797 get_asm_expr_operands (gimple stmt) | |
1798 { | |
1799 size_t i, noutputs; | |
1800 const char **oconstraints; | |
1801 const char *constraint; | |
1802 bool allows_mem, allows_reg, is_inout; | |
1803 | |
1804 noutputs = gimple_asm_noutputs (stmt); | |
1805 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); | |
1806 | |
1807 /* Gather all output operands. */ | |
1808 for (i = 0; i < gimple_asm_noutputs (stmt); i++) | |
1809 { | |
1810 tree link = gimple_asm_output_op (stmt, i); | |
1811 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); | |
1812 oconstraints[i] = constraint; | |
1813 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, | |
1814 &allows_reg, &is_inout); | |
1815 | |
1816 /* This should have been split in gimplify_asm_expr. */ | |
1817 gcc_assert (!allows_reg || !is_inout); | |
1818 | |
1819 /* Memory operands are addressable. Note that STMT needs the | |
1820 address of this operand. */ | |
1821 if (!allows_reg && allows_mem) | |
1822 { | |
1823 tree t = get_base_address (TREE_VALUE (link)); | |
1824 if (t && DECL_P (t)) | |
1825 gimple_add_to_addresses_taken (stmt, t); | |
1826 } | |
1827 | |
1828 get_expr_operands (stmt, &TREE_VALUE (link), opf_def); | |
1829 } | |
1830 | |
1831 /* Gather all input operands. */ | |
1832 for (i = 0; i < gimple_asm_ninputs (stmt); i++) | |
1833 { | |
1834 tree link = gimple_asm_input_op (stmt, i); | |
1835 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); | |
1836 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, | |
1837 &allows_mem, &allows_reg); | |
1838 | |
1839 /* Memory operands are addressable. Note that STMT needs the | |
1840 address of this operand. */ | |
1841 if (!allows_reg && allows_mem) | |
1842 { | |
1843 tree t = get_base_address (TREE_VALUE (link)); | |
1844 if (t && DECL_P (t)) | |
1845 gimple_add_to_addresses_taken (stmt, t); | |
1846 } | |
1847 | |
1848 get_expr_operands (stmt, &TREE_VALUE (link), 0); | |
1849 } | |
1850 | |
1851 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ | |
1852 for (i = 0; i < gimple_asm_nclobbers (stmt); i++) | |
1853 { | |
1854 tree link = gimple_asm_clobber_op (stmt, i); | |
1855 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) | |
1856 { | |
1857 unsigned i; | |
1858 bitmap_iterator bi; | |
1859 | |
1860 /* Mark the statement as having memory operands. */ | |
1861 gimple_set_references_memory (stmt, true); | |
1862 | |
1863 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi) | |
1864 { | |
1865 tree var = referenced_var (i); | |
1866 add_stmt_operand (&var, stmt, opf_def | opf_implicit); | |
1867 } | |
1868 | |
1869 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi) | |
1870 { | |
1871 tree var = referenced_var (i); | |
1872 add_stmt_operand (&var, stmt, opf_def | opf_implicit); | |
1873 } | |
1874 break; | |
1875 } | |
1876 } | |
1877 } | |
1878 | |
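/* For example (a hedged sketch): an asm with a "memory" clobber,

        __asm__ __volatile__ ("" : : : "memory");

   makes the loop above add an implicit definition of every
   call-clobbered and every addressable variable, so optimizers
   cannot assume any memory value survives across the asm. */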
1879 | |
1880 /* Recursively scan the expression pointed to by EXPR_P in statement | |
1881 STMT. FLAGS is one of the OPF_* constants modifying how to | |
1882 interpret the operands found. */ | |
1883 | |
1884 static void | |
1885 get_expr_operands (gimple stmt, tree *expr_p, int flags) | |
1886 { | |
1887 enum tree_code code; | |
1888 enum tree_code_class codeclass; | |
1889 tree expr = *expr_p; | |
1890 | |
1891 if (expr == NULL) | |
1892 return; | |
1893 | |
1894 code = TREE_CODE (expr); | |
1895 codeclass = TREE_CODE_CLASS (code); | |
1896 | |
1897 switch (code) | |
1898 { | |
1899 case ADDR_EXPR: | |
1900 /* Taking the address of a variable does not represent a | |
1901 reference to it, but the fact that the statement takes its | |
1902 address will be of interest to some passes (e.g. alias | |
1903 resolution). */ | |
1904 gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0)); | |
1905 | |
1906 /* If the address is invariant, there may be no interesting | |
1907 variable references inside. */ | |
1908 if (is_gimple_min_invariant (expr)) | |
1909 return; | |
1910 | |
1911 /* Otherwise, there may be variables referenced inside but there | |
1912 should be no VUSEs created, since the referenced objects are | |
1913 not really accessed. The only operands that we should find | |
1914 here are ARRAY_REF indices which will always be real operands | |
1915 (GIMPLE does not allow non-registers as array indices). */ | |
1916 flags |= opf_no_vops; | |
1917 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1918 return; | |
1919 | |
1920 case SSA_NAME: | |
1921 case SYMBOL_MEMORY_TAG: | |
1922 case NAME_MEMORY_TAG: | |
1923 add_stmt_operand (expr_p, stmt, flags); | |
1924 return; | |
1925 | |
1926 case VAR_DECL: | |
1927 case PARM_DECL: | |
1928 case RESULT_DECL: | |
1929 add_stmt_operand (expr_p, stmt, flags); | |
1930 return; | |
1931 | |
1932 case MISALIGNED_INDIRECT_REF: | |
1933 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
1934 /* fall through */ | |
1935 | |
1936 case ALIGN_INDIRECT_REF: | |
1937 case INDIRECT_REF: | |
1938 get_indirect_ref_operands (stmt, expr, flags, expr, 0, -1, true); | |
1939 return; | |
1940 | |
1941 case TARGET_MEM_REF: | |
1942 get_tmr_operands (stmt, expr, flags); | |
1943 return; | |
1944 | |
1945 case ARRAY_REF: | |
1946 case ARRAY_RANGE_REF: | |
1947 case COMPONENT_REF: | |
1948 case REALPART_EXPR: | |
1949 case IMAGPART_EXPR: | |
1950 { | |
1951 tree ref; | |
1952 HOST_WIDE_INT offset, size, maxsize; | |
1953 | |
1954 if (TREE_THIS_VOLATILE (expr)) | |
1955 gimple_set_has_volatile_ops (stmt, true); | |
1956 | |
1957 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); | |
1958 if (TREE_CODE (ref) == INDIRECT_REF) | |
1959 { | |
1960 get_indirect_ref_operands (stmt, ref, flags, expr, offset, | |
1961 maxsize, false); | |
1962 flags |= opf_no_vops; | |
1963 } | |
1964 | |
1965 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1966 | |
1967 if (code == COMPONENT_REF) | |
1968 { | |
1969 if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) | |
1970 gimple_set_has_volatile_ops (stmt, true); | |
1971 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); | |
1972 } | |
1973 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) | |
1974 { | |
1975 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); | |
1976 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); | |
1977 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use); | |
1978 } | |
1979 | |
1980 return; | |
1981 } | |
1982 | |
1983 case WITH_SIZE_EXPR: | |
1984 /* WITH_SIZE_EXPR is a pass-through reference to its first argument, | |
1985 and an rvalue reference to its second argument. */ | |
1986 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); | |
1987 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1988 return; | |
1989 | |
1990 case COND_EXPR: | |
1991 case VEC_COND_EXPR: | |
1992 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use); | |
1993 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); | |
1994 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); | |
1995 return; | |
1996 | |
1997 case CONSTRUCTOR: | |
1998 { | |
1999 /* General aggregate CONSTRUCTORs have been decomposed, but they | |
2000 are still in use as the COMPLEX_EXPR equivalent for vectors. */ | |
2001 constructor_elt *ce; | |
2002 unsigned HOST_WIDE_INT idx; | |
2003 | |
2004 for (idx = 0; | |
2005 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce); | |
2006 idx++) | |
2007 get_expr_operands (stmt, &ce->value, opf_use); | |
2008 | |
2009 return; | |
2010 } | |
2011 | |
2012 case BIT_FIELD_REF: | |
2013 if (TREE_THIS_VOLATILE (expr)) | |
2014 gimple_set_has_volatile_ops (stmt, true); | |
2015 /* FALLTHRU */ | |
2016 | |
2017 case TRUTH_NOT_EXPR: | |
2018 case VIEW_CONVERT_EXPR: | |
2019 do_unary: | |
2020 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
2021 return; | |
2022 | |
2023 case TRUTH_AND_EXPR: | |
2024 case TRUTH_OR_EXPR: | |
2025 case TRUTH_XOR_EXPR: | |
2026 case COMPOUND_EXPR: | |
2027 case OBJ_TYPE_REF: | |
2028 case ASSERT_EXPR: | |
2029 do_binary: | |
2030 { | |
2031 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
2032 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
2033 return; | |
2034 } | |
2035 | |
2036 case DOT_PROD_EXPR: | |
2037 case REALIGN_LOAD_EXPR: | |
2038 { | |
2039 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
2040 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
2041 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); | |
2042 return; | |
2043 } | |
2044 | |
2045 case CHANGE_DYNAMIC_TYPE_EXPR: | |
2046 gcc_unreachable (); | |
2047 | |
2048 case FUNCTION_DECL: | |
2049 case LABEL_DECL: | |
2050 case CONST_DECL: | |
2051 case CASE_LABEL_EXPR: | |
2052 case FILTER_EXPR: | |
2053 case EXC_PTR_EXPR: | |
2054 /* Expressions that make no memory references. */ | |
2055 return; | |
2056 | |
2057 default: | |
2058 if (codeclass == tcc_unary) | |
2059 goto do_unary; | |
2060 if (codeclass == tcc_binary || codeclass == tcc_comparison) | |
2061 goto do_binary; | |
2062 if (codeclass == tcc_constant || codeclass == tcc_type) | |
2063 return; | |
2064 } | |
2065 | |
2066 /* If we get here, something has gone wrong. */ | |
2067 #ifdef ENABLE_CHECKING | |
2068 fprintf (stderr, "unhandled expression in get_expr_operands():\n"); | |
2069 debug_tree (expr); | |
2070 fputs ("\n", stderr); | |
2071 #endif | |
2072 gcc_unreachable (); | |
2073 } | |
2074 | |
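/* A hedged worked example of the dispatch above: scanning the
   assignment

        a[i_5] = b_3 + 1;

   enters the ARRAY_REF case for the left-hand side with opf_def.
   The base A is not a GIMPLE register, so add_stmt_operand records a
   virtual definition of A, while the index i_5 is scanned with
   opf_use and becomes a real USE.  The right-hand side is then
   scanned with opf_use, adding a real USE of b_3. */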
2075 | |
2076 /* Parse STMT looking for operands. When finished, the various | |
2077 build_* operand vectors will have potential operands in them. */ | |
2078 | |
2079 static void | |
2080 parse_ssa_operands (gimple stmt) | |
2081 { | |
2082 enum gimple_code code = gimple_code (stmt); | |
2083 | |
2084 if (code == GIMPLE_ASM) | |
2085 get_asm_expr_operands (stmt); | |
2086 else | |
2087 { | |
2088 size_t i, start = 0; | |
2089 | |
2090 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) | |
2091 { | |
2092 get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); | |
2093 start = 1; | |
2094 } | |
2095 | |
2096 for (i = start; i < gimple_num_ops (stmt); i++) | |
2097 get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); | |
2098 | |
2099 /* Add call-clobbered operands, if needed. */ | |
2100 if (code == GIMPLE_CALL) | |
2101 maybe_add_call_clobbered_vops (stmt); | |
2102 | |
2103 /* Make sure the return value is addressable in case of NRV. */ | |
2104 if (code == GIMPLE_CALL | |
2105 && gimple_call_lhs (stmt) != NULL_TREE | |
2106 && gimple_call_return_slot_opt_p (stmt) | |
2107 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt)))) | |
2108 gimple_add_to_addresses_taken (stmt, gimple_call_lhs (stmt)); | |
2109 } | |
2110 } | |
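/* A hedged illustration of the scan order above: for the call

        x_7 = foo (y_2, z_3);

   operand 0 (the LHS x_7) is scanned with opf_def, the callee and
   the arguments y_2 and z_3 are scanned with opf_use, and
   maybe_add_call_clobbered_vops then appends whatever VDEFs and
   VUSEs the call's side effects imply. */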
2111 | |
2112 | |
2113 /* Create an operands cache for STMT. */ | |
2114 | |
2115 static void | |
2116 build_ssa_operands (gimple stmt) | |
2117 { | |
2118 /* Initially assume that the statement has no volatile operands and | |
2119 makes no memory references. */ | |
2120 gimple_set_has_volatile_ops (stmt, false); | |
2121 gimple_set_references_memory (stmt, false); | |
2122 | |
2123 /* Just clear the bitmap so we don't end up reallocating it over and over. */ | |
2124 if (gimple_addresses_taken (stmt)) | |
2125 bitmap_clear (gimple_addresses_taken (stmt)); | |
2126 | |
2127 start_ssa_stmt_operands (); | |
2128 parse_ssa_operands (stmt); | |
2129 operand_build_sort_virtual (build_vuses); | |
2130 operand_build_sort_virtual (build_vdefs); | |
2131 finalize_ssa_stmt_operands (stmt); | |
2132 | |
2133 /* For added safety, assume that statements with volatile operands | |
2134 also reference memory. */ | |
2135 if (gimple_has_volatile_ops (stmt)) | |
2136 gimple_set_references_memory (stmt, true); | |
2137 } | |
2138 | |
2139 | |
2140 /* Releases the operands of STMT back to their freelists, and clears | |
2141 the stmt operand lists. */ | |
2142 | |
2143 void | |
2144 free_stmt_operands (gimple stmt) | |
2145 { | |
2146 def_optype_p defs = gimple_def_ops (stmt), last_def; | |
2147 use_optype_p uses = gimple_use_ops (stmt), last_use; | |
2148 voptype_p vuses = gimple_vuse_ops (stmt); | |
2149 voptype_p vdefs = gimple_vdef_ops (stmt), vdef, next_vdef; | |
2150 unsigned i; | |
2151 | |
2152 if (defs) | |
2153 { | |
2154 for (last_def = defs; last_def->next; last_def = last_def->next) | |
2155 continue; | |
2156 last_def->next = gimple_ssa_operands (cfun)->free_defs; | |
2157 gimple_ssa_operands (cfun)->free_defs = defs; | |
2158 gimple_set_def_ops (stmt, NULL); | |
2159 } | |
2160 | |
2161 if (uses) | |
2162 { | |
2163 for (last_use = uses; last_use->next; last_use = last_use->next) | |
2164 delink_imm_use (USE_OP_PTR (last_use)); | |
2165 delink_imm_use (USE_OP_PTR (last_use)); | |
2166 last_use->next = gimple_ssa_operands (cfun)->free_uses; | |
2167 gimple_ssa_operands (cfun)->free_uses = uses; | |
2168 gimple_set_use_ops (stmt, NULL); | |
2169 } | |
2170 | |
2171 if (vuses) | |
2172 { | |
2173 for (i = 0; i < VUSE_NUM (vuses); i++) | |
2174 delink_imm_use (VUSE_OP_PTR (vuses, i)); | |
2175 add_vop_to_freelist (vuses); | |
2176 gimple_set_vuse_ops (stmt, NULL); | |
2177 } | |
2178 | |
2179 if (vdefs) | |
2180 { | |
2181 for (vdef = vdefs; vdef; vdef = next_vdef) | |
2182 { | |
2183 next_vdef = vdef->next; | |
2184 delink_imm_use (VDEF_OP_PTR (vdef, 0)); | |
2185 add_vop_to_freelist (vdef); | |
2186 } | |
2187 gimple_set_vdef_ops (stmt, NULL); | |
2188 } | |
2189 | |
2190 if (gimple_has_ops (stmt)) | |
2191 gimple_set_addresses_taken (stmt, NULL); | |
2192 | |
2193 if (gimple_has_mem_ops (stmt)) | |
2194 { | |
2195 gimple_set_stored_syms (stmt, NULL, &operands_bitmap_obstack); | |
2196 gimple_set_loaded_syms (stmt, NULL, &operands_bitmap_obstack); | |
2197 } | |
2198 } | |
2199 | |
2200 | |
2201 /* Get the operands of statement STMT. */ | |
2202 | |
2203 void | |
2204 update_stmt_operands (gimple stmt) | |
2205 { | |
2206 /* If update_stmt_operands is called before SSA is initialized, do | |
2207 nothing. */ | |
2208 if (!ssa_operands_active ()) | |
2209 return; | |
2210 | |
2211 timevar_push (TV_TREE_OPS); | |
2212 | |
2213 gcc_assert (gimple_modified_p (stmt)); | |
2214 build_ssa_operands (stmt); | |
2215 gimple_set_modified (stmt, false); | |
2216 | |
2217 timevar_pop (TV_TREE_OPS); | |
2218 } | |
2219 | |
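/* A hedged usage note: passes normally do not call this function
   directly.  After modifying a statement in place they call
   update_stmt, which marks the statement modified and triggers the
   re-scan, e.g.:

        gimple_assign_set_rhs1 (stmt, new_op);
        update_stmt (stmt);

   (new_op is a made-up name for whatever replacement operand the
   pass computed). */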
2220 | |
2221 /* Copies virtual operands from SRC to DEST. */ | |
2222 | |
2223 void | |
2224 copy_virtual_operands (gimple dest, gimple src) | |
2225 { | |
2226 unsigned int i, n; | |
2227 voptype_p src_vuses, dest_vuses; | |
2228 voptype_p src_vdefs, dest_vdefs; | |
2229 struct voptype_d vuse; | |
2230 struct voptype_d vdef; | |
2231 | |
2232 if (!gimple_has_mem_ops (src)) | |
2233 return; | |
2234 | |
2235 gimple_set_vdef_ops (dest, NULL); | |
2236 gimple_set_vuse_ops (dest, NULL); | |
2237 | |
2238 gimple_set_stored_syms (dest, gimple_stored_syms (src), | |
2239 &operands_bitmap_obstack); | |
2240 gimple_set_loaded_syms (dest, gimple_loaded_syms (src), | |
2241 &operands_bitmap_obstack); | |
2242 | |
2243 /* Copy all the VUSE operators and corresponding operands. */ | |
2244 dest_vuses = &vuse; | |
2245 for (src_vuses = gimple_vuse_ops (src); | |
2246 src_vuses; | |
2247 src_vuses = src_vuses->next) | |
2248 { | |
2249 n = VUSE_NUM (src_vuses); | |
2250 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses); | |
2251 for (i = 0; i < n; i++) | |
2252 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i)); | |
2253 | |
2254 if (gimple_vuse_ops (dest) == NULL) | |
2255 gimple_set_vuse_ops (dest, vuse.next); | |
2256 } | |
2257 | |
2258 /* Copy all the VDEF operators and corresponding operands. */ | |
2259 dest_vdefs = &vdef; | |
2260 for (src_vdefs = gimple_vdef_ops (src); | |
2261 src_vdefs; | |
2262 src_vdefs = src_vdefs->next) | |
2263 { | |
2264 n = VUSE_NUM (src_vdefs); | |
2265 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs); | |
2266 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs); | |
2267 for (i = 0; i < n; i++) | |
2268 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i)); | |
2269 | |
2270 if (gimple_vdef_ops (dest) == NULL) | |
2271 gimple_set_vdef_ops (dest, vdef.next); | |
2272 } | |
2273 } | |
2274 | |
2275 | |
2276 /* Specifically for use in DOM's expression analysis. Given a store, we | |
2277 create an artificial stmt which looks like a load from the store; this | |
2278 can be used to eliminate redundant loads. OLD_STMT is the store stmt, | |
2279 and NEW_STMT is the new load which represents a load of the values | |
2280 stored. If DELINK_IMM_USES_P is true, the immediate uses of NEW_STMT | |
2281 will be de-linked. */ | |
2282 | |
2283 void | |
2284 create_ssa_artificial_load_stmt (gimple new_stmt, gimple old_stmt, | |
2285 bool delink_imm_uses_p) | |
2286 { | |
2287 tree op; | |
2288 ssa_op_iter iter; | |
2289 use_operand_p use_p; | |
2290 unsigned i; | |
2291 | |
2292 gimple_set_modified (new_stmt, false); | |
2293 | |
2294 /* Process NEW_STMT looking for operands. */ | |
2295 start_ssa_stmt_operands (); | |
2296 parse_ssa_operands (new_stmt); | |
2297 | |
2298 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++) | |
2299 if (TREE_CODE (op) != SSA_NAME) | |
2300 var_ann (op)->in_vuse_list = false; | |
2301 | |
2302 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++) | |
2303 if (TREE_CODE (op) != SSA_NAME) | |
2304 var_ann (op)->in_vdef_list = false; | |
2305 | |
2306 /* Remove any virtual operands that were found. */ | |
2307 VEC_truncate (tree, build_vdefs, 0); | |
2308 VEC_truncate (tree, build_vuses, 0); | |
2309 | |
2310 /* Clear the loads and stores bitmaps. */ | |
2311 bitmap_clear (build_loads); | |
2312 bitmap_clear (build_stores); | |
2313 | |
2314 /* For each VDEF on the original statement, we want to create a | |
2315 VUSE of the VDEF result operand on the new statement. */ | |
2316 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF) | |
2317 append_vuse (op); | |
2318 | |
2319 finalize_ssa_stmt_operands (new_stmt); | |
2320 | |
2321 /* All uses in this fake stmt must not be in the immediate use lists. */ | |
2322 if (delink_imm_uses_p) | |
2323 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES) | |
2324 delink_imm_use (use_p); | |
2325 } | |
2326 | |
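/* A hedged illustration: if OLD_STMT is the store

        # a_6 = VDEF <a_5>
        a = x_3;

   the artificial load built here carries a VUSE of each VDEF result:

        # VUSE <a_6>
        ... = a;

   which lets DOM recognize a later load of A as redundant with the
   value just stored. */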
2327 | |
2328 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made | |
2329 to test the validity of the swap operation. */ | |
2330 | |
2331 void | |
2332 swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) | |
2333 { | |
2334 tree op0, op1; | |
2335 op0 = *exp0; | |
2336 op1 = *exp1; | |
2337 | |
2338 /* If the operand cache is active, attempt to preserve the relative | |
2339 positions of these two operands in their respective immediate use | |
2340 lists. */ | |
2341 if (ssa_operands_active () && op0 != op1) | |
2342 { | |
2343 use_optype_p use0, use1, ptr; | |
2344 use0 = use1 = NULL; | |
2345 | |
2346 /* Find the 2 operands in the cache, if they are there. */ | |
2347 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) | |
2348 if (USE_OP_PTR (ptr)->use == exp0) | |
2349 { | |
2350 use0 = ptr; | |
2351 break; | |
2352 } | |
2353 | |
2354 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) | |
2355 if (USE_OP_PTR (ptr)->use == exp1) | |
2356 { | |
2357 use1 = ptr; | |
2358 break; | |
2359 } | |
2360 | |
2361 /* If we did not find operand entries for both uses, there isn't much | |
2362 we can do at this point. Presumably we don't need to worry about it. */ | |
2363 if (use0 && use1) | |
2364 { | |
2365 tree *tmp = USE_OP_PTR (use1)->use; | |
2366 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use; | |
2367 USE_OP_PTR (use0)->use = tmp; | |
2368 } | |
2369 } | |
2370 | |
2371 /* Now swap the data. */ | |
2372 *exp0 = op1; | |
2373 *exp1 = op0; | |
2374 } | |
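/* A hedged usage sketch (it assumes STMT is a GIMPLE_ASSIGN with a
   commutative binary right-hand side): canonicalization code can
   swap the two RHS operands without invalidating the operand cache:

        swap_tree_operands (stmt,
                            gimple_assign_rhs1_ptr (stmt),
                            gimple_assign_rhs2_ptr (stmt));

   The bookkeeping above makes each cached use record follow its SSA
   name to the slot that now holds it. */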
2375 | |
2376 /* Add the base address of REF to SET. */ | |
2377 | |
2378 void | |
2379 add_to_addressable_set (tree ref, bitmap *set) | |
2380 { | |
2381 tree var; | |
2382 | |
2383 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF | |
2384 as the only thing we take the address of. If VAR is a structure, | |
2385 taking the address of a field means that the whole structure may | |
2386 be referenced using pointer arithmetic. See PR 21407 and the | |
2387 ensuing mailing list discussion. */ | |
2388 var = get_base_address (ref); | |
2389 if (var && SSA_VAR_P (var)) | |
2390 { | |
2391 if (*set == NULL) | |
2392 *set = BITMAP_ALLOC (&operands_bitmap_obstack); | |
2393 | |
2394 bitmap_set_bit (*set, DECL_UID (var)); | |
2395 TREE_ADDRESSABLE (var) = 1; | |
2396 } | |
2397 } | |
2398 | |
2399 | |
2400 /* Add the base address of REF to the set of addresses taken by STMT. | |
2401 REF may be a single variable whose address has been taken or any | |
2402 other valid GIMPLE memory reference (structure reference, array, | |
2403 etc). If the base address of REF is a decl that has sub-variables, | |
2404 also add all of its sub-variables. */ | |
2405 | |
2406 void | |
2407 gimple_add_to_addresses_taken (gimple stmt, tree ref) | |
2408 { | |
2409 gcc_assert (gimple_has_ops (stmt)); | |
2410 add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt)); | |
2411 } | |
2412 | |
2413 | |
2414 /* Scan the immediate_use list for VAR making sure it is linked properly. | |
2415 Return TRUE if there is a problem and emit an error message to F. */ | |
2416 | |
2417 bool | |
2418 verify_imm_links (FILE *f, tree var) | |
2419 { | |
2420 use_operand_p ptr, prev, list; | |
2421 int count; | |
2422 | |
2423 gcc_assert (TREE_CODE (var) == SSA_NAME); | |
2424 | |
2425 list = &(SSA_NAME_IMM_USE_NODE (var)); | |
2426 gcc_assert (list->use == NULL); | |
2427 | |
2428 if (list->prev == NULL) | |
2429 { | |
2430 gcc_assert (list->next == NULL); | |
2431 return false; | |
2432 } | |
2433 | |
2434 prev = list; | |
2435 count = 0; | |
2436 for (ptr = list->next; ptr != list; ) | |
2437 { | |
2438 if (prev != ptr->prev) | |
2439 goto error; | |
2440 | |
2441 if (ptr->use == NULL) | |
2442 goto error; /* 2 roots, or SAFE guard node. */ | |
2443 else if (*(ptr->use) != var) | |
2444 goto error; | |
2445 | |
2446 prev = ptr; | |
2447 ptr = ptr->next; | |
2448 | |
2449 /* Avoid infinite loops. 50,000,000 uses probably indicates a | |
2450 problem. */ | |
2451 if (count++ > 50000000) | |
2452 goto error; | |
2453 } | |
2454 | |
2455 /* Verify list in the other direction. */ | |
2456 prev = list; | |
2457 for (ptr = list->prev; ptr != list; ) | |
2458 { | |
2459 if (prev != ptr->next) | |
2460 goto error; | |
2461 prev = ptr; | |
2462 ptr = ptr->prev; | |
2463 if (count-- < 0) | |
2464 goto error; | |
2465 } | |
2466 | |
2467 if (count != 0) | |
2468 goto error; | |
2469 | |
2470 return false; | |
2471 | |
2472 error: | |
2473 if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) | |
2474 { | |
2475 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt); | |
2476 print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); | |
2477 } | |
2478 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, | |
2479 (void *)ptr->use); | |
2480 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM); | |
2481 fprintf (f, "\n"); | |
2482 return true; | |
2483 } | |
2484 | |
2485 | |
2486 /* Dump all the immediate uses of VAR to FILE. */ | |
2487 | |
2488 void | |
2489 dump_immediate_uses_for (FILE *file, tree var) | |
2490 { | |
2491 imm_use_iterator iter; | |
2492 use_operand_p use_p; | |
2493 | |
2494 gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
2495 | |
2496 print_generic_expr (file, var, TDF_SLIM); | |
2497 fprintf (file, " : -->"); | |
2498 if (has_zero_uses (var)) | |
2499 fprintf (file, " no uses.\n"); | |
2500 else | |
2501 if (has_single_use (var)) | |
2502 fprintf (file, " single use.\n"); | |
2503 else | |
2504 fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
2505 | |
2506 FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
2507 { | |
2508 if (use_p->loc.stmt == NULL && use_p->use == NULL) | |
2509 fprintf (file, "***end of stmt iterator marker***\n"); | |
2510 else | |
2511 if (!is_gimple_reg (USE_FROM_PTR (use_p))) | |
2512 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); | |
2513 else | |
2514 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); | |
2515 } | |
2516 fprintf (file, "\n"); | |
2517 } | |
2518 | |
2519 | |
2520 /* Dump all the immediate uses to FILE. */ | |
2521 | |
2522 void | |
2523 dump_immediate_uses (FILE *file) | |
2524 { | |
2525 tree var; | |
2526 unsigned int x; | |
2527 | |
2528 fprintf (file, "Immediate_uses: \n\n"); | |
2529 for (x = 1; x < num_ssa_names; x++) | |
2530 { | |
2531 var = ssa_name (x); | |
2532 if (!var) | |
2533 continue; | |
2534 dump_immediate_uses_for (file, var); | |
2535 } | |
2536 } | |
2537 | |
2538 | |
2539 /* Dump def-use edges on stderr. */ | |
2540 | |
2541 void | |
2542 debug_immediate_uses (void) | |
2543 { | |
2544 dump_immediate_uses (stderr); | |
2545 } | |
2546 | |
2547 | |
2548 /* Dump the def-use edges of VAR on stderr. */ | |
2549 | |
2550 void | |
2551 debug_immediate_uses_for (tree var) | |
2552 { | |
2553 dump_immediate_uses_for (stderr, var); | |
2554 } | |
2555 | |
2556 | |
2557 /* Create a new change buffer for the statement pointed by STMT_P and | |
2558 push the buffer into SCB_STACK. Each change buffer | |
2559 records state information needed to determine what changed in the | |
2560 statement. Mainly, this keeps track of symbols that may need to be | |
2561 put into SSA form, SSA name replacements and other information | |
2562 needed to keep the SSA form up to date. */ | |
2563 | |
2564 void | |
2565 push_stmt_changes (gimple *stmt_p) | |
2566 { | |
2567 gimple stmt; | |
2568 scb_t buf; | |
2569 | |
2570 stmt = *stmt_p; | |
2571 | |
2572 /* It makes no sense to keep track of PHI nodes. */ | |
2573 if (gimple_code (stmt) == GIMPLE_PHI) | |
2574 return; | |
2575 | |
2576 buf = XNEW (struct scb_d); | |
2577 memset (buf, 0, sizeof *buf); | |
2578 | |
2579 buf->stmt_p = stmt_p; | |
2580 | |
2581 if (gimple_references_memory_p (stmt)) | |
2582 { | |
2583 tree op; | |
2584 ssa_op_iter i; | |
2585 | |
2586 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE) | |
2587 { | |
2588 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2589 if (buf->loads == NULL) | |
2590 buf->loads = BITMAP_ALLOC (NULL); | |
2591 bitmap_set_bit (buf->loads, DECL_UID (sym)); | |
2592 } | |
2593 | |
2594 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF) | |
2595 { | |
2596 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2597 if (buf->stores == NULL) | |
2598 buf->stores = BITMAP_ALLOC (NULL); | |
2599 bitmap_set_bit (buf->stores, DECL_UID (sym)); | |
2600 } | |
2601 } | |
2602 | |
2603 VEC_safe_push (scb_t, heap, scb_stack, buf); | |
2604 } | |
2605 | |
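/* A hedged sketch of the intended protocol (fold_stmt stands in for
   any in-place modification of the statement):

        push_stmt_changes (gsi_stmt_ptr (&gsi));
        if (fold_stmt (&gsi))
          pop_stmt_changes (gsi_stmt_ptr (&gsi));
        else
          discard_stmt_changes (gsi_stmt_ptr (&gsi));

   pop_stmt_changes (below) re-scans the operands and marks symbols
   whose load/store sets changed for renaming; discard_stmt_changes
   skips the re-scan when nothing was actually touched. */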
2606 | |
2607 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2 | |
2608 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */ | |
2609 | |
2610 static void | |
2611 mark_difference_for_renaming (bitmap s1, bitmap s2) | |
2612 { | |
2613 if (s1 == NULL && s2 == NULL) | |
2614 return; | |
2615 | |
2616 if (s1 && s2 == NULL) | |
2617 mark_set_for_renaming (s1); | |
2618 else if (s1 == NULL && s2) | |
2619 mark_set_for_renaming (s2); | |
2620 else if (!bitmap_equal_p (s1, s2)) | |
2621 { | |
2622 bitmap t1 = BITMAP_ALLOC (NULL); | |
2623 bitmap_xor (t1, s1, s2); | |
2624 mark_set_for_renaming (t1); | |
2625 BITMAP_FREE (t1); | |
2626 } | |
2627 } | |
2628 | |
2629 | |
2630 /* Pop the top SCB from SCB_STACK and act on the differences between | |
2631 what was recorded by push_stmt_changes and the current state of | |
2632 the statement. */ | |
2633 | |
2634 void | |
2635 pop_stmt_changes (gimple *stmt_p) | |
2636 { | |
2637 tree op; | |
2638 gimple stmt; | |
2639 ssa_op_iter iter; | |
2640 bitmap loads, stores; | |
2641 scb_t buf; | |
2642 | |
2643 stmt = *stmt_p; | |
2644 | |
2645 /* It makes no sense to keep track of PHI nodes. */ | |
2646 if (gimple_code (stmt) == GIMPLE_PHI) | |
2647 return; | |
2648 | |
2649 buf = VEC_pop (scb_t, scb_stack); | |
2650 gcc_assert (stmt_p == buf->stmt_p); | |
2651 | |
2652 /* Force an operand re-scan on the statement and mark any newly | |
2653 exposed variables. */ | |
2654 update_stmt (stmt); | |
2655 | |
2656 /* Determine whether any memory symbols need to be renamed. If the | |
2657 sets of loads and stores are different after the statement is | |
2658 modified, then the affected symbols need to be renamed. | |
2659 | |
2660 Note that it may be possible for the statement to not reference | |
2661 memory anymore, but we still need to act on the differences in | |
2662 the sets of symbols. */ | |
2663 loads = stores = NULL; | |
2664 if (gimple_references_memory_p (stmt)) | |
2665 { | |
2666 tree op; | |
2667 ssa_op_iter i; | |
2668 | |
2669 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE) | |
2670 { | |
2671 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2672 if (loads == NULL) | |
2673 loads = BITMAP_ALLOC (NULL); | |
2674 bitmap_set_bit (loads, DECL_UID (sym)); | |
2675 } | |
2676 | |
2677 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF) | |
2678 { | |
2679 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2680 if (stores == NULL) | |
2681 stores = BITMAP_ALLOC (NULL); | |
2682 bitmap_set_bit (stores, DECL_UID (sym)); | |
2683 } | |
2684 } | |
2685 | |
2686 /* If LOADS is different from BUF->LOADS, the affected | |
2687 symbols need to be marked for renaming. */ | |
2688 mark_difference_for_renaming (loads, buf->loads); | |
2689 | |
2690 /* Similarly for STORES and BUF->STORES. */ | |
2691 mark_difference_for_renaming (stores, buf->stores); | |
2692 | |
2693 /* Mark all the naked GIMPLE register operands for renaming. */ | |
2694 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE) | |
2695 if (DECL_P (op)) | |
2696 mark_sym_for_renaming (op); | |
2697 | |
2698 /* FIXME, need to add more finalizers here. Cleanup EH info, | |
2699 recompute invariants for address expressions, add | |
2700 SSA replacement mappings, etc. For instance, given | |
2701 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of | |
2702 the form: | |
2703 | |
2704 # SMT.4_20 = VDEF <SMT.4_16> | |
2705 D.1576_11 = 1.0e+0; | |
2706 | |
2707 So, the VDEF will disappear, but instead of marking SMT.4 for | |
2708 renaming it would be far more efficient to establish a | |
2709 replacement mapping that would replace every reference of | |
2710 SMT.4_20 with SMT.4_16. */ | |
2711 | |
2712 /* Free memory used by the buffer. */ | |
2713 BITMAP_FREE (buf->loads); | |
2714 BITMAP_FREE (buf->stores); | |
2715 BITMAP_FREE (loads); | |
2716 BITMAP_FREE (stores); | |
2717 buf->stmt_p = NULL; | |
2718 free (buf); | |
2719 } | |
2720 | |
2721 | |
2722 /* Discard the topmost change buffer from SCB_STACK. This is useful | |
2723 when the caller realizes that it did not actually modify the | |
2724 statement, as it avoids the expensive operand re-scan. */ | |
2725 | |
2726 void | |
2727 discard_stmt_changes (gimple *stmt_p) | |
2728 { | |
2729 scb_t buf; | |
2730 gimple stmt; | |
2731 | |
2732 /* It makes no sense to keep track of PHI nodes. */ | |
2733 stmt = *stmt_p; | |
2734 if (gimple_code (stmt) == GIMPLE_PHI) | |
2735 return; | |
2736 | |
2737 buf = VEC_pop (scb_t, scb_stack); | |
2738 gcc_assert (stmt_p == buf->stmt_p); | |
2739 | |
2740 /* Free memory used by the buffer. */ | |
2741 BITMAP_FREE (buf->loads); | |
2742 BITMAP_FREE (buf->stores); | |
2743 buf->stmt_p = NULL; | |
2744 free (buf); | |
2745 } |