comparison: gcc/tree-dfa.c @ 16:04ced10e8804
gcc 7
author | kono
date | Fri, 27 Oct 2017 22:46:09 +0900
parents | f6334be47118
children | 84e7813d76e9
15:561a7518be6b | 16:04ced10e8804
1 /* Data flow functions for trees. | 1 /* Data flow functions for trees. |
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010 | 2 Copyright (C) 2001-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 Contributed by Diego Novillo <dnovillo@redhat.com> | 3 Contributed by Diego Novillo <dnovillo@redhat.com> |
5 | 4 |
6 This file is part of GCC. | 5 This file is part of GCC. |
7 | 6 |
8 GCC is free software; you can redistribute it and/or modify | 7 GCC is free software; you can redistribute it and/or modify |
20 <http://www.gnu.org/licenses/>. */ | 19 <http://www.gnu.org/licenses/>. */ |
21 | 20 |
22 #include "config.h" | 21 #include "config.h" |
23 #include "system.h" | 22 #include "system.h" |
24 #include "coretypes.h" | 23 #include "coretypes.h" |
25 #include "tm.h" | 24 #include "backend.h" |
26 #include "hashtab.h" | 25 #include "rtl.h" |
27 #include "pointer-set.h" | |
28 #include "tree.h" | 26 #include "tree.h" |
29 #include "tm_p.h" | 27 #include "gimple.h" |
30 #include "basic-block.h" | 28 #include "tree-pass.h" |
31 #include "output.h" | 29 #include "ssa.h" |
32 #include "timevar.h" | 30 #include "tree-pretty-print.h" |
33 #include "ggc.h" | 31 #include "fold-const.h" |
32 #include "stor-layout.h" | |
34 #include "langhooks.h" | 33 #include "langhooks.h" |
35 #include "flags.h" | 34 #include "gimple-iterator.h" |
36 #include "function.h" | 35 #include "gimple-walk.h" |
37 #include "tree-pretty-print.h" | 36 #include "tree-dfa.h" |
38 #include "tree-dump.h" | |
39 #include "gimple.h" | |
40 #include "tree-flow.h" | |
41 #include "tree-inline.h" | |
42 #include "tree-pass.h" | |
43 #include "convert.h" | |
44 #include "params.h" | |
45 #include "cgraph.h" | |
46 | 37 |
47 /* Build and maintain data flow information for trees. */ | 38 /* Build and maintain data flow information for trees. */ |
48 | 39 |
49 /* Counters used to display DFA and SSA statistics. */ | 40 /* Counters used to display DFA and SSA statistics. */ |
50 struct dfa_stats_d | 41 struct dfa_stats_d |
51 { | 42 { |
52 long num_var_anns; | |
53 long num_defs; | 43 long num_defs; |
54 long num_uses; | 44 long num_uses; |
55 long num_phis; | 45 long num_phis; |
56 long num_phi_args; | 46 long num_phi_args; |
57 size_t max_num_phi_args; | 47 size_t max_num_phi_args; |
60 }; | 50 }; |
61 | 51 |
62 | 52 |
63 /* Local functions. */ | 53 /* Local functions. */ |
64 static void collect_dfa_stats (struct dfa_stats_d *); | 54 static void collect_dfa_stats (struct dfa_stats_d *); |
65 static tree find_vars_r (tree *, int *, void *); | |
66 | 55 |
67 | 56 |
68 /*--------------------------------------------------------------------------- | 57 /*--------------------------------------------------------------------------- |
69 Dataflow analysis (DFA) routines | 58 Dataflow analysis (DFA) routines |
70 ---------------------------------------------------------------------------*/ | 59 ---------------------------------------------------------------------------*/ |
71 /* Find all the variables referenced in the function. This function | |
72 builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS. | |
73 | |
74 Note that this function does not look for statement operands; it simply |
75 determines what variables are referenced in the program and detects | |
76 various attributes for each variable used by alias analysis and the | |
77 optimizer. */ | |
78 | |
79 static unsigned int | |
80 find_referenced_vars (void) | |
81 { | |
82 basic_block bb; | |
83 gimple_stmt_iterator si; | |
84 | |
85 FOR_EACH_BB (bb) | |
86 { | |
87 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si)) | |
88 { | |
89 gimple stmt = gsi_stmt (si); | |
90 if (is_gimple_debug (stmt)) | |
91 continue; | |
92 find_referenced_vars_in (gsi_stmt (si)); | |
93 } | |
94 | |
95 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) | |
96 find_referenced_vars_in (gsi_stmt (si)); | |
97 } | |
98 | |
99 return 0; | |
100 } | |
101 | |
102 struct gimple_opt_pass pass_referenced_vars = | |
103 { | |
104 { | |
105 GIMPLE_PASS, | |
106 "*referenced_vars", /* name */ | |
107 NULL, /* gate */ | |
108 find_referenced_vars, /* execute */ | |
109 NULL, /* sub */ | |
110 NULL, /* next */ | |
111 0, /* static_pass_number */ | |
112 TV_FIND_REFERENCED_VARS, /* tv_id */ | |
113 PROP_gimple_leh | PROP_cfg, /* properties_required */ | |
114 PROP_referenced_vars, /* properties_provided */ | |
115 0, /* properties_destroyed */ | |
116 TODO_dump_func, /* todo_flags_start */ | |
117 TODO_dump_func /* todo_flags_finish */ | |
118 } | |
119 }; | |
120 | |
121 | |
122 /*--------------------------------------------------------------------------- | |
123 Manage annotations | |
124 ---------------------------------------------------------------------------*/ | |
125 /* Create a new annotation for a _DECL node T. */ | |
126 | |
127 var_ann_t | |
128 create_var_ann (tree t) | |
129 { | |
130 var_ann_t ann; | |
131 | |
132 gcc_assert (t); | |
133 gcc_assert (TREE_CODE (t) == VAR_DECL | |
134 || TREE_CODE (t) == PARM_DECL | |
135 || TREE_CODE (t) == RESULT_DECL); | |
136 | |
137 ann = ggc_alloc_cleared_var_ann_d (); | |
138 *DECL_VAR_ANN_PTR (t) = ann; | |
139 | |
140 return ann; | |
141 } | |
142 | 60 |
143 /* Renumber all of the gimple stmt uids. */ | 61 /* Renumber all of the gimple stmt uids. */ |
144 | 62 |
145 void | 63 void |
146 renumber_gimple_stmt_uids (void) | 64 renumber_gimple_stmt_uids (void) |
147 { | 65 { |
148 basic_block bb; | 66 basic_block bb; |
149 | 67 |
150 set_gimple_stmt_max_uid (cfun, 0); | 68 set_gimple_stmt_max_uid (cfun, 0); |
151 FOR_ALL_BB (bb) | 69 FOR_ALL_BB_FN (bb, cfun) |
152 { | 70 { |
153 gimple_stmt_iterator bsi; | 71 gimple_stmt_iterator bsi; |
72 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | |
73 { | |
74 gimple *stmt = gsi_stmt (bsi); | |
75 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); | |
76 } | |
154 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | 77 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
155 { | 78 { |
156 gimple stmt = gsi_stmt (bsi); | 79 gimple *stmt = gsi_stmt (bsi); |
157 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); | 80 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); |
158 } | 81 } |
159 } | 82 } |
160 } | 83 } |
161 | 84 |
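The rewritten renumber_gimple_stmt_uids now walks PHI nodes as well as ordinary statements, so every gimple statement in the function receives a UID. A minimal consumer sketch, assuming the usual GCC-internal environment (the helper name is invented for illustration):

    /* Illustrative only: after renumber_gimple_stmt_uids (), statements
       in the same basic block compare by UID in textual order.  UIDs go
       stale once new statements are created, so renumber before relying
       on them.  */
    static bool
    uid_order_before_p (gimple *a, gimple *b)
    {
      return gimple_uid (a) < gimple_uid (b);
    }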
172 { | 95 { |
173 basic_block bb = blocks[i]; | 96 basic_block bb = blocks[i]; |
174 gimple_stmt_iterator bsi; | 97 gimple_stmt_iterator bsi; |
175 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | 98 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
176 { | 99 { |
177 gimple stmt = gsi_stmt (bsi); | 100 gimple *stmt = gsi_stmt (bsi); |
178 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); | 101 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); |
179 } | 102 } |
180 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | 103 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
181 { | 104 { |
182 gimple stmt = gsi_stmt (bsi); | 105 gimple *stmt = gsi_stmt (bsi); |
183 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); | 106 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); |
184 } | 107 } |
185 } | 108 } |
186 } | |
187 | |
188 /* Build a temporary. Make sure and register it to be renamed. */ | |
189 | |
190 tree | |
191 make_rename_temp (tree type, const char *prefix) | |
192 { | |
193 tree t = create_tmp_reg (type, prefix); | |
194 | |
195 if (gimple_referenced_vars (cfun)) | |
196 { | |
197 add_referenced_var (t); | |
198 mark_sym_for_renaming (t); | |
199 } | |
200 | |
201 return t; | |
202 } | 109 } |
203 | 110 |
204 | 111 |
205 | 112 |
206 /*--------------------------------------------------------------------------- | 113 /*--------------------------------------------------------------------------- |
207 Debugging functions | 114 Debugging functions |
208 ---------------------------------------------------------------------------*/ | 115 ---------------------------------------------------------------------------*/ |
209 /* Dump the list of all the referenced variables in the current function to | |
210 FILE. */ | |
211 | |
212 void | |
213 dump_referenced_vars (FILE *file) | |
214 { | |
215 tree var; | |
216 referenced_var_iterator rvi; | |
217 | |
218 fprintf (file, "\nReferenced variables in %s: %u\n\n", | |
219 get_name (current_function_decl), (unsigned) num_referenced_vars); | |
220 | |
221 FOR_EACH_REFERENCED_VAR (cfun, var, rvi) | |
222 { | |
223 fprintf (file, "Variable: "); | |
224 dump_variable (file, var); | |
225 } | |
226 | |
227 fprintf (file, "\n"); | |
228 } | |
229 | |
230 | |
231 /* Dump the list of all the referenced variables to stderr. */ | |
232 | |
233 DEBUG_FUNCTION void | |
234 debug_referenced_vars (void) | |
235 { | |
236 dump_referenced_vars (stderr); | |
237 } | |
238 | |
239 | 116 |
240 /* Dump variable VAR and its may-aliases to FILE. */ | 117 /* Dump variable VAR and its may-aliases to FILE. */ |
241 | 118 |
242 void | 119 void |
243 dump_variable (FILE *file, tree var) | 120 dump_variable (FILE *file, tree var) |
271 fprintf (file, ", is global"); | 148 fprintf (file, ", is global"); |
272 | 149 |
273 if (TREE_THIS_VOLATILE (var)) | 150 if (TREE_THIS_VOLATILE (var)) |
274 fprintf (file, ", is volatile"); | 151 fprintf (file, ", is volatile"); |
275 | 152 |
276 if (cfun && gimple_default_def (cfun, var)) | 153 if (cfun && ssa_default_def (cfun, var)) |
277 { | 154 { |
278 fprintf (file, ", default def: "); | 155 fprintf (file, ", default def: "); |
279 print_generic_expr (file, gimple_default_def (cfun, var), dump_flags); | 156 print_generic_expr (file, ssa_default_def (cfun, var), dump_flags); |
280 } | 157 } |
281 | 158 |
282 if (DECL_INITIAL (var)) | 159 if (DECL_INITIAL (var)) |
283 { | 160 { |
284 fprintf (file, ", initial: "); | 161 fprintf (file, ", initial: "); |
319 fprintf (file, "---------------------------------------------------------\n"); | 196 fprintf (file, "---------------------------------------------------------\n"); |
320 fprintf (file, fmt_str, "", " Number of ", "Memory"); | 197 fprintf (file, fmt_str, "", " Number of ", "Memory"); |
321 fprintf (file, fmt_str, "", " instances ", "used "); | 198 fprintf (file, fmt_str, "", " instances ", "used "); |
322 fprintf (file, "---------------------------------------------------------\n"); | 199 fprintf (file, "---------------------------------------------------------\n"); |
323 | 200 |
324 size = num_referenced_vars * sizeof (tree); | |
325 total += size; | |
326 fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars, | |
327 SCALE (size), LABEL (size)); | |
328 | |
329 size = dfa_stats.num_var_anns * sizeof (struct var_ann_d); | |
330 total += size; | |
331 fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns, | |
332 SCALE (size), LABEL (size)); | |
333 | |
334 size = dfa_stats.num_uses * sizeof (tree *); | 201 size = dfa_stats.num_uses * sizeof (tree *); |
335 total += size; | 202 total += size; |
336 fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses, | 203 fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses, |
337 SCALE (size), LABEL (size)); | 204 SCALE (size), LABEL (size)); |
338 | 205 |
349 size = dfa_stats.num_vdefs * sizeof (tree *); | 216 size = dfa_stats.num_vdefs * sizeof (tree *); |
350 total += size; | 217 total += size; |
351 fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs, | 218 fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs, |
352 SCALE (size), LABEL (size)); | 219 SCALE (size), LABEL (size)); |
353 | 220 |
354 size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi); | 221 size = dfa_stats.num_phis * sizeof (struct gphi); |
355 total += size; | 222 total += size; |
356 fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis, | 223 fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis, |
357 SCALE (size), LABEL (size)); | 224 SCALE (size), LABEL (size)); |
358 | 225 |
359 size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d); | 226 size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d); |
390 | 257 |
391 static void | 258 static void |
392 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED) | 259 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED) |
393 { | 260 { |
394 basic_block bb; | 261 basic_block bb; |
395 referenced_var_iterator vi; | |
396 tree var; | |
397 | 262 |
398 gcc_assert (dfa_stats_p); | 263 gcc_assert (dfa_stats_p); |
399 | 264 |
400 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d)); | 265 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d)); |
401 | 266 |
402 /* Count all the variable annotations. */ | |
403 FOR_EACH_REFERENCED_VAR (cfun, var, vi) | |
404 if (var_ann (var)) | |
405 dfa_stats_p->num_var_anns++; | |
406 | |
407 /* Walk all the statements in the function counting references. */ | 267 /* Walk all the statements in the function counting references. */ |
408 FOR_EACH_BB (bb) | 268 FOR_EACH_BB_FN (bb, cfun) |
409 { | 269 { |
410 gimple_stmt_iterator si; | 270 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si); |
411 | 271 gsi_next (&si)) |
412 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) | 272 { |
413 { | 273 gphi *phi = si.phi (); |
414 gimple phi = gsi_stmt (si); | |
415 dfa_stats_p->num_phis++; | 274 dfa_stats_p->num_phis++; |
416 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi); | 275 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi); |
417 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args) | 276 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args) |
418 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi); | 277 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi); |
419 } | 278 } |
420 | 279 |
421 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si)) | 280 for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si); |
422 { | 281 gsi_next (&si)) |
423 gimple stmt = gsi_stmt (si); | 282 { |
283 gimple *stmt = gsi_stmt (si); | |
424 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF); | 284 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF); |
425 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE); | 285 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE); |
426 dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0; | 286 dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0; |
427 dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0; | 287 dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0; |
428 } | 288 } |
431 | 291 |
432 | 292 |
433 /*--------------------------------------------------------------------------- | 293 /*--------------------------------------------------------------------------- |
434 Miscellaneous helpers | 294 Miscellaneous helpers |
435 ---------------------------------------------------------------------------*/ | 295 ---------------------------------------------------------------------------*/ |
436 /* Callback for walk_tree. Used to collect variables referenced in | |
437 the function. */ | |
438 | |
439 static tree | |
440 find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
441 { | |
442 /* If we are reading the lto info back in, we need to rescan the | |
443 referenced vars. */ | |
444 if (TREE_CODE (*tp) == SSA_NAME) | |
445 add_referenced_var (SSA_NAME_VAR (*tp)); | |
446 | |
447 /* If T is a regular variable that the optimizers are interested | |
448 in, add it to the list of variables. */ | |
449 else if (SSA_VAR_P (*tp)) | |
450 add_referenced_var (*tp); | |
451 | |
452 /* Type, _DECL and constant nodes have no interesting children. | |
453 Ignore them. */ | |
454 else if (IS_TYPE_OR_DECL_P (*tp) || CONSTANT_CLASS_P (*tp)) | |
455 *walk_subtrees = 0; | |
456 | |
457 return NULL_TREE; | |
458 } | |
459 | |
460 /* Find referenced variables in STMT. In contrast with | |
461 find_new_referenced_vars, this function will not mark newly found | |
462 variables for renaming. */ | |
463 | |
464 void | |
465 find_referenced_vars_in (gimple stmt) | |
466 { | |
467 size_t i; | |
468 | |
469 if (gimple_code (stmt) != GIMPLE_PHI) | |
470 { | |
471 for (i = 0; i < gimple_num_ops (stmt); i++) | |
472 walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL); | |
473 } | |
474 else | |
475 { | |
476 walk_tree (gimple_phi_result_ptr (stmt), find_vars_r, NULL, NULL); | |
477 | |
478 for (i = 0; i < gimple_phi_num_args (stmt); i++) | |
479 { | |
480 tree arg = gimple_phi_arg_def (stmt, i); | |
481 walk_tree (&arg, find_vars_r, NULL, NULL); | |
482 } | |
483 } | |
484 } | |
485 | |
486 | |
487 /* Lookup UID in the referenced_vars hashtable and return the associated | |
488 variable. */ | |
489 | |
490 tree | |
491 referenced_var_lookup (struct function *fn, unsigned int uid) | |
492 { | |
493 tree h; | |
494 struct tree_decl_minimal in; | |
495 in.uid = uid; | |
496 h = (tree) htab_find_with_hash (gimple_referenced_vars (fn), &in, uid); | |
497 return h; | |
498 } | |
499 | |
500 /* Check if TO is in the referenced_vars hash table and insert it if not. | |
501 Return true if it required insertion. */ | |
502 | |
503 bool | |
504 referenced_var_check_and_insert (tree to) | |
505 { | |
506 tree h, *loc; | |
507 struct tree_decl_minimal in; | |
508 unsigned int uid = DECL_UID (to); | |
509 | |
510 in.uid = uid; | |
511 h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid); | |
512 if (h) | |
513 { | |
514 /* DECL_UID has already been entered in the table. Verify that it is | |
515 the same entry as TO. See PR 27793. */ | |
516 gcc_assert (h == to); | |
517 return false; | |
518 } | |
519 | |
520 loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun), | |
521 &in, uid, INSERT); | |
522 *loc = to; | |
523 return true; | |
524 } | |
525 | 296 |
526 /* Lookup VAR UID in the default_defs hashtable and return the associated | 297 /* Lookup VAR UID in the default_defs hashtable and return the associated |
527 variable. */ | 298 variable. */ |
528 | 299 |
529 tree | 300 tree |
530 gimple_default_def (struct function *fn, tree var) | 301 ssa_default_def (struct function *fn, tree var) |
531 { | 302 { |
532 struct tree_decl_minimal ind; | 303 struct tree_decl_minimal ind; |
533 struct tree_ssa_name in; | 304 struct tree_ssa_name in; |
534 gcc_assert (SSA_VAR_P (var)); | 305 gcc_assert (VAR_P (var) |
306 || TREE_CODE (var) == PARM_DECL | |
307 || TREE_CODE (var) == RESULT_DECL); | |
308 | |
309 /* Always NULL_TREE for rtl function dumps. */ | |
310 if (!fn->gimple_df) | |
311 return NULL_TREE; | |
312 | |
535 in.var = (tree)&ind; | 313 in.var = (tree)&ind; |
536 ind.uid = DECL_UID (var); | 314 ind.uid = DECL_UID (var); |
537 return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var)); | 315 return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var)); |
538 } | 316 } |
539 | 317 |
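gimple_default_def is renamed to ssa_default_def and hardened: it now accepts only VAR_DECL, PARM_DECL, or RESULT_DECL, and returns NULL_TREE when the function has no gimple_df (rtl dumps). A hedged usage sketch, where parm stands for some PARM_DECL of the current function:

    /* Sketch only: fetch the SSA name carrying PARM's value on entry;
       NULL_TREE means no default definition has been created yet.  */
    tree incoming = ssa_default_def (cfun, parm);
    if (incoming != NULL_TREE)
      gcc_assert (SSA_NAME_IS_DEFAULT_DEF (incoming));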
540 /* Insert the pair VAR's UID, DEF into the default_defs hashtable. */ | 318 /* Insert the pair VAR's UID, DEF into the default_defs hashtable |
319 of function FN. */ | |
541 | 320 |
542 void | 321 void |
543 set_default_def (tree var, tree def) | 322 set_ssa_default_def (struct function *fn, tree var, tree def) |
544 { | 323 { |
545 struct tree_decl_minimal ind; | 324 struct tree_decl_minimal ind; |
546 struct tree_ssa_name in; | 325 struct tree_ssa_name in; |
547 void **loc; | 326 |
548 | 327 gcc_assert (VAR_P (var) |
549 gcc_assert (SSA_VAR_P (var)); | 328 || TREE_CODE (var) == PARM_DECL |
329 || TREE_CODE (var) == RESULT_DECL); | |
550 in.var = (tree)&ind; | 330 in.var = (tree)&ind; |
551 ind.uid = DECL_UID (var); | 331 ind.uid = DECL_UID (var); |
552 if (!def) | 332 if (!def) |
553 { | 333 { |
554 loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in, | 334 tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in, |
555 DECL_UID (var), INSERT); | 335 DECL_UID (var), |
556 gcc_assert (*loc); | 336 NO_INSERT); |
557 htab_remove_elt (DEFAULT_DEFS (cfun), *loc); | 337 if (loc) |
338 { | |
339 SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false; | |
340 DEFAULT_DEFS (fn)->clear_slot (loc); | |
341 } | |
558 return; | 342 return; |
559 } | 343 } |
560 gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var); | 344 gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var); |
561 loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in, | 345 tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in, |
562 DECL_UID (var), INSERT); | 346 DECL_UID (var), INSERT); |
563 | 347 |
564 /* Default definition might be changed by tail call optimization. */ | 348 /* Default definition might be changed by tail call optimization. */ |
565 if (*loc) | 349 if (*loc) |
566 SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false; | 350 SSA_NAME_IS_DEFAULT_DEF (*loc) = false; |
567 *(tree *) loc = def; | |
568 | 351 |
569 /* Mark DEF as the default definition for VAR. */ | 352 /* Mark DEF as the default definition for VAR. */ |
570 SSA_NAME_IS_DEFAULT_DEF (def) = true; | 353 *loc = def; |
571 } | 354 SSA_NAME_IS_DEFAULT_DEF (def) = true; |
572 | 355 } |
573 /* Add VAR to the list of referenced variables if it isn't already there. */ | 356 |
574 | 357 /* Retrieve or create a default definition for VAR. */ |
575 bool | |
576 add_referenced_var (tree var) | |
577 { | |
578 get_var_ann (var); | |
579 gcc_assert (DECL_P (var)); | |
580 | |
581 /* Insert VAR into the referenced_vars hash table if it isn't present. */ |
582 if (referenced_var_check_and_insert (var)) | |
583 { | |
584 /* Scan DECL_INITIAL for pointer variables as they may contain | |
585 address arithmetic referencing the address of other | |
586 variables. As we are only interested in directly referenced | |
587 globals or referenced locals restrict this to initializers | |
588 that can refer to local variables. */ |
589 if (DECL_INITIAL (var) | |
590 && DECL_CONTEXT (var) == current_function_decl) | |
591 walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0); | |
592 | |
593 return true; | |
594 } | |
595 | |
596 return false; | |
597 } | |
598 | |
599 /* Remove VAR from the list. */ | |
600 | |
601 void | |
602 remove_referenced_var (tree var) | |
603 { | |
604 var_ann_t v_ann; | |
605 struct tree_decl_minimal in; | |
606 void **loc; | |
607 unsigned int uid = DECL_UID (var); | |
608 | |
609 /* Preserve var_anns of globals. */ | |
610 if (!is_global_var (var) | |
611 && (v_ann = var_ann (var))) | |
612 { | |
613 ggc_free (v_ann); | |
614 *DECL_VAR_ANN_PTR (var) = NULL; | |
615 } | |
616 gcc_assert (DECL_P (var)); | |
617 in.uid = uid; | |
618 loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid, | |
619 NO_INSERT); | |
620 htab_clear_slot (gimple_referenced_vars (cfun), loc); | |
621 } | |
622 | |
623 | |
624 /* Return the virtual variable associated to the non-scalar variable VAR. */ | |
625 | 358 |
626 tree | 359 tree |
627 get_virtual_var (tree var) | 360 get_or_create_ssa_default_def (struct function *fn, tree var) |
628 { | 361 { |
629 STRIP_NOPS (var); | 362 tree ddef = ssa_default_def (fn, var); |
630 | 363 if (ddef == NULL_TREE) |
631 if (TREE_CODE (var) == SSA_NAME) | 364 { |
632 var = SSA_NAME_VAR (var); | 365 ddef = make_ssa_name_fn (fn, var, gimple_build_nop ()); |
633 | 366 set_ssa_default_def (fn, var, ddef); |
634 while (TREE_CODE (var) == REALPART_EXPR || TREE_CODE (var) == IMAGPART_EXPR | 367 } |
635 || handled_component_p (var)) | 368 return ddef; |
636 var = TREE_OPERAND (var, 0); | |
637 | |
638 /* Treating GIMPLE registers as virtual variables makes no sense. | |
639 Also complain if we couldn't extract a _DECL out of the original | |
640 expression. */ | |
641 gcc_assert (SSA_VAR_P (var)); | |
642 gcc_assert (!is_gimple_reg (var)); | |
643 | |
644 return var; | |
645 } | |
646 | |
647 /* Mark all the naked symbols in STMT for SSA renaming. */ | |
648 | |
649 void | |
650 mark_symbols_for_renaming (gimple stmt) | |
651 { | |
652 tree op; | |
653 ssa_op_iter iter; | |
654 | |
655 update_stmt (stmt); | |
656 | |
657 /* Mark all the operands for renaming. */ | |
658 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS) | |
659 if (DECL_P (op)) | |
660 mark_sym_for_renaming (op); | |
661 } | |
662 | |
663 | |
664 /* Find all variables within the gimplified statement that were not | |
665 previously visible to the function and add them to the referenced | |
666 variables list. */ | |
667 | |
668 static tree | |
669 find_new_referenced_vars_1 (tree *tp, int *walk_subtrees, | |
670 void *data ATTRIBUTE_UNUSED) | |
671 { | |
672 tree t = *tp; | |
673 | |
674 if (TREE_CODE (t) == VAR_DECL && !var_ann (t)) | |
675 { | |
676 add_referenced_var (t); | |
677 mark_sym_for_renaming (t); | |
678 } | |
679 | |
680 if (IS_TYPE_OR_DECL_P (t)) | |
681 *walk_subtrees = 0; | |
682 | |
683 return NULL; | |
684 } | |
685 | |
686 | |
687 /* Find any new referenced variables in STMT. */ | |
688 | |
689 void | |
690 find_new_referenced_vars (gimple stmt) | |
691 { | |
692 walk_gimple_op (stmt, find_new_referenced_vars_1, NULL); | |
693 } | 369 } |
694 | 370 |
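get_or_create_ssa_default_def is new: when no default definition exists it builds one via make_ssa_name_fn with an empty GIMPLE_NOP as the defining statement, which is how default definitions are represented. A hedged sketch, with var standing for any eligible VAR_DECL, PARM_DECL, or RESULT_DECL:

    /* Sketch only: obtain VAR's value on function entry, creating the
       default definition on demand.  */
    tree entry_val = get_or_create_ssa_default_def (cfun, var);
    gcc_assert (SSA_NAME_IS_DEFAULT_DEF (entry_val));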
695 | 371 |
696 /* If EXP is a handled component reference for a structure, return the | 372 /* If EXP is a handled component reference for a structure, return the |
697 base variable. The access range is delimited by bit positions *POFFSET and | 373 base variable. The access range is delimited by bit positions *POFFSET and |
698 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either | 374 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either |
699 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE | 375 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE |
700 and *PMAX_SIZE are equal, the access is non-variable. */ | 376 and *PMAX_SIZE are equal, the access is non-variable. If *PREVERSE is |
377 true, the storage order of the reference is reversed. */ | |
701 | 378 |
702 tree | 379 tree |
703 get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset, | 380 get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset, |
704 HOST_WIDE_INT *psize, | 381 HOST_WIDE_INT *psize, |
705 HOST_WIDE_INT *pmax_size) | 382 HOST_WIDE_INT *pmax_size, |
706 { | 383 bool *preverse) |
707 HOST_WIDE_INT bitsize = -1; | 384 { |
708 HOST_WIDE_INT maxsize = -1; | 385 offset_int bitsize = -1; |
386 offset_int maxsize; | |
709 tree size_tree = NULL_TREE; | 387 tree size_tree = NULL_TREE; |
710 HOST_WIDE_INT bit_offset = 0; | 388 offset_int bit_offset = 0; |
711 bool seen_variable_array_ref = false; | 389 bool seen_variable_array_ref = false; |
712 | 390 |
713 /* First get the final access size from just the outermost expression. */ | 391 /* First get the final access size and the storage order from just the |
392 outermost expression. */ | |
714 if (TREE_CODE (exp) == COMPONENT_REF) | 393 if (TREE_CODE (exp) == COMPONENT_REF) |
715 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); | 394 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); |
716 else if (TREE_CODE (exp) == BIT_FIELD_REF) | 395 else if (TREE_CODE (exp) == BIT_FIELD_REF) |
717 size_tree = TREE_OPERAND (exp, 1); | 396 size_tree = TREE_OPERAND (exp, 1); |
718 else if (!VOID_TYPE_P (TREE_TYPE (exp))) | 397 else if (!VOID_TYPE_P (TREE_TYPE (exp))) |
719 { | 398 { |
720 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | 399 machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); |
721 if (mode == BLKmode) | 400 if (mode == BLKmode) |
722 size_tree = TYPE_SIZE (TREE_TYPE (exp)); | 401 size_tree = TYPE_SIZE (TREE_TYPE (exp)); |
723 else | 402 else |
724 bitsize = GET_MODE_BITSIZE (mode); | 403 bitsize = int (GET_MODE_BITSIZE (mode)); |
725 } | 404 } |
726 if (size_tree != NULL_TREE) | 405 if (size_tree != NULL_TREE |
727 { | 406 && TREE_CODE (size_tree) == INTEGER_CST) |
728 if (! host_integerp (size_tree, 1)) | 407 bitsize = wi::to_offset (size_tree); |
729 bitsize = -1; | 408 |
730 else | 409 *preverse = reverse_storage_order_for_component_p (exp); |
731 bitsize = TREE_INT_CST_LOW (size_tree); | |
732 } | |
733 | 410 |
734 /* Initially, maxsize is the same as the accessed element size. | 411 /* Initially, maxsize is the same as the accessed element size. |
735 In the following it will only grow (or become -1). */ | 412 In the following it will only grow (or become -1). */ |
736 maxsize = bitsize; | 413 maxsize = bitsize; |
737 | 414 |
740 while (1) | 417 while (1) |
741 { | 418 { |
742 switch (TREE_CODE (exp)) | 419 switch (TREE_CODE (exp)) |
743 { | 420 { |
744 case BIT_FIELD_REF: | 421 case BIT_FIELD_REF: |
745 bit_offset += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2)); | 422 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2)); |
746 break; | 423 break; |
747 | 424 |
748 case COMPONENT_REF: | 425 case COMPONENT_REF: |
749 { | 426 { |
750 tree field = TREE_OPERAND (exp, 1); | 427 tree field = TREE_OPERAND (exp, 1); |
751 tree this_offset = component_ref_field_offset (exp); | 428 tree this_offset = component_ref_field_offset (exp); |
752 | 429 |
753 if (this_offset | 430 if (this_offset && TREE_CODE (this_offset) == INTEGER_CST) |
754 && TREE_CODE (this_offset) == INTEGER_CST | |
755 && host_integerp (this_offset, 0)) | |
756 { | 431 { |
757 HOST_WIDE_INT hthis_offset = TREE_INT_CST_LOW (this_offset); | 432 offset_int woffset = (wi::to_offset (this_offset) |
758 hthis_offset *= BITS_PER_UNIT; | 433 << LOG2_BITS_PER_UNIT); |
759 hthis_offset | 434 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field)); |
760 += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field)); | 435 bit_offset += woffset; |
761 bit_offset += hthis_offset; | |
762 | 436 |
763 /* If we had seen a variable array ref already and we just | 437 /* If we had seen a variable array ref already and we just |
764 referenced the last field of a struct or a union member | 438 referenced the last field of a struct or a union member |
765 then we have to adjust maxsize by the padding at the end | 439 then we have to adjust maxsize by the padding at the end |
766 of our field. */ | 440 of our field. */ |
767 if (seen_variable_array_ref | 441 if (seen_variable_array_ref && maxsize != -1) |
768 && maxsize != -1) | |
769 { | 442 { |
770 tree stype = TREE_TYPE (TREE_OPERAND (exp, 0)); | 443 tree stype = TREE_TYPE (TREE_OPERAND (exp, 0)); |
771 tree next = DECL_CHAIN (field); | 444 tree next = DECL_CHAIN (field); |
772 while (next && TREE_CODE (next) != FIELD_DECL) | 445 while (next && TREE_CODE (next) != FIELD_DECL) |
773 next = DECL_CHAIN (next); | 446 next = DECL_CHAIN (next); |
774 if (!next | 447 if (!next |
775 || TREE_CODE (stype) != RECORD_TYPE) | 448 || TREE_CODE (stype) != RECORD_TYPE) |
776 { | 449 { |
777 tree fsize = DECL_SIZE_UNIT (field); | 450 tree fsize = DECL_SIZE_UNIT (field); |
778 tree ssize = TYPE_SIZE_UNIT (stype); | 451 tree ssize = TYPE_SIZE_UNIT (stype); |
779 if (host_integerp (fsize, 0) | 452 if (fsize == NULL |
780 && host_integerp (ssize, 0)) | 453 || TREE_CODE (fsize) != INTEGER_CST |
781 maxsize += ((TREE_INT_CST_LOW (ssize) | 454 || ssize == NULL |
782 - TREE_INT_CST_LOW (fsize)) | 455 || TREE_CODE (ssize) != INTEGER_CST) |
783 * BITS_PER_UNIT - hthis_offset); | 456 maxsize = -1; |
784 else | 457 else |
785 maxsize = -1; | 458 { |
459 offset_int tem = (wi::to_offset (ssize) | |
460 - wi::to_offset (fsize)); | |
461 tem <<= LOG2_BITS_PER_UNIT; | |
462 tem -= woffset; | |
463 maxsize += tem; | |
464 } | |
786 } | 465 } |
787 } | 466 } |
788 } | 467 } |
789 else | 468 else |
790 { | 469 { |
791 tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0))); | 470 tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
792 /* We need to adjust maxsize to the whole structure bitsize. | 471 /* We need to adjust maxsize to the whole structure bitsize. |
793 But we can subtract any constant offset seen so far, | 472 But we can subtract any constant offset seen so far, |
794 because that would get us out of the structure otherwise. */ | 473 because that would get us out of the structure otherwise. */ |
795 if (maxsize != -1 && csize && host_integerp (csize, 1)) | 474 if (maxsize != -1 |
796 maxsize = TREE_INT_CST_LOW (csize) - bit_offset; | 475 && csize |
476 && TREE_CODE (csize) == INTEGER_CST) | |
477 maxsize = wi::to_offset (csize) - bit_offset; | |
797 else | 478 else |
798 maxsize = -1; | 479 maxsize = -1; |
799 } | 480 } |
800 } | 481 } |
801 break; | 482 break; |
806 tree index = TREE_OPERAND (exp, 1); | 487 tree index = TREE_OPERAND (exp, 1); |
807 tree low_bound, unit_size; | 488 tree low_bound, unit_size; |
808 | 489 |
809 /* If the resulting bit-offset is constant, track it. */ | 490 /* If the resulting bit-offset is constant, track it. */ |
810 if (TREE_CODE (index) == INTEGER_CST | 491 if (TREE_CODE (index) == INTEGER_CST |
811 && host_integerp (index, 0) | |
812 && (low_bound = array_ref_low_bound (exp), | 492 && (low_bound = array_ref_low_bound (exp), |
813 host_integerp (low_bound, 0)) | 493 TREE_CODE (low_bound) == INTEGER_CST) |
814 && (unit_size = array_ref_element_size (exp), | 494 && (unit_size = array_ref_element_size (exp), |
815 host_integerp (unit_size, 1))) | 495 TREE_CODE (unit_size) == INTEGER_CST)) |
816 { | 496 { |
817 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index); | 497 offset_int woffset |
818 | 498 = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound), |
819 hindex -= TREE_INT_CST_LOW (low_bound); | 499 TYPE_PRECISION (TREE_TYPE (index))); |
820 hindex *= TREE_INT_CST_LOW (unit_size); | 500 woffset *= wi::to_offset (unit_size); |
821 hindex *= BITS_PER_UNIT; | 501 woffset <<= LOG2_BITS_PER_UNIT; |
822 bit_offset += hindex; | 502 bit_offset += woffset; |
823 | 503 |
824 /* An array ref with a constant index up in the structure | 504 /* An array ref with a constant index up in the structure |
825 hierarchy will constrain the size of any variable array ref | 505 hierarchy will constrain the size of any variable array ref |
826 lower in the access hierarchy. */ | 506 lower in the access hierarchy. */ |
827 seen_variable_array_ref = false; | 507 seen_variable_array_ref = false; |
830 { | 510 { |
831 tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0))); | 511 tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
832 /* We need to adjust maxsize to the whole array bitsize. | 512 /* We need to adjust maxsize to the whole array bitsize. |
833 But we can subtract any constant offset seen so far, | 513 But we can subtract any constant offset seen so far, |
834 because that would get us outside of the array otherwise. */ | 514 because that would get us outside of the array otherwise. */ |
835 if (maxsize != -1 && asize && host_integerp (asize, 1)) | 515 if (maxsize != -1 |
836 maxsize = TREE_INT_CST_LOW (asize) - bit_offset; | 516 && asize |
517 && TREE_CODE (asize) == INTEGER_CST) | |
518 maxsize = wi::to_offset (asize) - bit_offset; | |
837 else | 519 else |
838 maxsize = -1; | 520 maxsize = -1; |
839 | 521 |
840 /* Remember that we have seen an array ref with a variable | 522 /* Remember that we have seen an array ref with a variable |
841 index. */ | 523 index. */ |
852 break; | 534 break; |
853 | 535 |
854 case VIEW_CONVERT_EXPR: | 536 case VIEW_CONVERT_EXPR: |
855 break; | 537 break; |
856 | 538 |
539 case TARGET_MEM_REF: | |
540 /* Via the variable index or index2 we can reach the | |
541 whole object. Still hand back the decl here. */ | |
542 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR | |
543 && (TMR_INDEX (exp) || TMR_INDEX2 (exp))) | |
544 { | |
545 exp = TREE_OPERAND (TMR_BASE (exp), 0); | |
546 bit_offset = 0; | |
547 maxsize = -1; | |
548 goto done; | |
549 } | |
550 /* Fallthru. */ | |
857 case MEM_REF: | 551 case MEM_REF: |
552 /* We need to deal with variable arrays ending structures such as | |
553 struct { int length; int a[1]; } x; x.a[d] | |
554 struct { struct { int a; int b; } a[1]; } x; x.a[d].a | |
555 struct { struct { int a[1]; } a[1]; } x; x.a[0][d], x.a[d][0] | |
556 struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d] | |
557 where we do not know maxsize for variable index accesses to | |
558 the array. The simplest way to conservatively deal with this | |
559 is to punt in the case that offset + maxsize reaches the | |
560 base type boundary. This needs to include possible trailing | |
561 padding that is there for alignment purposes. */ | |
562 if (seen_variable_array_ref | |
563 && maxsize != -1 | |
564 && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE | |
565 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST | |
566 || (bit_offset + maxsize | |
567 == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))))) | |
568 maxsize = -1; | |
569 | |
858 /* Hand back the decl for MEM[&decl, off]. */ | 570 /* Hand back the decl for MEM[&decl, off]. */ |
859 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) | 571 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) |
860 { | 572 { |
861 if (integer_zerop (TREE_OPERAND (exp, 1))) | 573 if (integer_zerop (TREE_OPERAND (exp, 1))) |
862 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | 574 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
863 else | 575 else |
864 { | 576 { |
865 double_int off = mem_ref_offset (exp); | 577 offset_int off = mem_ref_offset (exp); |
866 off = double_int_lshift (off, | 578 off <<= LOG2_BITS_PER_UNIT; |
867 BITS_PER_UNIT == 8 | 579 off += bit_offset; |
868 ? 3 : exact_log2 (BITS_PER_UNIT), | 580 if (wi::fits_shwi_p (off)) |
869 HOST_BITS_PER_DOUBLE_INT, true); | |
870 off = double_int_add (off, shwi_to_double_int (bit_offset)); | |
871 if (double_int_fits_in_shwi_p (off)) | |
872 { | 581 { |
873 bit_offset = double_int_to_shwi (off); | 582 bit_offset = off; |
874 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | 583 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
875 } | 584 } |
876 } | 585 } |
877 } | 586 } |
878 goto done; | 587 goto done; |
879 | 588 |
880 case TARGET_MEM_REF: | |
881 /* Hand back the decl for MEM[&decl, off]. */ | |
882 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR) | |
883 { | |
884 /* Via the variable index or index2 we can reach the | |
885 whole object. */ | |
886 if (TMR_INDEX (exp) || TMR_INDEX2 (exp)) | |
887 { | |
888 exp = TREE_OPERAND (TMR_BASE (exp), 0); | |
889 bit_offset = 0; | |
890 maxsize = -1; | |
891 goto done; | |
892 } | |
893 if (integer_zerop (TMR_OFFSET (exp))) | |
894 exp = TREE_OPERAND (TMR_BASE (exp), 0); | |
895 else | |
896 { | |
897 double_int off = mem_ref_offset (exp); | |
898 off = double_int_lshift (off, | |
899 BITS_PER_UNIT == 8 | |
900 ? 3 : exact_log2 (BITS_PER_UNIT), | |
901 HOST_BITS_PER_DOUBLE_INT, true); | |
902 off = double_int_add (off, shwi_to_double_int (bit_offset)); | |
903 if (double_int_fits_in_shwi_p (off)) | |
904 { | |
905 bit_offset = double_int_to_shwi (off); | |
906 exp = TREE_OPERAND (TMR_BASE (exp), 0); | |
907 } | |
908 } | |
909 } | |
910 goto done; | |
911 | |
912 default: | 589 default: |
913 goto done; | 590 goto done; |
914 } | 591 } |
915 | 592 |
916 exp = TREE_OPERAND (exp, 0); | 593 exp = TREE_OPERAND (exp, 0); |
917 } | 594 } |
595 | |
918 done: | 596 done: |
919 | 597 if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize)) |
920 /* We need to deal with variable arrays ending structures such as | 598 { |
921 struct { int length; int a[1]; } x; x.a[d] | 599 *poffset = 0; |
922 struct { struct { int a; int b; } a[1]; } x; x.a[d].a | 600 *psize = -1; |
923 struct { struct { int a[1]; } a[1]; } x; x.a[0][d], x.a[d][0] | 601 *pmax_size = -1; |
924 struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d] | 602 |
925 where we do not know maxsize for variable index accesses to | 603 return exp; |
926 the array. The simplest way to conservatively deal with this | 604 } |
927 is to punt in the case that offset + maxsize reaches the | 605 |
928 base type boundary. This needs to include possible trailing padding | 606 *psize = bitsize.to_shwi (); |
929 that is there for alignment purposes. | 607 |
930 | 608 if (!wi::fits_shwi_p (bit_offset)) |
931 That is of course only true if the base object is not a decl. */ | 609 { |
610 *poffset = 0; | |
611 *pmax_size = -1; | |
612 | |
613 return exp; | |
614 } | |
615 | |
616 /* In case of a decl or constant base object we can do better. */ | |
932 | 617 |
933 if (DECL_P (exp)) | 618 if (DECL_P (exp)) |
934 { | 619 { |
620 if (flag_unconstrained_commons && VAR_P (exp) && DECL_COMMON (exp)) | |
621 { | |
622 tree sz_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
623 /* If size is unknown, or we have read to the end, assume there | |
624 may be more to the structure than we are told. */ | |
625 if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE | |
626 || (seen_variable_array_ref | |
627 && (sz_tree == NULL_TREE | |
628 || TREE_CODE (sz_tree) != INTEGER_CST | |
629 || (bit_offset + maxsize == wi::to_offset (sz_tree))))) | |
630 maxsize = -1; | |
631 } | |
935 /* If maxsize is unknown adjust it according to the size of the | 632 /* If maxsize is unknown adjust it according to the size of the |
936 base decl. */ | 633 base decl. */ |
634 else if (maxsize == -1 | |
635 && DECL_SIZE (exp) | |
636 && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST) | |
637 maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset; | |
638 } | |
639 else if (CONSTANT_CLASS_P (exp)) | |
640 { | |
641 /* If maxsize is unknown adjust it according to the size of the | |
642 base type constant. */ | |
937 if (maxsize == -1 | 643 if (maxsize == -1 |
938 && host_integerp (DECL_SIZE (exp), 1)) | 644 && TYPE_SIZE (TREE_TYPE (exp)) |
939 maxsize = TREE_INT_CST_LOW (DECL_SIZE (exp)) - bit_offset; | 645 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST) |
940 } | 646 maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))) |
941 else if (seen_variable_array_ref | 647 - bit_offset); |
942 && maxsize != -1 | 648 } |
943 && (!host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1) | |
944 || (bit_offset + maxsize | |
945 == (signed) TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))) | |
946 maxsize = -1; | |
947 | 649 |
948 /* ??? Due to negative offsets in ARRAY_REF we can end up with | 650 /* ??? Due to negative offsets in ARRAY_REF we can end up with |
949 negative bit_offset here. We might want to store a zero offset | 651 negative bit_offset here. We might want to store a zero offset |
950 in this case. */ | 652 in this case. */ |
951 *poffset = bit_offset; | 653 *poffset = bit_offset.to_shwi (); |
952 *psize = bitsize; | 654 if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize)) |
953 *pmax_size = maxsize; | 655 *pmax_size = -1; |
656 else | |
657 { | |
658 *pmax_size = maxsize.to_shwi (); | |
659 if (*poffset > HOST_WIDE_INT_MAX - *pmax_size) | |
660 *pmax_size = -1; | |
661 } | |
662 | |
663 /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT; the callers don't |
664 check for such overflows individually and assume it works. */ | |
665 if (*psize != -1 && *poffset > HOST_WIDE_INT_MAX - *psize) | |
666 { | |
667 *poffset = 0; | |
668 *psize = -1; | |
669 *pmax_size = -1; | |
670 | |
671 return exp; | |
672 } | |
954 | 673 |
955 return exp; | 674 return exp; |
956 } | 675 } |
957 | 676 |
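For intuition about the triple that get_ref_base_and_extent returns, here is a standalone sketch in plain C (not GCC code; the struct layout is invented for the example) computing the same bit offset and bit size by hand for the constant access path x.a[2].b:

    #include <stdio.h>
    #include <stddef.h>

    struct inner { int a; int b; };
    struct outer { long len; struct inner a[4]; };

    int main (void)
    {
      /* Mirrors the COMPONENT_REF / ARRAY_REF cases above: accumulate
         field offsets plus index * element size, scaled to bits.  */
      size_t byte_off = offsetof (struct outer, a)
                        + 2 * sizeof (struct inner)
                        + offsetof (struct inner, b);
      printf ("bit offset = %zu, bit size = %zu\n",
              byte_off * 8, sizeof (int) * 8);
      return 0;
    }

Since every index here is constant, *PSIZE and *PMAX_SIZE would come out equal; a variable array index is what triggers the maxsize widening and punting logic seen above.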
958 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that | 677 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that |
959 denotes the starting address of the memory access EXP. | 678 denotes the starting address of the memory access EXP. |
960 Returns NULL_TREE if the offset is not constant or any component | 679 Returns NULL_TREE if the offset is not constant or any component |
961 is not BITS_PER_UNIT-aligned. */ | 680 is not BITS_PER_UNIT-aligned. |
681 VALUEIZE if non-NULL is used to valueize SSA names. It should return | |
682 its argument or a constant if the argument is known to be constant. */ | |
962 | 683 |
963 tree | 684 tree |
964 get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset) | 685 get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset, |
686 tree (*valueize) (tree)) | |
965 { | 687 { |
966 HOST_WIDE_INT byte_offset = 0; | 688 HOST_WIDE_INT byte_offset = 0; |
967 | 689 |
968 /* Compute cumulative byte-offset for nested component-refs and array-refs, | 690 /* Compute cumulative byte-offset for nested component-refs and array-refs, |
969 and find the ultimate containing object. */ | 691 and find the ultimate containing object. */ |
970 while (1) | 692 while (1) |
971 { | 693 { |
972 switch (TREE_CODE (exp)) | 694 switch (TREE_CODE (exp)) |
973 { | 695 { |
974 case BIT_FIELD_REF: | 696 case BIT_FIELD_REF: |
975 return NULL_TREE; | 697 { |
698 HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2)); | |
699 if (this_off % BITS_PER_UNIT) | |
700 return NULL_TREE; | |
701 byte_offset += this_off / BITS_PER_UNIT; | |
702 } | |
703 break; | |
976 | 704 |
977 case COMPONENT_REF: | 705 case COMPONENT_REF: |
978 { | 706 { |
979 tree field = TREE_OPERAND (exp, 1); | 707 tree field = TREE_OPERAND (exp, 1); |
980 tree this_offset = component_ref_field_offset (exp); | 708 tree this_offset = component_ref_field_offset (exp); |
996 case ARRAY_REF: | 724 case ARRAY_REF: |
997 case ARRAY_RANGE_REF: | 725 case ARRAY_RANGE_REF: |
998 { | 726 { |
999 tree index = TREE_OPERAND (exp, 1); | 727 tree index = TREE_OPERAND (exp, 1); |
1000 tree low_bound, unit_size; | 728 tree low_bound, unit_size; |
729 | |
730 if (valueize | |
731 && TREE_CODE (index) == SSA_NAME) | |
732 index = (*valueize) (index); | |
1001 | 733 |
1002 /* If the resulting bit-offset is constant, track it. */ | 734 /* If the resulting bit-offset is constant, track it. */ |
1003 if (TREE_CODE (index) == INTEGER_CST | 735 if (TREE_CODE (index) == INTEGER_CST |
1004 && (low_bound = array_ref_low_bound (exp), | 736 && (low_bound = array_ref_low_bound (exp), |
1005 TREE_CODE (low_bound) == INTEGER_CST) | 737 TREE_CODE (low_bound) == INTEGER_CST) |
1006 && (unit_size = array_ref_element_size (exp), | 738 && (unit_size = array_ref_element_size (exp), |
1007 TREE_CODE (unit_size) == INTEGER_CST)) | 739 TREE_CODE (unit_size) == INTEGER_CST)) |
1008 { | 740 { |
1009 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index); | 741 offset_int woffset |
1010 | 742 = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound), |
1011 hindex -= TREE_INT_CST_LOW (low_bound); | 743 TYPE_PRECISION (TREE_TYPE (index))); |
1012 hindex *= TREE_INT_CST_LOW (unit_size); | 744 woffset *= wi::to_offset (unit_size); |
1013 byte_offset += hindex; | 745 byte_offset += woffset.to_shwi (); |
1014 } | 746 } |
1015 else | 747 else |
1016 return NULL_TREE; | 748 return NULL_TREE; |
1017 } | 749 } |
1018 break; | 750 break; |
1026 | 758 |
1027 case VIEW_CONVERT_EXPR: | 759 case VIEW_CONVERT_EXPR: |
1028 break; | 760 break; |
1029 | 761 |
1030 case MEM_REF: | 762 case MEM_REF: |
1031 /* Hand back the decl for MEM[&decl, off]. */ | 763 { |
1032 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) | 764 tree base = TREE_OPERAND (exp, 0); |
1033 { | 765 if (valueize |
1034 if (!integer_zerop (TREE_OPERAND (exp, 1))) | 766 && TREE_CODE (base) == SSA_NAME) |
1035 { | 767 base = (*valueize) (base); |
1036 double_int off = mem_ref_offset (exp); | 768 |
1037 gcc_assert (off.high == -1 || off.high == 0); | 769 /* Hand back the decl for MEM[&decl, off]. */ |
1038 byte_offset += double_int_to_shwi (off); | 770 if (TREE_CODE (base) == ADDR_EXPR) |
1039 } | 771 { |
1040 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | 772 if (!integer_zerop (TREE_OPERAND (exp, 1))) |
1041 } | 773 { |
1042 goto done; | 774 offset_int off = mem_ref_offset (exp); |
775 byte_offset += off.to_short_addr (); | |
776 } | |
777 exp = TREE_OPERAND (base, 0); | |
778 } | |
779 goto done; | |
780 } | |
1043 | 781 |
1044 case TARGET_MEM_REF: | 782 case TARGET_MEM_REF: |
1045 /* Hand back the decl for MEM[&decl, off]. */ | 783 { |
1046 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR) | 784 tree base = TREE_OPERAND (exp, 0); |
1047 { | 785 if (valueize |
1048 if (TMR_INDEX (exp) || TMR_INDEX2 (exp)) | 786 && TREE_CODE (base) == SSA_NAME) |
1049 return NULL_TREE; | 787 base = (*valueize) (base); |
1050 if (!integer_zerop (TMR_OFFSET (exp))) | 788 |
1051 { | 789 /* Hand back the decl for MEM[&decl, off]. */ |
1052 double_int off = mem_ref_offset (exp); | 790 if (TREE_CODE (base) == ADDR_EXPR) |
1053 gcc_assert (off.high == -1 || off.high == 0); | 791 { |
1054 byte_offset += double_int_to_shwi (off); | 792 if (TMR_INDEX (exp) || TMR_INDEX2 (exp)) |
1055 } | 793 return NULL_TREE; |
1056 exp = TREE_OPERAND (TMR_BASE (exp), 0); | 794 if (!integer_zerop (TMR_OFFSET (exp))) |
1057 } | 795 { |
1058 goto done; | 796 offset_int off = mem_ref_offset (exp); |
797 byte_offset += off.to_short_addr (); | |
798 } | |
799 exp = TREE_OPERAND (base, 0); | |
800 } | |
801 goto done; | |
802 } | |
1059 | 803 |
1060 default: | 804 default: |
1061 goto done; | 805 goto done; |
1062 } | 806 } |
1063 | 807 |
1067 | 811 |
1068 *poffset = byte_offset; | 812 *poffset = byte_offset; |
1069 return exp; | 813 return exp; |
1070 } | 814 } |
1071 | 815 |
816 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that | |
817 denotes the starting address of the memory access EXP. | |
818 Returns NULL_TREE if the offset is not constant or any component | |
819 is not BITS_PER_UNIT-aligned. */ | |
820 | |
821 tree | |
822 get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset) | |
823 { | |
824 return get_addr_base_and_unit_offset_1 (exp, poffset, NULL); | |
825 } | |
826 | |
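get_addr_base_and_unit_offset_1 generalizes the old entry point with a valueize hook, so callers that track SSA-name values (folding or value-numbering clients) can resolve variable indexes to constants before the offset test. A hedged sketch of such a hook; lookup_known_constant is a hypothetical client-side lookup, not a GCC function:

    /* Sketch only: per the contract above, return a constant if one is
       known, otherwise the argument unchanged.  */
    static tree
    my_valueize (tree name)
    {
      tree val = lookup_known_constant (name);  /* hypothetical helper */
      return val ? val : name;
    }

A caller would then pass it along: get_addr_base_and_unit_offset_1 (ref, &off, my_valueize).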
1072 /* Returns true if STMT references an SSA_NAME that has | 827 /* Returns true if STMT references an SSA_NAME that has |
1073 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */ | 828 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */ |
1074 | 829 |
1075 bool | 830 bool |
1076 stmt_references_abnormal_ssa_name (gimple stmt) | 831 stmt_references_abnormal_ssa_name (gimple *stmt) |
1077 { | 832 { |
1078 ssa_op_iter oi; | 833 ssa_op_iter oi; |
1079 use_operand_p use_p; | 834 use_operand_p use_p; |
1080 | 835 |
1081 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE) | 836 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE) |
1085 } | 840 } |
1086 | 841 |
1087 return false; | 842 return false; |
1088 } | 843 } |
1089 | 844 |
845 /* If STMT takes any abnormal PHI values as input, replace them with | |
846 local copies. */ | |
847 | |
848 void | |
849 replace_abnormal_ssa_names (gimple *stmt) | |
850 { | |
851 ssa_op_iter oi; | |
852 use_operand_p use_p; | |
853 | |
854 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE) | |
855 { | |
856 tree op = USE_FROM_PTR (use_p); | |
857 if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op)) | |
858 { | |
859 gimple_stmt_iterator gsi = gsi_for_stmt (stmt); | |
860 tree new_name = make_ssa_name (TREE_TYPE (op)); | |
861 gassign *assign = gimple_build_assign (new_name, op); | |
862 gsi_insert_before (&gsi, assign, GSI_SAME_STMT); | |
863 SET_USE (use_p, new_name); | |
864 } | |
865 } | |
866 } | |
867 | |
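replace_abnormal_ssa_names is also new. Assuming the usual motivation (uses of SSA names that occur in abnormal PHIs must be isolated so out-of-SSA coalescing across abnormal edges stays possible), the rewrite it performs looks like this before/after sketch in GIMPLE-like notation:

    before:   tmp_3 = x_1(ab) + y_2;
    after:    x_4 = x_1(ab);
              tmp_3 = x_4 + y_2;    /* the use now reads a local copy */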
868 /* Pair of tree and a sorting index, for dump_enumerated_decls. */ | |
869 struct GTY(()) numbered_tree | |
870 { | |
871 tree t; | |
872 int num; | |
873 }; | |
874 | |
875 | |
876 /* Compare two declaration references by their DECL_UID / sequence number. |
877 Called via qsort. */ | |
878 | |
879 static int | |
880 compare_decls_by_uid (const void *pa, const void *pb) | |
881 { | |
882 const numbered_tree *nt_a = ((const numbered_tree *)pa); | |
883 const numbered_tree *nt_b = ((const numbered_tree *)pb); | |
884 | |
885 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t)) | |
886 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t); | |
887 return nt_a->num - nt_b->num; | |
888 } | |
889 | |
890 /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */ | |
891 static tree | |
892 dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data) | |
893 { | |
894 struct walk_stmt_info *wi = (struct walk_stmt_info *) data; | |
895 vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info; | |
896 numbered_tree nt; | |
897 | |
898 if (!DECL_P (*tp)) | |
899 return NULL_TREE; | |
900 nt.t = *tp; | |
901 nt.num = list->length (); | |
902 list->safe_push (nt); | |
903 *walk_subtrees = 0; | |
904 return NULL_TREE; | |
905 } | |
906 | |
907 /* Find all the declarations used by the current function, sort them by uid, | |
908 and emit the sorted list. Each declaration is tagged with a sequence | |
909 number indicating when it was found during statement / tree walking, | |
910 so that TDF_NOUID comparisons of anonymous declarations are still | |
911 meaningful. Where a declaration was encountered more than once, we | |
912 emit only the sequence number of the first encounter. | |
913 FILE is the dump file where to output the list and FLAGS is as in | |
914 print_generic_expr. */ | |
915 void | |
916 dump_enumerated_decls (FILE *file, dump_flags_t flags) | |
917 { | |
918 basic_block bb; | |
919 struct walk_stmt_info wi; | |
920 auto_vec<numbered_tree, 40> decl_list; | |
921 | |
922 memset (&wi, '\0', sizeof (wi)); | |
923 wi.info = (void *) &decl_list; | |
924 FOR_EACH_BB_FN (bb, cfun) | |
925 { | |
926 gimple_stmt_iterator gsi; | |
927 | |
928 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
929 if (!is_gimple_debug (gsi_stmt (gsi))) | |
930 walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi); | |
931 } | |
932 decl_list.qsort (compare_decls_by_uid); | |
933 if (decl_list.length ()) | |
934 { | |
935 unsigned ix; | |
936 numbered_tree *ntp; | |
937 tree last = NULL_TREE; | |
938 | |
939 fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n", | |
940 current_function_name ()); | |
941 FOR_EACH_VEC_ELT (decl_list, ix, ntp) | |
942 { | |
943 if (ntp->t == last) | |
944 continue; | |
945 fprintf (file, "%d: ", ntp->num); | |
946 print_generic_decl (file, ntp->t, flags); | |
947 fprintf (file, "\n"); | |
948 last = ntp->t; | |
949 } | |
950 } | |
951 } |
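The comparator above breaks DECL_UID ties with the push sequence number because qsort is not stable; repeated encounters of the same declaration then stay in first-encounter order, and only the first one is printed. A standalone C sketch of that idiom (data invented for the example):

    #include <stdio.h>
    #include <stdlib.h>

    struct numbered { int key; int num; };  /* num = arrival order */

    static int
    cmp (const void *pa, const void *pb)
    {
      const struct numbered *a = pa, *b = pb;
      if (a->key != b->key)
        return a->key - b->key;
      return a->num - b->num;  /* tie-break keeps arrival order */
    }

    int main (void)
    {
      struct numbered v[] = { {5, 0}, {3, 1}, {5, 2}, {3, 3} };
      qsort (v, 4, sizeof v[0], cmp);
      for (int i = 0; i < 4; i++)
        printf ("(%d,%d) ", v[i].key, v[i].num);
      printf ("\n");  /* prints: (3,1) (3,3) (5,0) (5,2) */
      return 0;
    }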