comparison gcc/tree-ssa-structalias.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 3bfb6c00c1e0
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "tree.h"
35 #include "c-common.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
38 #include "varray.h"
39 #include "c-tree.h"
40 #include "diagnostic.h"
41 #include "toplev.h"
42 #include "gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
55
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
59
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and the operations
63 performed on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
67
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
71
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
75
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
79
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
86
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
89
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
94
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
99
100 Thus,
101 struct f
102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
107
108 looks like
109
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113
114
115 In order to solve the system of set constraints, the following is
116 done:
117
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
120
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
127
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
130
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
133
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
137
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
141
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
145
146 8. The process of walking the graph is iterated until no solution
147 sets change.
148
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
152
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
156
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
159
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
162
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
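/* Editor's illustrative sketch (not part of the original file): a toy,
   self-contained version of the constraint model described above.  It
   assumes just three constraint forms (ADDRESSOF, copy, and a DEREF on
   the RHS) and represents each Sol(x) as a bitmask over variable ids.
   It solves the constraints

     p = &a   a = &b   q = p   r = *q

   to a fixed point, which is the essence of steps 3-8 above.  All names
   here are hypothetical; the real solver in this file is far more
   elaborate (fields, cycle elimination, worklists).  */

#include <stdio.h>

enum { A, B, P, Q, R, NVARS };
static const char *names[NVARS] = { "a", "b", "p", "q", "r" };
static unsigned sol[NVARS];	/* Sol(x) as a bitmask of variable ids.  */

int
main (void)
{
  int changed = 1;

  /* Direct constraints p = &a and a = &b seed the solutions (step 3).  */
  sol[P] |= 1u << A;
  sol[A] |= 1u << B;

  /* Iterate the copy and complex constraints until nothing changes
     (steps 5-8).  */
  while (changed)
    {
      unsigned old_q = sol[Q], old_r = sol[R];

      /* Copy constraint q = p: Sol(q) <- Sol(q) union Sol(p).  */
      sol[Q] |= sol[P];

      /* Complex constraint r = *q: for each v in Sol(q),
	 Sol(r) <- Sol(r) union Sol(v) (step 7).  */
      for (int v = 0; v < NVARS; v++)
	if (sol[Q] & (1u << v))
	  sol[R] |= sol[v];

      changed = (sol[Q] != old_q || sol[R] != old_r);
    }

  /* Prints p -> { a }, q -> { a }, r -> { b }, matching the rules above.  */
  for (int v = 0; v < NVARS; v++)
    {
      printf ("%s -> {", names[v]);
      for (int w = 0; w < NVARS; w++)
	if (sol[v] & (1u << w))
	  printf (" %s", names[w]);
      printf (" }\n");
    }
  return 0;
}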
165
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
168
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
171
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
174
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
177
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
180
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
183
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
187
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
190
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
194
195 static struct constraint_stats
196 {
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
206
207 struct variable_info
208 {
209 /* ID of this variable */
210 unsigned int id;
211
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
215
216 /* True if this is a special variable whose solution set should not be
217 changed. */
218 unsigned int is_special_var:1;
219
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
222
223 /* True for (sub-)fields that represent a whole variable. */
224 unsigned int is_full_var : 1;
225
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
228
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
232
233 /* True if this field may contain pointers. */
234 unsigned int may_have_pointers : 1;
235
236 /* Variable id this was collapsed to due to type unsafety. Zero if
237 this variable was not collapsed. This should be unused completely
238 after build_succ_graph, or something is broken. */
239 unsigned int collapsed_to;
240
241 /* A link to the variable for the next field in this structure. */
242 struct variable_info *next;
243
244 /* Offset of this variable, in bits, from the base variable */
245 unsigned HOST_WIDE_INT offset;
246
247 /* Size of the variable, in bits. */
248 unsigned HOST_WIDE_INT size;
249
250 /* Full size of the base variable, in bits. */
251 unsigned HOST_WIDE_INT fullsize;
252
253 /* Name of this variable */
254 const char *name;
255
256 /* Tree that this variable is associated with. */
257 tree decl;
258
259 /* Points-to set for this variable. */
260 bitmap solution;
261
262 /* Old points-to set for this variable. */
263 bitmap oldsolution;
264 };
265 typedef struct variable_info *varinfo_t;
266
267 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
268 static varinfo_t lookup_vi_for_tree (tree);
269
270 /* Pool of variable info structures. */
271 static alloc_pool variable_info_pool;
272
273 DEF_VEC_P(varinfo_t);
274
275 DEF_VEC_ALLOC_P(varinfo_t, heap);
276
277 /* Table of variable info structures for constraint variables.
278 Indexed directly by variable info id. */
279 static VEC(varinfo_t,heap) *varmap;
280
281 /* Return the varmap element N */
282
283 static inline varinfo_t
284 get_varinfo (unsigned int n)
285 {
286 return VEC_index (varinfo_t, varmap, n);
287 }
288
289 /* Return the varmap element N, following the collapsed_to link. */
290
291 static inline varinfo_t
292 get_varinfo_fc (unsigned int n)
293 {
294 varinfo_t v = VEC_index (varinfo_t, varmap, n);
295
296 if (v->collapsed_to != 0)
297 return get_varinfo (v->collapsed_to);
298 return v;
299 }
300
301 /* Static IDs for the special variables. */
302 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
303 escaped_id = 3, nonlocal_id = 4, callused_id = 5,
304 storedanything_id = 6, integer_id = 7 };
305
306 /* Variable that represents the unknown pointer. */
307 static varinfo_t var_anything;
308 static tree anything_tree;
309
310 /* Variable that represents the NULL pointer. */
311 static varinfo_t var_nothing;
312 static tree nothing_tree;
313
314 /* Variable that represents read only memory. */
315 static varinfo_t var_readonly;
316 static tree readonly_tree;
317
318 /* Variable that represents escaped memory. */
319 static varinfo_t var_escaped;
320 static tree escaped_tree;
321
322 /* Variable that represents nonlocal memory. */
323 static varinfo_t var_nonlocal;
324 static tree nonlocal_tree;
325
326 /* Variable that represents call-used memory. */
327 static varinfo_t var_callused;
328 static tree callused_tree;
329
330 /* Variable that represents variables that are stored to anything. */
331 static varinfo_t var_storedanything;
332 static tree storedanything_tree;
333
334 /* Variable that represents integers. This is used for when people do things
335 like &0->a.b. */
336 static varinfo_t var_integer;
337 static tree integer_tree;
338
339 /* Lookup a heap var for FROM, and return it if we find one. */
340
341 static tree
342 heapvar_lookup (tree from)
343 {
344 struct tree_map *h, in;
345 in.base.from = from;
346
347 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
348 htab_hash_pointer (from));
349 if (h)
350 return h->to;
351 return NULL_TREE;
352 }
353
354 /* Insert a mapping FROM->TO in the heap var for statement
355 hashtable. */
356
357 static void
358 heapvar_insert (tree from, tree to)
359 {
360 struct tree_map *h;
361 void **loc;
362
363 h = GGC_NEW (struct tree_map);
364 h->hash = htab_hash_pointer (from);
365 h->base.from = from;
366 h->to = to;
367 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
368 *(struct tree_map **) loc = h;
369 }
370
371 /* Return a new variable info structure for the variable T, with
372 id ID and name NAME. */
373
374 static varinfo_t
375 new_var_info (tree t, unsigned int id, const char *name)
376 {
377 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
378 tree var;
379
380 ret->id = id;
381 ret->name = name;
382 ret->decl = t;
383 ret->is_artificial_var = false;
384 ret->is_heap_var = false;
385 ret->is_special_var = false;
386 ret->is_unknown_size_var = false;
387 ret->is_full_var = false;
388 ret->may_have_pointers = true;
389 var = t;
390 if (TREE_CODE (var) == SSA_NAME)
391 var = SSA_NAME_VAR (var);
392 ret->no_tbaa_pruning = (DECL_P (var)
393 && POINTER_TYPE_P (TREE_TYPE (var))
394 && DECL_NO_TBAA_P (var));
395 ret->solution = BITMAP_ALLOC (&pta_obstack);
396 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
397 ret->next = NULL;
398 ret->collapsed_to = 0;
399 return ret;
400 }
401
402 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
403
404 /* An expression that appears in a constraint. */
405
406 struct constraint_expr
407 {
408 /* Constraint type. */
409 constraint_expr_type type;
410
411 /* Variable we are referring to in the constraint. */
412 unsigned int var;
413
414 /* Offset, in bits, of this constraint from the beginning of
415 the variables it ends up referring to.
416
417 IOW, in a deref constraint, we would deref, get the result set,
418 then add OFFSET to each member. */
419 unsigned HOST_WIDE_INT offset;
420 };
421
422 typedef struct constraint_expr ce_s;
423 DEF_VEC_O(ce_s);
424 DEF_VEC_ALLOC_O(ce_s, heap);
425 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
426 static void get_constraint_for (tree, VEC(ce_s, heap) **);
427 static void do_deref (VEC (ce_s, heap) **);
428
429 /* Our set constraints are made up of two constraint expressions, one
430 LHS, and one RHS.
431
432 As described in the introduction, our set constraints each represent an
433 operation between set valued variables.
434 */
435 struct constraint
436 {
437 struct constraint_expr lhs;
438 struct constraint_expr rhs;
439 };
440
441 /* List of constraints that we use to build the constraint graph from. */
442
443 static VEC(constraint_t,heap) *constraints;
444 static alloc_pool constraint_pool;
445
446
447 DEF_VEC_I(int);
448 DEF_VEC_ALLOC_I(int, heap);
449
450 /* The constraint graph is represented as an array of bitmaps
451 containing successor nodes. */
452
453 struct constraint_graph
454 {
455 /* Size of this graph, which may be different than the number of
456 nodes in the variable map. */
457 unsigned int size;
458
459 /* Explicit successors of each node. */
460 bitmap *succs;
461
462 /* Implicit predecessors of each node (Used for variable
463 substitution). */
464 bitmap *implicit_preds;
465
466 /* Explicit predecessors of each node (Used for variable substitution). */
467 bitmap *preds;
468
469 /* Indirect cycle representatives, or -1 if the node has no indirect
470 cycles. */
471 int *indirect_cycles;
472
473 /* Representative node for a node. rep[a] == a unless the node has
474 been unified. */
475 unsigned int *rep;
476
477 /* Equivalence class representative for a label. This is used for
478 variable substitution. */
479 int *eq_rep;
480
481 /* Pointer equivalence label for a node. All nodes with the same
482 pointer equivalence label can be unified together at some point
483 (either during constraint optimization or after the constraint
484 graph is built). */
485 unsigned int *pe;
486
487 /* Pointer equivalence representative for a label. This is used to
488 handle nodes that are pointer equivalent but not location
489 equivalent. We can unite these once the addressof constraints
490 are transformed into initial points-to sets. */
491 int *pe_rep;
492
493 /* Pointer equivalence label for each node, used during variable
494 substitution. */
495 unsigned int *pointer_label;
496
497 /* Location equivalence label for each node, used during location
498 equivalence finding. */
499 unsigned int *loc_label;
500
501 /* Pointed-by set for each node, used during location equivalence
502 finding. This is pointed-by rather than pointed-to, because it
503 is constructed using the predecessor graph. */
504 bitmap *pointed_by;
505
506 /* Points to sets for pointer equivalence. This is *not* the actual
507 points-to sets for nodes. */
508 bitmap *points_to;
509
510 /* Bitmap of nodes where the bit is set if the node is a direct
511 node. Used for variable substitution. */
512 sbitmap direct_nodes;
513
514 /* Bitmap of nodes where the bit is set if the node is address
515 taken. Used for variable substitution. */
516 bitmap address_taken;
517
518 /* Vector of complex constraints for each graph node. Complex
519 constraints are those involving dereferences or offsets that are
520 not 0. */
521 VEC(constraint_t,heap) **complex;
522 };
523
524 static constraint_graph_t graph;
525
526 /* During variable substitution and the offline version of indirect
527 cycle finding, we create nodes to represent dereferences and
528 address taken constraints. The two macros below delimit where
529 these extra nodes start and end. */
530 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
531 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
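/* Editor's note: a minimal sketch of the node numbering the two macros
   above imply.  Variable nodes occupy ids [0, FIRST_REF_NODE) and the
   REF ("*x") node for variable V sits at FIRST_REF_NODE + V, so ref
   nodes occupy [FIRST_REF_NODE, LAST_REF_NODE].  The helper name below
   is hypothetical and not part of this file; build_pred_graph simply
   writes FIRST_REF_NODE + lhsvar inline.  */

static inline unsigned int
ref_node_for (unsigned int var)
{
  return FIRST_REF_NODE + var;
}

/* E.g. with 3 variables in varmap, ids 0-2 are the variables themselves
   and ids 3-5 are their dereference nodes.  */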
532
533 /* Return the representative node for NODE, if NODE has been unioned
534 with another NODE.
535 This function performs path compression along the way to finding
536 the representative. */
537
538 static unsigned int
539 find (unsigned int node)
540 {
541 gcc_assert (node < graph->size);
542 if (graph->rep[node] != node)
543 return graph->rep[node] = find (graph->rep[node]);
544 return node;
545 }
546
547 /* Union the FROM and TO nodes into the TO node.
548 Note that at some point in the future, we may want to do
549 union-by-rank, in which case we are going to have to return the
550 node we unified to. */
551
552 static bool
553 unite (unsigned int to, unsigned int from)
554 {
555 gcc_assert (to < graph->size && from < graph->size);
556 if (to != from && graph->rep[from] != to)
557 {
558 graph->rep[from] = to;
559 return true;
560 }
561 return false;
562 }
563
564 /* Create a new constraint consisting of LHS and RHS expressions. */
565
566 static constraint_t
567 new_constraint (const struct constraint_expr lhs,
568 const struct constraint_expr rhs)
569 {
570 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
571 ret->lhs = lhs;
572 ret->rhs = rhs;
573 return ret;
574 }
575
576 /* Print out constraint C to FILE. */
577
578 void
579 dump_constraint (FILE *file, constraint_t c)
580 {
581 if (c->lhs.type == ADDRESSOF)
582 fprintf (file, "&");
583 else if (c->lhs.type == DEREF)
584 fprintf (file, "*");
585 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
586 if (c->lhs.offset != 0)
587 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
588 fprintf (file, " = ");
589 if (c->rhs.type == ADDRESSOF)
590 fprintf (file, "&");
591 else if (c->rhs.type == DEREF)
592 fprintf (file, "*");
593 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
594 if (c->rhs.offset != 0)
595 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
596 fprintf (file, "\n");
597 }
598
599 /* Print out constraint C to stderr. */
600
601 void
602 debug_constraint (constraint_t c)
603 {
604 dump_constraint (stderr, c);
605 }
606
607 /* Print out all constraints to FILE */
608
609 void
610 dump_constraints (FILE *file)
611 {
612 int i;
613 constraint_t c;
614 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
615 dump_constraint (file, c);
616 }
617
618 /* Print out all constraints to stderr. */
619
620 void
621 debug_constraints (void)
622 {
623 dump_constraints (stderr);
624 }
625
626 /* Print out to FILE the edge in the constraint graph that is created by
627 constraint c. The edge may have a label, depending on the type of
628 constraint that it represents. If complex1, e.g. a = *b, then the label
629 is "=*"; if complex2, e.g. *a = b, then the label is "*="; if
630 complex with an offset, e.g. a = b + 8, then the label is "+".
631 Otherwise the edge has no label. */
632
633 void
634 dump_constraint_edge (FILE *file, constraint_t c)
635 {
636 if (c->rhs.type != ADDRESSOF)
637 {
638 const char *src = get_varinfo_fc (c->rhs.var)->name;
639 const char *dst = get_varinfo_fc (c->lhs.var)->name;
640 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
641 /* Due to preprocessing of constraints, instructions like *a = *b are
642 illegal; thus, we do not have to handle such cases. */
643 if (c->lhs.type == DEREF)
644 fprintf (file, " [ label=\"*=\" ] ;\n");
645 else if (c->rhs.type == DEREF)
646 fprintf (file, " [ label=\"=*\" ] ;\n");
647 else
648 {
649 /* We must check the case where the constraint has an offset.
650 In this case, it is treated as a complex constraint. */
651 if (c->rhs.offset != c->lhs.offset)
652 fprintf (file, " [ label=\"+\" ] ;\n");
653 else
654 fprintf (file, " ;\n");
655 }
656 }
657 }
658
659 /* Print the constraint graph in dot format. */
660
661 void
662 dump_constraint_graph (FILE *file)
663 {
664 unsigned int i=0, size;
665 constraint_t c;
666
667 /* Only print the graph if it has already been initialized: */
668 if (!graph)
669 return;
670
671 /* Print the constraints used to produce the constraint graph. The
672 constraints will be printed as comments in the dot file: */
673 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
674 dump_constraints (file);
675 fprintf (file, "*/\n");
676
677 /* Prints the header of the dot file: */
678 fprintf (file, "\n\n// The constraint graph in dot format:\n");
679 fprintf (file, "strict digraph {\n");
680 fprintf (file, " node [\n shape = box\n ]\n");
681 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
682 fprintf (file, "\n // List of nodes in the constraint graph:\n");
683
684 /* The next lines print the nodes in the graph. In order to get the
685 number of nodes in the graph, we must choose the minimum of the
686 length of the vector VEC (varinfo_t, varmap) and graph->size. If the
687 graph has not yet been initialized, then graph->size == 0; otherwise
688 we must only read nodes that have an entry in VEC (varinfo_t, varmap). */
689 size = VEC_length (varinfo_t, varmap);
690 size = size < graph->size ? size : graph->size;
691 for (i = 0; i < size; i++)
692 {
693 const char *name = get_varinfo_fc (graph->rep[i])->name;
694 fprintf (file, " \"%s\" ;\n", name);
695 }
696
697 /* Go over the list of constraints printing the edges in the constraint
698 graph. */
699 fprintf (file, "\n // The constraint edges:\n");
700 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
701 if (c)
702 dump_constraint_edge (file, c);
703
704 /* Prints the tail of the dot file. For now, only the closing bracket. */
705 fprintf (file, "}\n\n\n");
706 }
707
708 /* Print out the constraint graph to stderr. */
709
710 void
711 debug_constraint_graph (void)
712 {
713 dump_constraint_graph (stderr);
714 }
715
716 /* SOLVER FUNCTIONS
717
718 The solver is a simple worklist solver, that works on the following
719 algorithm:
720
721 sbitmap changed_nodes = all zeroes;
722 changed_count = 0;
723 For each node that is not already collapsed:
724 changed_count++;
725 set bit in changed nodes
726
727 while (changed_count > 0)
728 {
729 compute topological ordering for constraint graph
730
731 find and collapse cycles in the constraint graph (updating
732 changed if necessary)
733
734 for each node (n) in the graph in topological order:
735 changed_count--;
736
737 Process each complex constraint associated with the node,
738 updating changed if necessary.
739
740 For each outgoing edge from n, propagate the solution from n to
741 the destination of the edge, updating changed as necessary.
742
743 } */
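/* Editor's illustrative sketch (not part of the original file): the
   worklist loop above, reduced to its skeleton for copy edges only.  It
   is a toy under stated assumptions -- a fixed 4-node graph, bitmasks
   instead of bitmaps, index order standing in for the topological order,
   and no cycle collapsing or complex constraints.  The point is the
   changed/changed_count bookkeeping used by the real solver.  */

#include <stdbool.h>
#include <stdio.h>

#define NNODES 4

static unsigned succs[NNODES];	/* succs[n]: outgoing copy edges of n.  */
static unsigned sol[NNODES];	/* sol[n]: points-to set of n.  */

int
main (void)
{
  bool changed[NNODES] = { false };
  int changed_count = 0;

  /* Seed: node 0 points to {1}; copy edges 0 -> 2 and 2 -> 3.  */
  sol[0] = 1u << 1;
  succs[0] = 1u << 2;
  succs[2] = 1u << 3;

  /* Initially every node is on the worklist.  */
  for (int n = 0; n < NNODES; n++)
    {
      changed[n] = true;
      changed_count++;
    }

  while (changed_count > 0)
    for (int n = 0; n < NNODES; n++)
      {
	if (!changed[n])
	  continue;
	changed[n] = false;
	changed_count--;

	/* Propagate Sol(n) along each outgoing edge n -> m, re-queueing
	   m whenever its solution grows.  */
	for (int m = 0; m < NNODES; m++)
	  if ((succs[n] & (1u << m)) && (sol[m] | sol[n]) != sol[m])
	    {
	      sol[m] |= sol[n];
	      if (!changed[m])
		{
		  changed[m] = true;
		  changed_count++;
		}
	    }
      }

  for (int n = 0; n < NNODES; n++)
    printf ("Sol(%d) = 0x%x\n", n, sol[n]);
  return 0;
}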
744
745 /* Return true if two constraint expressions A and B are equal. */
746
747 static bool
748 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
749 {
750 return a.type == b.type && a.var == b.var && a.offset == b.offset;
751 }
752
753 /* Return true if constraint expression A is less than constraint expression
754 B. This is just arbitrary, but consistent, in order to give them an
755 ordering. */
756
757 static bool
758 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
759 {
760 if (a.type == b.type)
761 {
762 if (a.var == b.var)
763 return a.offset < b.offset;
764 else
765 return a.var < b.var;
766 }
767 else
768 return a.type < b.type;
769 }
770
771 /* Return true if constraint A is less than constraint B. This is just
772 arbitrary, but consistent, in order to give them an ordering. */
773
774 static bool
775 constraint_less (const constraint_t a, const constraint_t b)
776 {
777 if (constraint_expr_less (a->lhs, b->lhs))
778 return true;
779 else if (constraint_expr_less (b->lhs, a->lhs))
780 return false;
781 else
782 return constraint_expr_less (a->rhs, b->rhs);
783 }
784
785 /* Return true if two constraints A and B are equal. */
786
787 static bool
788 constraint_equal (struct constraint a, struct constraint b)
789 {
790 return constraint_expr_equal (a.lhs, b.lhs)
791 && constraint_expr_equal (a.rhs, b.rhs);
792 }
793
794
795 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
796
797 static constraint_t
798 constraint_vec_find (VEC(constraint_t,heap) *vec,
799 struct constraint lookfor)
800 {
801 unsigned int place;
802 constraint_t found;
803
804 if (vec == NULL)
805 return NULL;
806
807 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
808 if (place >= VEC_length (constraint_t, vec))
809 return NULL;
810 found = VEC_index (constraint_t, vec, place);
811 if (!constraint_equal (*found, lookfor))
812 return NULL;
813 return found;
814 }
815
816 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
817
818 static void
819 constraint_set_union (VEC(constraint_t,heap) **to,
820 VEC(constraint_t,heap) **from)
821 {
822 int i;
823 constraint_t c;
824
825 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
826 {
827 if (constraint_vec_find (*to, *c) == NULL)
828 {
829 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
830 constraint_less);
831 VEC_safe_insert (constraint_t, heap, *to, place, c);
832 }
833 }
834 }
835
836 /* Take a solution set SET, add OFFSET to each member of the set, and
837 overwrite SET with the result when done. */
838
839 static void
840 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
841 {
842 bitmap result = BITMAP_ALLOC (&iteration_obstack);
843 unsigned int i;
844 bitmap_iterator bi;
845
846 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
847 {
848 varinfo_t vi = get_varinfo (i);
849
850 /* If this is a variable with just one field, simply set its
851 bit in the result. */
852 if (vi->is_artificial_var
853 || vi->is_unknown_size_var
854 || vi->is_full_var)
855 bitmap_set_bit (result, i);
856 else
857 {
858 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
859 varinfo_t v = first_vi_for_offset (vi, fieldoffset);
860 /* If the result is outside of the variable use the last field. */
861 if (!v)
862 {
863 v = vi;
864 while (v->next != NULL)
865 v = v->next;
866 }
867 bitmap_set_bit (result, v->id);
868 /* If the result is not exactly at fieldoffset include the next
869 field as well. See get_constraint_for_ptr_offset for more
870 rationale. */
871 if (v->offset != fieldoffset
872 && v->next != NULL)
873 bitmap_set_bit (result, v->next->id);
874 }
875 }
876
877 bitmap_copy (set, result);
878 BITMAP_FREE (result);
879 }
880
881 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
882 process. */
883
884 static bool
885 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
886 {
887 if (inc == 0)
888 return bitmap_ior_into (to, from);
889 else
890 {
891 bitmap tmp;
892 bool res;
893
894 tmp = BITMAP_ALLOC (&iteration_obstack);
895 bitmap_copy (tmp, from);
896 solution_set_add (tmp, inc);
897 res = bitmap_ior_into (to, tmp);
898 BITMAP_FREE (tmp);
899 return res;
900 }
901 }
902
903 /* Insert constraint C into the list of complex constraints for graph
904 node VAR. */
905
906 static void
907 insert_into_complex (constraint_graph_t graph,
908 unsigned int var, constraint_t c)
909 {
910 VEC (constraint_t, heap) *complex = graph->complex[var];
911 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
912 constraint_less);
913
914 /* Only insert constraints that do not already exist. */
915 if (place >= VEC_length (constraint_t, complex)
916 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
917 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
918 }
919
920
921 /* Condense two variable nodes into a single variable node, by moving
922 all associated info from FROM to TO. */
923
924 static void
925 merge_node_constraints (constraint_graph_t graph, unsigned int to,
926 unsigned int from)
927 {
928 unsigned int i;
929 constraint_t c;
930
931 gcc_assert (find (from) == to);
932
933 /* Move all complex constraints from the FROM node into the TO node. */
934 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
935 {
936 /* In the complex constraints for node FROM, we may have either
937 a = *FROM or *FROM = a, or an offsetted constraint, which is
938 always added to the rhs node's constraints. */
939
940 if (c->rhs.type == DEREF)
941 c->rhs.var = to;
942 else if (c->lhs.type == DEREF)
943 c->lhs.var = to;
944 else
945 c->rhs.var = to;
946 }
947 constraint_set_union (&graph->complex[to], &graph->complex[from]);
948 VEC_free (constraint_t, heap, graph->complex[from]);
949 graph->complex[from] = NULL;
950 }
951
952
953 /* Remove edges involving NODE from GRAPH. */
954
955 static void
956 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
957 {
958 if (graph->succs[node])
959 BITMAP_FREE (graph->succs[node]);
960 }
961
962 /* Merge GRAPH nodes FROM and TO into node TO. */
963
964 static void
965 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
966 unsigned int from)
967 {
968 if (graph->indirect_cycles[from] != -1)
969 {
970 /* If we have indirect cycles with the from node, and we have
971 none on the to node, the to node has indirect cycles from the
972 from node now that they are unified.
973 If indirect cycles exist on both, unify the nodes that they
974 are in a cycle with, since we know they are in a cycle with
975 each other. */
976 if (graph->indirect_cycles[to] == -1)
977 graph->indirect_cycles[to] = graph->indirect_cycles[from];
978 }
979
980 /* Merge all the successor edges. */
981 if (graph->succs[from])
982 {
983 if (!graph->succs[to])
984 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
985 bitmap_ior_into (graph->succs[to],
986 graph->succs[from]);
987 }
988
989 clear_edges_for_node (graph, from);
990 }
991
992
993 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
994 it doesn't exist in the graph already. */
995
996 static void
997 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
998 unsigned int from)
999 {
1000 if (to == from)
1001 return;
1002
1003 if (!graph->implicit_preds[to])
1004 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1005
1006 if (bitmap_set_bit (graph->implicit_preds[to], from))
1007 stats.num_implicit_edges++;
1008 }
1009
1010 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1011 it doesn't exist in the graph already. Unlike add_graph_edge,
1012 this does not report whether the edge was already present. */
1013
1014 static void
1015 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1016 unsigned int from)
1017 {
1018 if (!graph->preds[to])
1019 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1020 bitmap_set_bit (graph->preds[to], from);
1021 }
1022
1023 /* Add a graph edge to GRAPH, going from FROM to TO if
1024 it doesn't exist in the graph already.
1025 Return false if the edge already existed, true otherwise. */
1026
1027 static bool
1028 add_graph_edge (constraint_graph_t graph, unsigned int to,
1029 unsigned int from)
1030 {
1031 if (to == from)
1032 {
1033 return false;
1034 }
1035 else
1036 {
1037 bool r = false;
1038
1039 if (!graph->succs[from])
1040 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1041 if (bitmap_set_bit (graph->succs[from], to))
1042 {
1043 r = true;
1044 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1045 stats.num_edges++;
1046 }
1047 return r;
1048 }
1049 }
1050
1051
1052 /* Return true if the graph edge DEST -> SRC exists in GRAPH. */
1053
1054 static bool
1055 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1056 unsigned int dest)
1057 {
1058 return (graph->succs[dest]
1059 && bitmap_bit_p (graph->succs[dest], src));
1060 }
1061
1062 /* Initialize the constraint graph structure to contain SIZE nodes. */
1063
1064 static void
1065 init_graph (unsigned int size)
1066 {
1067 unsigned int j;
1068
1069 graph = XCNEW (struct constraint_graph);
1070 graph->size = size;
1071 graph->succs = XCNEWVEC (bitmap, graph->size);
1072 graph->indirect_cycles = XNEWVEC (int, graph->size);
1073 graph->rep = XNEWVEC (unsigned int, graph->size);
1074 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1075 graph->pe = XCNEWVEC (unsigned int, graph->size);
1076 graph->pe_rep = XNEWVEC (int, graph->size);
1077
1078 for (j = 0; j < graph->size; j++)
1079 {
1080 graph->rep[j] = j;
1081 graph->pe_rep[j] = -1;
1082 graph->indirect_cycles[j] = -1;
1083 }
1084 }
1085
1086 /* Build the constraint graph, adding only predecessor edges right now. */
1087
1088 static void
1089 build_pred_graph (void)
1090 {
1091 int i;
1092 constraint_t c;
1093 unsigned int j;
1094
1095 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1096 graph->preds = XCNEWVEC (bitmap, graph->size);
1097 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1098 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1099 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1100 graph->points_to = XCNEWVEC (bitmap, graph->size);
1101 graph->eq_rep = XNEWVEC (int, graph->size);
1102 graph->direct_nodes = sbitmap_alloc (graph->size);
1103 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1104 sbitmap_zero (graph->direct_nodes);
1105
1106 for (j = 0; j < FIRST_REF_NODE; j++)
1107 {
1108 if (!get_varinfo (j)->is_special_var)
1109 SET_BIT (graph->direct_nodes, j);
1110 }
1111
1112 for (j = 0; j < graph->size; j++)
1113 graph->eq_rep[j] = -1;
1114
1115 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1116 graph->indirect_cycles[j] = -1;
1117
1118 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1119 {
1120 struct constraint_expr lhs = c->lhs;
1121 struct constraint_expr rhs = c->rhs;
1122 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1123 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1124
1125 if (lhs.type == DEREF)
1126 {
1127 /* *x = y. */
1128 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1129 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1130 }
1131 else if (rhs.type == DEREF)
1132 {
1133 /* x = *y */
1134 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1135 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1136 else
1137 RESET_BIT (graph->direct_nodes, lhsvar);
1138 }
1139 else if (rhs.type == ADDRESSOF)
1140 {
1141 varinfo_t v;
1142
1143 /* x = &y */
1144 if (graph->points_to[lhsvar] == NULL)
1145 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1146 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1147
1148 if (graph->pointed_by[rhsvar] == NULL)
1149 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1150 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1151
1152 /* Implicitly, *x = y */
1153 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1154
1155 /* All related variables are no longer direct nodes. */
1156 RESET_BIT (graph->direct_nodes, rhsvar);
1157 v = get_varinfo (rhsvar);
1158 if (!v->is_full_var)
1159 {
1160 v = lookup_vi_for_tree (v->decl);
1161 do
1162 {
1163 RESET_BIT (graph->direct_nodes, v->id);
1164 v = v->next;
1165 }
1166 while (v != NULL);
1167 }
1168 bitmap_set_bit (graph->address_taken, rhsvar);
1169 }
1170 else if (lhsvar > anything_id
1171 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1172 {
1173 /* x = y */
1174 add_pred_graph_edge (graph, lhsvar, rhsvar);
1175 /* Implicitly, *x = *y */
1176 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1177 FIRST_REF_NODE + rhsvar);
1178 }
1179 else if (lhs.offset != 0 || rhs.offset != 0)
1180 {
1181 if (rhs.offset != 0)
1182 RESET_BIT (graph->direct_nodes, lhs.var);
1183 else if (lhs.offset != 0)
1184 RESET_BIT (graph->direct_nodes, rhs.var);
1185 }
1186 }
1187 }
1188
1189 /* Build the constraint graph, adding successor edges. */
1190
1191 static void
1192 build_succ_graph (void)
1193 {
1194 unsigned i, t;
1195 constraint_t c;
1196
1197 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1198 {
1199 struct constraint_expr lhs;
1200 struct constraint_expr rhs;
1201 unsigned int lhsvar;
1202 unsigned int rhsvar;
1203
1204 if (!c)
1205 continue;
1206
1207 lhs = c->lhs;
1208 rhs = c->rhs;
1209 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1210 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1211
1212 if (lhs.type == DEREF)
1213 {
1214 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1215 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1216 }
1217 else if (rhs.type == DEREF)
1218 {
1219 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1220 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1221 }
1222 else if (rhs.type == ADDRESSOF)
1223 {
1224 /* x = &y */
1225 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1226 == get_varinfo_fc (rhs.var)->id);
1227 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1228 }
1229 else if (lhsvar > anything_id
1230 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1231 {
1232 add_graph_edge (graph, lhsvar, rhsvar);
1233 }
1234 }
1235
1236 /* Add edges from STOREDANYTHING to all non-direct nodes. */
1237 t = find (storedanything_id);
1238 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1239 {
1240 if (!TEST_BIT (graph->direct_nodes, i))
1241 add_graph_edge (graph, find (i), t);
1242 }
1243 }
1244
1245
1246 /* Changed variables on the last iteration. */
1247 static unsigned int changed_count;
1248 static sbitmap changed;
1249
1250 DEF_VEC_I(unsigned);
1251 DEF_VEC_ALLOC_I(unsigned,heap);
1252
1253
1254 /* Strongly Connected Component visitation info. */
1255
1256 struct scc_info
1257 {
1258 sbitmap visited;
1259 sbitmap deleted;
1260 unsigned int *dfs;
1261 unsigned int *node_mapping;
1262 int current_index;
1263 VEC(unsigned,heap) *scc_stack;
1264 };
1265
1266
1267 /* Recursive routine to find strongly connected components in GRAPH.
1268 SI is the SCC info to store the information in, and N is the id of current
1269 graph node we are processing.
1270
1271 This is Tarjan's strongly connected component finding algorithm, as
1272 modified by Nuutila to keep only non-root nodes on the stack.
1273 The algorithm can be found in "On finding the strongly connected
1274 components in a directed graph" by Esko Nuutila and Eljas
1275 Soisalon-Soininen, in Information Processing Letters volume 49,
1276 number 1, pages 9-14. */
1277
1278 static void
1279 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1280 {
1281 unsigned int i;
1282 bitmap_iterator bi;
1283 unsigned int my_dfs;
1284
1285 SET_BIT (si->visited, n);
1286 si->dfs[n] = si->current_index ++;
1287 my_dfs = si->dfs[n];
1288
1289 /* Visit all the successors. */
1290 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1291 {
1292 unsigned int w;
1293
1294 if (i > LAST_REF_NODE)
1295 break;
1296
1297 w = find (i);
1298 if (TEST_BIT (si->deleted, w))
1299 continue;
1300
1301 if (!TEST_BIT (si->visited, w))
1302 scc_visit (graph, si, w);
1303 {
1304 unsigned int t = find (w);
1305 unsigned int nnode = find (n);
1306 gcc_assert (nnode == n);
1307
1308 if (si->dfs[t] < si->dfs[nnode])
1309 si->dfs[n] = si->dfs[t];
1310 }
1311 }
1312
1313 /* See if any components have been identified. */
1314 if (si->dfs[n] == my_dfs)
1315 {
1316 if (VEC_length (unsigned, si->scc_stack) > 0
1317 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1318 {
1319 bitmap scc = BITMAP_ALLOC (NULL);
1320 bool have_ref_node = n >= FIRST_REF_NODE;
1321 unsigned int lowest_node;
1322 bitmap_iterator bi;
1323
1324 bitmap_set_bit (scc, n);
1325
1326 while (VEC_length (unsigned, si->scc_stack) != 0
1327 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1328 {
1329 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1330
1331 bitmap_set_bit (scc, w);
1332 if (w >= FIRST_REF_NODE)
1333 have_ref_node = true;
1334 }
1335
1336 lowest_node = bitmap_first_set_bit (scc);
1337 gcc_assert (lowest_node < FIRST_REF_NODE);
1338
1339 /* Collapse the SCC nodes into a single node, and mark the
1340 indirect cycles. */
1341 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1342 {
1343 if (i < FIRST_REF_NODE)
1344 {
1345 if (unite (lowest_node, i))
1346 unify_nodes (graph, lowest_node, i, false);
1347 }
1348 else
1349 {
1350 unite (lowest_node, i);
1351 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1352 }
1353 }
1354 }
1355 SET_BIT (si->deleted, n);
1356 }
1357 else
1358 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1359 }
1360
1361 /* Unify node FROM into node TO, updating the changed count if
1362 necessary when UPDATE_CHANGED is true. */
1363
1364 static void
1365 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1366 bool update_changed)
1367 {
1368
1369 gcc_assert (to != from && find (to) == to);
1370 if (dump_file && (dump_flags & TDF_DETAILS))
1371 fprintf (dump_file, "Unifying %s to %s\n",
1372 get_varinfo (from)->name,
1373 get_varinfo (to)->name);
1374
1375 if (update_changed)
1376 stats.unified_vars_dynamic++;
1377 else
1378 stats.unified_vars_static++;
1379
1380 merge_graph_nodes (graph, to, from);
1381 merge_node_constraints (graph, to, from);
1382
1383 if (get_varinfo (from)->no_tbaa_pruning)
1384 get_varinfo (to)->no_tbaa_pruning = true;
1385
1386 /* Mark TO as changed if FROM was changed. If TO was already marked
1387 as changed, decrease the changed count. */
1388
1389 if (update_changed && TEST_BIT (changed, from))
1390 {
1391 RESET_BIT (changed, from);
1392 if (!TEST_BIT (changed, to))
1393 SET_BIT (changed, to);
1394 else
1395 {
1396 gcc_assert (changed_count > 0);
1397 changed_count--;
1398 }
1399 }
1400 if (get_varinfo (from)->solution)
1401 {
1402 /* If the solution changes because of the merging, we need to mark
1403 the variable as changed. */
1404 if (bitmap_ior_into (get_varinfo (to)->solution,
1405 get_varinfo (from)->solution))
1406 {
1407 if (update_changed && !TEST_BIT (changed, to))
1408 {
1409 SET_BIT (changed, to);
1410 changed_count++;
1411 }
1412 }
1413
1414 BITMAP_FREE (get_varinfo (from)->solution);
1415 BITMAP_FREE (get_varinfo (from)->oldsolution);
1416
1417 if (stats.iterations > 0)
1418 {
1419 BITMAP_FREE (get_varinfo (to)->oldsolution);
1420 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1421 }
1422 }
1423 if (valid_graph_edge (graph, to, to))
1424 {
1425 if (graph->succs[to])
1426 bitmap_clear_bit (graph->succs[to], to);
1427 }
1428 }
1429
1430 /* Information needed to compute the topological ordering of a graph. */
1431
1432 struct topo_info
1433 {
1434 /* sbitmap of visited nodes. */
1435 sbitmap visited;
1436 /* Array that stores the topological order of the graph, *in
1437 reverse*. */
1438 VEC(unsigned,heap) *topo_order;
1439 };
1440
1441
1442 /* Initialize and return a topological info structure. */
1443
1444 static struct topo_info *
1445 init_topo_info (void)
1446 {
1447 size_t size = graph->size;
1448 struct topo_info *ti = XNEW (struct topo_info);
1449 ti->visited = sbitmap_alloc (size);
1450 sbitmap_zero (ti->visited);
1451 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1452 return ti;
1453 }
1454
1455
1456 /* Free the topological sort info pointed to by TI. */
1457
1458 static void
1459 free_topo_info (struct topo_info *ti)
1460 {
1461 sbitmap_free (ti->visited);
1462 VEC_free (unsigned, heap, ti->topo_order);
1463 free (ti);
1464 }
1465
1466 /* Visit the graph in topological order, and store the order in the
1467 topo_info structure. */
1468
1469 static void
1470 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1471 unsigned int n)
1472 {
1473 bitmap_iterator bi;
1474 unsigned int j;
1475
1476 SET_BIT (ti->visited, n);
1477
1478 if (graph->succs[n])
1479 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1480 {
1481 if (!TEST_BIT (ti->visited, j))
1482 topo_visit (graph, ti, j);
1483 }
1484
1485 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1486 }
1487
1488 /* Return true if variable N + OFFSET is a legal field of N. */
1489
1490 static bool
1491 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1492 {
1493 varinfo_t ninfo = get_varinfo (n);
1494
1495 /* For things we've globbed to single variables, any offset into the
1496 variable acts like the entire variable, so that it becomes offset
1497 0. */
1498 if (ninfo->is_special_var
1499 || ninfo->is_artificial_var
1500 || ninfo->is_unknown_size_var
1501 || ninfo->is_full_var)
1502 {
1503 *offset = 0;
1504 return true;
1505 }
1506 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1507 }
1508
1509 /* Process a constraint C that represents x = *y, using DELTA as the
1510 starting solution. */
1511
1512 static void
1513 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1514 bitmap delta)
1515 {
1516 unsigned int lhs = c->lhs.var;
1517 bool flag = false;
1518 bitmap sol = get_varinfo (lhs)->solution;
1519 unsigned int j;
1520 bitmap_iterator bi;
1521
1522 /* For x = *ESCAPED and x = *CALLUSED we want to compute the
1523 reachability set of the rhs var. As a pointer to a sub-field
1524 of a variable can also reach all other fields of the variable
1525 we simply have to expand the solution to contain all sub-fields
1526 if one sub-field is contained. */
1527 if (c->rhs.var == find (escaped_id)
1528 || c->rhs.var == find (callused_id))
1529 {
1530 bitmap vars = NULL;
1531 /* In a first pass record all variables we need to add all
1532 sub-fields of. This avoids quadratic behavior. */
1533 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1534 {
1535 varinfo_t v = get_varinfo (j);
1536 if (v->is_full_var)
1537 continue;
1538
1539 v = lookup_vi_for_tree (v->decl);
1540 if (v->next != NULL)
1541 {
1542 if (vars == NULL)
1543 vars = BITMAP_ALLOC (NULL);
1544 bitmap_set_bit (vars, v->id);
1545 }
1546 }
1547 /* In the second pass do the addition to the solution and,
1548 to speed up solving, add it to the delta as well. */
1549 if (vars != NULL)
1550 {
1551 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
1552 {
1553 varinfo_t v = get_varinfo (j);
1554 for (; v != NULL; v = v->next)
1555 {
1556 if (bitmap_set_bit (sol, v->id))
1557 {
1558 flag = true;
1559 bitmap_set_bit (delta, v->id);
1560 }
1561 }
1562 }
1563 BITMAP_FREE (vars);
1564 }
1565 }
1566
1567 if (bitmap_bit_p (delta, anything_id))
1568 {
1569 flag |= bitmap_set_bit (sol, anything_id);
1570 goto done;
1571 }
1572
1573 /* For each variable j in delta (Sol(y)), add
1574 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1575 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1576 {
1577 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1578 if (type_safe (j, &roffset))
1579 {
1580 varinfo_t v;
1581 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1582 unsigned int t;
1583
1584 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1585 /* If the access is outside of the variable we can ignore it. */
1586 if (!v)
1587 continue;
1588 t = find (v->id);
1589
1590 /* Adding edges from the special vars is pointless.
1591 They don't have sets that can change. */
1592 if (get_varinfo (t)->is_special_var)
1593 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1594 /* Merging the solution from ESCAPED needlessly increases
1595 the set. Use ESCAPED as representative instead.
1596 Same for CALLUSED. */
1597 else if (get_varinfo (t)->id == find (escaped_id))
1598 flag |= bitmap_set_bit (sol, escaped_id);
1599 else if (get_varinfo (t)->id == find (callused_id))
1600 flag |= bitmap_set_bit (sol, callused_id);
1601 else if (add_graph_edge (graph, lhs, t))
1602 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1603 }
1604 }
1605
1606 done:
1607 /* If the LHS solution changed, mark the var as changed. */
1608 if (flag)
1609 {
1610 get_varinfo (lhs)->solution = sol;
1611 if (!TEST_BIT (changed, lhs))
1612 {
1613 SET_BIT (changed, lhs);
1614 changed_count++;
1615 }
1616 }
1617 }
1618
1619 /* Process a constraint C that represents *x = y. */
1620
1621 static void
1622 do_ds_constraint (constraint_t c, bitmap delta)
1623 {
1624 unsigned int rhs = c->rhs.var;
1625 bitmap sol = get_varinfo (rhs)->solution;
1626 unsigned int j;
1627 bitmap_iterator bi;
1628
1629 /* Our IL does not allow this. */
1630 gcc_assert (c->rhs.offset == 0);
1631
1632 /* If the solution of y contains ANYTHING simply use the ANYTHING
1633 solution. This avoids needlessly increasing the points-to sets. */
1634 if (bitmap_bit_p (sol, anything_id))
1635 sol = get_varinfo (find (anything_id))->solution;
1636
1637 /* If the solution for x contains ANYTHING we have to merge the
1638 solution of y into all pointer variables which we do via
1639 STOREDANYTHING. */
1640 if (bitmap_bit_p (delta, anything_id))
1641 {
1642 unsigned t = find (storedanything_id);
1643 if (add_graph_edge (graph, t, rhs))
1644 {
1645 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1646 {
1647 if (!TEST_BIT (changed, t))
1648 {
1649 SET_BIT (changed, t);
1650 changed_count++;
1651 }
1652 }
1653 }
1654 return;
1655 }
1656
1657 /* For each member j of delta (Sol(x)), add an edge from y to j and
1658 union Sol(y) into Sol(j) */
1659 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1660 {
1661 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1662 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1663 {
1664 varinfo_t v;
1665 unsigned int t;
1666 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1667
1668 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1669 /* If the access is outside of the variable we can ignore it. */
1670 if (!v)
1671 continue;
1672
1673 if (v->may_have_pointers)
1674 {
1675 t = find (v->id);
1676 if (add_graph_edge (graph, t, rhs))
1677 {
1678 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1679 {
1680 if (t == rhs)
1681 sol = get_varinfo (rhs)->solution;
1682 if (!TEST_BIT (changed, t))
1683 {
1684 SET_BIT (changed, t);
1685 changed_count++;
1686 }
1687 }
1688 }
1689 }
1690 }
1691 }
1692 }
1693
1694 /* Handle a non-simple constraint (simple meaning it requires no
1695 iteration), IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1696
1697 static void
1698 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1699 {
1700 if (c->lhs.type == DEREF)
1701 {
1702 if (c->rhs.type == ADDRESSOF)
1703 {
1704 gcc_unreachable();
1705 }
1706 else
1707 {
1708 /* *x = y */
1709 do_ds_constraint (c, delta);
1710 }
1711 }
1712 else if (c->rhs.type == DEREF)
1713 {
1714 /* x = *y */
1715 if (!(get_varinfo (c->lhs.var)->is_special_var))
1716 do_sd_constraint (graph, c, delta);
1717 }
1718 else
1719 {
1720 bitmap tmp;
1721 bitmap solution;
1722 bool flag = false;
1723
1724 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1725 solution = get_varinfo (c->rhs.var)->solution;
1726 tmp = get_varinfo (c->lhs.var)->solution;
1727
1728 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1729
1730 if (flag)
1731 {
1732 get_varinfo (c->lhs.var)->solution = tmp;
1733 if (!TEST_BIT (changed, c->lhs.var))
1734 {
1735 SET_BIT (changed, c->lhs.var);
1736 changed_count++;
1737 }
1738 }
1739 }
1740 }
1741
1742 /* Initialize and return a new SCC info structure. */
1743
1744 static struct scc_info *
1745 init_scc_info (size_t size)
1746 {
1747 struct scc_info *si = XNEW (struct scc_info);
1748 size_t i;
1749
1750 si->current_index = 0;
1751 si->visited = sbitmap_alloc (size);
1752 sbitmap_zero (si->visited);
1753 si->deleted = sbitmap_alloc (size);
1754 sbitmap_zero (si->deleted);
1755 si->node_mapping = XNEWVEC (unsigned int, size);
1756 si->dfs = XCNEWVEC (unsigned int, size);
1757
1758 for (i = 0; i < size; i++)
1759 si->node_mapping[i] = i;
1760
1761 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1762 return si;
1763 }
1764
1765 /* Free an SCC info structure pointed to by SI */
1766
1767 static void
1768 free_scc_info (struct scc_info *si)
1769 {
1770 sbitmap_free (si->visited);
1771 sbitmap_free (si->deleted);
1772 free (si->node_mapping);
1773 free (si->dfs);
1774 VEC_free (unsigned, heap, si->scc_stack);
1775 free (si);
1776 }
1777
1778
1779 /* Find indirect cycles in GRAPH that occur, using strongly connected
1780 components, and note them in the indirect cycles map.
1781
1782 This technique comes from Ben Hardekopf and Calvin Lin,
1783 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1784 Lines of Code", submitted to PLDI 2007. */
1785
1786 static void
1787 find_indirect_cycles (constraint_graph_t graph)
1788 {
1789 unsigned int i;
1790 unsigned int size = graph->size;
1791 struct scc_info *si = init_scc_info (size);
1792
1793 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1794 if (!TEST_BIT (si->visited, i) && find (i) == i)
1795 scc_visit (graph, si, i);
1796
1797 free_scc_info (si);
1798 }
1799
1800 /* Compute a topological ordering for GRAPH, and store the result in the
1801 topo_info structure TI. */
1802
1803 static void
1804 compute_topo_order (constraint_graph_t graph,
1805 struct topo_info *ti)
1806 {
1807 unsigned int i;
1808 unsigned int size = graph->size;
1809
1810 for (i = 0; i != size; ++i)
1811 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1812 topo_visit (graph, ti, i);
1813 }
1814
1815 /* Structure used for hash value numbering of pointer equivalence
1816 classes. */
1817
1818 typedef struct equiv_class_label
1819 {
1820 hashval_t hashcode;
1821 unsigned int equivalence_class;
1822 bitmap labels;
1823 } *equiv_class_label_t;
1824 typedef const struct equiv_class_label *const_equiv_class_label_t;
1825
1826 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1827 classes. */
1828 static htab_t pointer_equiv_class_table;
1829
1830 /* A hashtable for mapping a bitmap of labels->location equivalence
1831 classes. */
1832 static htab_t location_equiv_class_table;
1833
1834 /* Hash function for an equiv_class_label_t */
1835
1836 static hashval_t
1837 equiv_class_label_hash (const void *p)
1838 {
1839 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1840 return ecl->hashcode;
1841 }
1842
1843 /* Equality function for two equiv_class_label_t's. */
1844
1845 static int
1846 equiv_class_label_eq (const void *p1, const void *p2)
1847 {
1848 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1849 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1850 return bitmap_equal_p (eql1->labels, eql2->labels);
1851 }
1852
1853 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1854 contains. */
1855
1856 static unsigned int
1857 equiv_class_lookup (htab_t table, bitmap labels)
1858 {
1859 void **slot;
1860 struct equiv_class_label ecl;
1861
1862 ecl.labels = labels;
1863 ecl.hashcode = bitmap_hash (labels);
1864
1865 slot = htab_find_slot_with_hash (table, &ecl,
1866 ecl.hashcode, NO_INSERT);
1867 if (!slot)
1868 return 0;
1869 else
1870 return ((equiv_class_label_t) *slot)->equivalence_class;
1871 }
1872
1873
1874 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1875 to TABLE. */
1876
1877 static void
1878 equiv_class_add (htab_t table, unsigned int equivalence_class,
1879 bitmap labels)
1880 {
1881 void **slot;
1882 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1883
1884 ecl->labels = labels;
1885 ecl->equivalence_class = equivalence_class;
1886 ecl->hashcode = bitmap_hash (labels);
1887
1888 slot = htab_find_slot_with_hash (table, ecl,
1889 ecl->hashcode, INSERT);
1890 gcc_assert (!*slot);
1891 *slot = (void *) ecl;
1892 }
1893
1894 /* Perform offline variable substitution.
1895
1896 This is a worst case quadratic time way of identifying variables
1897 that must have equivalent points-to sets, including those caused by
1898 static cycles, and single entry subgraphs, in the constraint graph.
1899
1900 The technique is described in "Exploiting Pointer and Location
1901 Equivalence to Optimize Pointer Analysis", in the 14th International
1902 Static Analysis Symposium (SAS), August 2007. It is known as the
1903 "HU" algorithm, and is equivalent to value numbering the collapsed
1904 constraint graph including evaluating unions.
1905
1906 The general method of finding equivalence classes is as follows:
1907 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1908 Initialize all non-REF nodes to be direct nodes.
1909 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1910 variable}.
1911 For each constraint containing the dereference, we also do the same
1912 thing.
1913
1914 We then compute SCC's in the graph and unify nodes in the same SCC,
1915 including pts sets.
1916
1917 For each non-collapsed node x:
1918 Visit all unvisited explicit incoming edges.
1919 Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
1920 where y->x.
1921 Look up the equivalence class for pts(x).
1922 If we found one, equivalence_class(x) = found class.
1923 Otherwise, equivalence_class(x) = new class, and new_class is
1924 added to the lookup table.
1925
1926 All direct nodes with the same equivalence class can be replaced
1927 with a single representative node.
1928 All unlabeled nodes (label == 0) are not pointers and all edges
1929 involving them can be eliminated.
1930 We perform these optimizations during rewrite_constraints
1931
1932 In addition to pointer equivalence class finding, we also perform
1933 location equivalence class finding. This is the set of variables
1934 that always appear together in points-to sets. We use this to
1935 compress the size of the points-to sets. */
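To make the labeling step above concrete, here is a toy sketch (the single-word bitmask representation and all names are illustrative, not the pass's data structures): nodes whose computed points-to sets compare equal receive the same pointer equivalence label, and an empty set keeps label 0, marking a provable non-pointer.

/* Illustrative only: equal points-to sets => equal labels.  */
#include <stdio.h>

#define NNODES 5
static const unsigned long pts[NNODES] = { 0x3, 0x3, 0x0, 0x5, 0x3 };

int
main (void)
{
  unsigned long seen_set[NNODES];
  int seen_label[NNODES], label[NNODES];
  int nseen = 0, next_label = 1, i, j;

  for (i = 0; i < NNODES; i++)
    {
      label[i] = 0;
      if (pts[i] == 0)
        continue;               /* Empty set: stays a non-pointer, label 0.  */
      for (j = 0; j < nseen; j++)
        if (seen_set[j] == pts[i])
          label[i] = seen_label[j];
      if (label[i] == 0)
        {
          seen_set[nseen] = pts[i];
          seen_label[nseen] = next_label++;
          label[i] = seen_label[nseen];
          nseen++;
        }
    }
  for (i = 0; i < NNODES; i++)
    printf ("node %d: label %d\n", i, label[i]);   /* 1 1 0 2 1 */
  return 0;
}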
1936
1937 /* Current maximum pointer equivalence class id. */
1938 static int pointer_equiv_class;
1939
1940 /* Current maximum location equivalence class id. */
1941 static int location_equiv_class;
1942
1943 /* Recursive routine to find strongly connected components in GRAPH,
1944 and label its nodes with DFS numbers. */
1945
1946 static void
1947 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1948 {
1949 unsigned int i;
1950 bitmap_iterator bi;
1951 unsigned int my_dfs;
1952
1953 gcc_assert (si->node_mapping[n] == n);
1954 SET_BIT (si->visited, n);
1955 si->dfs[n] = si->current_index ++;
1956 my_dfs = si->dfs[n];
1957
1958 /* Visit all the explicit predecessors. */
1959 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1960 {
1961 unsigned int w = si->node_mapping[i];
1962
1963 if (TEST_BIT (si->deleted, w))
1964 continue;
1965
1966 if (!TEST_BIT (si->visited, w))
1967 condense_visit (graph, si, w);
1968 {
1969 unsigned int t = si->node_mapping[w];
1970 unsigned int nnode = si->node_mapping[n];
1971 gcc_assert (nnode == n);
1972
1973 if (si->dfs[t] < si->dfs[nnode])
1974 si->dfs[n] = si->dfs[t];
1975 }
1976 }
1977
1978 /* Visit all the implicit predecessors. */
1979 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1980 {
1981 unsigned int w = si->node_mapping[i];
1982
1983 if (TEST_BIT (si->deleted, w))
1984 continue;
1985
1986 if (!TEST_BIT (si->visited, w))
1987 condense_visit (graph, si, w);
1988 {
1989 unsigned int t = si->node_mapping[w];
1990 unsigned int nnode = si->node_mapping[n];
1991 gcc_assert (nnode == n);
1992
1993 if (si->dfs[t] < si->dfs[nnode])
1994 si->dfs[n] = si->dfs[t];
1995 }
1996 }
1997
1998 /* See if any components have been identified. */
1999 if (si->dfs[n] == my_dfs)
2000 {
2001 while (VEC_length (unsigned, si->scc_stack) != 0
2002 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2003 {
2004 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2005 si->node_mapping[w] = n;
2006
2007 if (!TEST_BIT (graph->direct_nodes, w))
2008 RESET_BIT (graph->direct_nodes, n);
2009
2010 /* Unify our nodes. */
2011 if (graph->preds[w])
2012 {
2013 if (!graph->preds[n])
2014 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2015 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2016 }
2017 if (graph->implicit_preds[w])
2018 {
2019 if (!graph->implicit_preds[n])
2020 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2021 bitmap_ior_into (graph->implicit_preds[n],
2022 graph->implicit_preds[w]);
2023 }
2024 if (graph->points_to[w])
2025 {
2026 if (!graph->points_to[n])
2027 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2028 bitmap_ior_into (graph->points_to[n],
2029 graph->points_to[w]);
2030 }
2031 }
2032 SET_BIT (si->deleted, n);
2033 }
2034 else
2035 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2036 }
2037
2038 /* Label pointer equivalences. */
2039
2040 static void
2041 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2042 {
2043 unsigned int i;
2044 bitmap_iterator bi;
2045 SET_BIT (si->visited, n);
2046
2047 if (!graph->points_to[n])
2048 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2049
2050 /* Label and union our incoming edges' points-to sets. */
2051 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2052 {
2053 unsigned int w = si->node_mapping[i];
2054 if (!TEST_BIT (si->visited, w))
2055 label_visit (graph, si, w);
2056
2057 /* Skip unused edges */
2058 if (w == n || graph->pointer_label[w] == 0)
2059 continue;
2060
2061 if (graph->points_to[w])
2062 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2063 }
2064 /* Indirect nodes get fresh variables. */
2065 if (!TEST_BIT (graph->direct_nodes, n))
2066 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2067
2068 if (!bitmap_empty_p (graph->points_to[n]))
2069 {
2070 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2071 graph->points_to[n]);
2072 if (!label)
2073 {
2074 label = pointer_equiv_class++;
2075 equiv_class_add (pointer_equiv_class_table,
2076 label, graph->points_to[n]);
2077 }
2078 graph->pointer_label[n] = label;
2079 }
2080 }
2081
2082 /* Perform offline variable substitution, discovering equivalence
2083 classes, and eliminating non-pointer variables. */
2084
2085 static struct scc_info *
2086 perform_var_substitution (constraint_graph_t graph)
2087 {
2088 unsigned int i;
2089 unsigned int size = graph->size;
2090 struct scc_info *si = init_scc_info (size);
2091
2092 bitmap_obstack_initialize (&iteration_obstack);
2093 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2094 equiv_class_label_eq, free);
2095 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2096 equiv_class_label_eq, free);
2097 pointer_equiv_class = 1;
2098 location_equiv_class = 1;
2099
2100 /* Condense the nodes, which means finding SCCs, counting incoming
2101 predecessors, and uniting nodes in SCCs. */
2102 for (i = 0; i < FIRST_REF_NODE; i++)
2103 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2104 condense_visit (graph, si, si->node_mapping[i]);
2105
2106 sbitmap_zero (si->visited);
2107 /* Actually label the nodes for pointer equivalences. */
2108 for (i = 0; i < FIRST_REF_NODE; i++)
2109 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2110 label_visit (graph, si, si->node_mapping[i]);
2111
2112 /* Calculate location equivalence labels. */
2113 for (i = 0; i < FIRST_REF_NODE; i++)
2114 {
2115 bitmap pointed_by;
2116 bitmap_iterator bi;
2117 unsigned int j;
2118 unsigned int label;
2119
2120 if (!graph->pointed_by[i])
2121 continue;
2122 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2123
2124 /* Translate the pointed-by mapping for pointer equivalence
2125 labels. */
2126 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2127 {
2128 bitmap_set_bit (pointed_by,
2129 graph->pointer_label[si->node_mapping[j]]);
2130 }
2131 /* The original pointed_by is now dead. */
2132 BITMAP_FREE (graph->pointed_by[i]);
2133
2134 /* Look up the location equivalence label if one exists, or make
2135 one otherwise. */
2136 label = equiv_class_lookup (location_equiv_class_table,
2137 pointed_by);
2138 if (label == 0)
2139 {
2140 label = location_equiv_class++;
2141 equiv_class_add (location_equiv_class_table,
2142 label, pointed_by);
2143 }
2144 else
2145 {
2146 if (dump_file && (dump_flags & TDF_DETAILS))
2147 fprintf (dump_file, "Found location equivalence for node %s\n",
2148 get_varinfo (i)->name);
2149 BITMAP_FREE (pointed_by);
2150 }
2151 graph->loc_label[i] = label;
2152
2153 }
2154
2155 if (dump_file && (dump_flags & TDF_DETAILS))
2156 for (i = 0; i < FIRST_REF_NODE; i++)
2157 {
2158 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2159 fprintf (dump_file,
2160 "Equivalence classes for %s node id %d:%s are pointer: %d"
2161 ", location:%d\n",
2162 direct_node ? "Direct node" : "Indirect node", i,
2163 get_varinfo (i)->name,
2164 graph->pointer_label[si->node_mapping[i]],
2165 graph->loc_label[si->node_mapping[i]]);
2166 }
2167
2168 /* Quickly eliminate our non-pointer variables. */
2169
2170 for (i = 0; i < FIRST_REF_NODE; i++)
2171 {
2172 unsigned int node = si->node_mapping[i];
2173
2174 if (graph->pointer_label[node] == 0)
2175 {
2176 if (dump_file && (dump_flags & TDF_DETAILS))
2177 fprintf (dump_file,
2178 "%s is a non-pointer variable, eliminating edges.\n",
2179 get_varinfo (node)->name);
2180 stats.nonpointer_vars++;
2181 clear_edges_for_node (graph, node);
2182 }
2183 }
2184
2185 return si;
2186 }
2187
2188 /* Free information that was only necessary for variable
2189 substitution. */
2190
2191 static void
2192 free_var_substitution_info (struct scc_info *si)
2193 {
2194 free_scc_info (si);
2195 free (graph->pointer_label);
2196 free (graph->loc_label);
2197 free (graph->pointed_by);
2198 free (graph->points_to);
2199 free (graph->eq_rep);
2200 sbitmap_free (graph->direct_nodes);
2201 htab_delete (pointer_equiv_class_table);
2202 htab_delete (location_equiv_class_table);
2203 bitmap_obstack_release (&iteration_obstack);
2204 }
2205
2206 /* Return an existing node that is equivalent to NODE, which has
2207 equivalence class LABEL, if one exists. Return NODE otherwise. */
2208
2209 static unsigned int
2210 find_equivalent_node (constraint_graph_t graph,
2211 unsigned int node, unsigned int label)
2212 {
2213 /* If the address version of this variable is unused, we can
2214 substitute it for anything else with the same label.
2215 Otherwise, we know the pointers are equivalent, but not the
2216 locations, and we can unite them later. */
2217
2218 if (!bitmap_bit_p (graph->address_taken, node))
2219 {
2220 gcc_assert (label < graph->size);
2221
2222 if (graph->eq_rep[label] != -1)
2223 {
2224 /* Unify the two variables since we know they are equivalent. */
2225 if (unite (graph->eq_rep[label], node))
2226 unify_nodes (graph, graph->eq_rep[label], node, false);
2227 return graph->eq_rep[label];
2228 }
2229 else
2230 {
2231 graph->eq_rep[label] = node;
2232 graph->pe_rep[label] = node;
2233 }
2234 }
2235 else
2236 {
2237 gcc_assert (label < graph->size);
2238 graph->pe[node] = label;
2239 if (graph->pe_rep[label] == -1)
2240 graph->pe_rep[label] = node;
2241 }
2242
2243 return node;
2244 }
2245
2246 /* Unite pointer equivalent but not location equivalent nodes in
2247 GRAPH. This may only be performed once variable substitution is
2248 finished. */
2249
2250 static void
2251 unite_pointer_equivalences (constraint_graph_t graph)
2252 {
2253 unsigned int i;
2254
2255 /* Go through the pointer equivalences and unite them to their
2256 representative, if they aren't already. */
2257 for (i = 0; i < FIRST_REF_NODE; i++)
2258 {
2259 unsigned int label = graph->pe[i];
2260 if (label)
2261 {
2262 int label_rep = graph->pe_rep[label];
2263
2264 if (label_rep == -1)
2265 continue;
2266
2267 label_rep = find (label_rep);
2268 if (label_rep >= 0 && unite (label_rep, find (i)))
2269 unify_nodes (graph, label_rep, i, false);
2270 }
2271 }
2272 }
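unite_pointer_equivalences, eliminate_indirect_cycles and the solver below all lean on the find ()/unite () pair, which is a conventional union-find over variable ids. The following is a self-contained sketch of that idiom with the usual path compression; names and behaviour are illustrative only, since the pass's real unify_nodes additionally merges solution bitmaps and constraint lists.

/* Illustrative union-find, not the pass's own implementation.  */
#include <stdio.h>

#define NVARS 6
static int rep[NVARS];

static int
find_toy (int v)
{
  if (rep[v] != v)
    rep[v] = find_toy (rep[v]);   /* Path compression.  */
  return rep[v];
}

/* Merge FROM into TO; return 1 if they were previously distinct.  */
static int
unite_toy (int to, int from)
{
  to = find_toy (to);
  from = find_toy (from);
  if (to == from)
    return 0;
  rep[from] = to;
  return 1;
}

int
main (void)
{
  int i;
  for (i = 0; i < NVARS; i++)
    rep[i] = i;
  unite_toy (0, 3);
  unite_toy (3, 5);
  printf ("find(5) = %d, find(4) = %d\n", find_toy (5), find_toy (4));  /* 0 and 4 */
  return 0;
}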
2273
2274 /* Move complex constraints to the GRAPH nodes they belong to. */
2275
2276 static void
2277 move_complex_constraints (constraint_graph_t graph)
2278 {
2279 int i;
2280 constraint_t c;
2281
2282 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2283 {
2284 if (c)
2285 {
2286 struct constraint_expr lhs = c->lhs;
2287 struct constraint_expr rhs = c->rhs;
2288
2289 if (lhs.type == DEREF)
2290 {
2291 insert_into_complex (graph, lhs.var, c);
2292 }
2293 else if (rhs.type == DEREF)
2294 {
2295 if (!(get_varinfo (lhs.var)->is_special_var))
2296 insert_into_complex (graph, rhs.var, c);
2297 }
2298 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2299 && (lhs.offset != 0 || rhs.offset != 0))
2300 {
2301 insert_into_complex (graph, rhs.var, c);
2302 }
2303 }
2304 }
2305 }
2306
2307
2308 /* Optimize and rewrite complex constraints while performing
2309 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2310 result of perform_var_substitution. */
2311
2312 static void
2313 rewrite_constraints (constraint_graph_t graph,
2314 struct scc_info *si)
2315 {
2316 int i;
2317 unsigned int j;
2318 constraint_t c;
2319
2320 for (j = 0; j < graph->size; j++)
2321 gcc_assert (find (j) == j);
2322
2323 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2324 {
2325 struct constraint_expr lhs = c->lhs;
2326 struct constraint_expr rhs = c->rhs;
2327 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2328 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2329 unsigned int lhsnode, rhsnode;
2330 unsigned int lhslabel, rhslabel;
2331
2332 lhsnode = si->node_mapping[lhsvar];
2333 rhsnode = si->node_mapping[rhsvar];
2334 lhslabel = graph->pointer_label[lhsnode];
2335 rhslabel = graph->pointer_label[rhsnode];
2336
2337 /* See if it is really a non-pointer variable, and if so, ignore
2338 the constraint. */
2339 if (lhslabel == 0)
2340 {
2341 if (dump_file && (dump_flags & TDF_DETAILS))
2342 {
2343
2344 fprintf (dump_file, "%s is a non-pointer variable, "
2345 "ignoring constraint:",
2346 get_varinfo (lhs.var)->name);
2347 dump_constraint (dump_file, c);
2348 }
2349 VEC_replace (constraint_t, constraints, i, NULL);
2350 continue;
2351 }
2352
2353 if (rhslabel == 0)
2354 {
2355 if (dump_file && (dump_flags & TDF_DETAILS))
2356 {
2357
2358 fprintf (dump_file, "%s is a non-pointer variable, "
2359 "ignoring constraint:",
2360 get_varinfo (rhs.var)->name);
2361 dump_constraint (dump_file, c);
2362 }
2363 VEC_replace (constraint_t, constraints, i, NULL);
2364 continue;
2365 }
2366
2367 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2368 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2369 c->lhs.var = lhsvar;
2370 c->rhs.var = rhsvar;
2371
2372 }
2373 }
2374
2375 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2376 part of an SCC, false otherwise. */
2377
2378 static bool
2379 eliminate_indirect_cycles (unsigned int node)
2380 {
2381 if (graph->indirect_cycles[node] != -1
2382 && !bitmap_empty_p (get_varinfo (node)->solution))
2383 {
2384 unsigned int i;
2385 VEC(unsigned,heap) *queue = NULL;
2386 int queuepos;
2387 unsigned int to = find (graph->indirect_cycles[node]);
2388 bitmap_iterator bi;
2389
2390 /* We can't touch the solution set and call unify_nodes
2391 at the same time, because unify_nodes is going to do
2392 bitmap unions into it. */
2393
2394 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2395 {
2396 if (find (i) == i && i != to)
2397 {
2398 if (unite (to, i))
2399 VEC_safe_push (unsigned, heap, queue, i);
2400 }
2401 }
2402
2403 for (queuepos = 0;
2404 VEC_iterate (unsigned, queue, queuepos, i);
2405 queuepos++)
2406 {
2407 unify_nodes (graph, to, i, true);
2408 }
2409 VEC_free (unsigned, heap, queue);
2410 return true;
2411 }
2412 return false;
2413 }
2414
2415 /* Solve the constraint graph GRAPH using our worklist solver.
2416 This is based on the PW* family of solvers from the "Efficient Field
2417 Sensitive Pointer Analysis for C" paper.
2418 It works by iterating over all the graph nodes, processing the complex
2419 constraints and propagating the copy constraints, until everything stops
2420 changing. This corresponds to steps 6-8 in the solving list given above. */
2421
2422 static void
2423 solve_graph (constraint_graph_t graph)
2424 {
2425 unsigned int size = graph->size;
2426 unsigned int i;
2427 bitmap pts;
2428
2429 changed_count = 0;
2430 changed = sbitmap_alloc (size);
2431 sbitmap_zero (changed);
2432
2433 /* Mark all initial non-collapsed nodes as changed. */
2434 for (i = 0; i < size; i++)
2435 {
2436 varinfo_t ivi = get_varinfo (i);
2437 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2438 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2439 || VEC_length (constraint_t, graph->complex[i]) > 0))
2440 {
2441 SET_BIT (changed, i);
2442 changed_count++;
2443 }
2444 }
2445
2446 /* Allocate a bitmap to be used to store the changed bits. */
2447 pts = BITMAP_ALLOC (&pta_obstack);
2448
2449 while (changed_count > 0)
2450 {
2451 unsigned int i;
2452 struct topo_info *ti = init_topo_info ();
2453 stats.iterations++;
2454
2455 bitmap_obstack_initialize (&iteration_obstack);
2456
2457 compute_topo_order (graph, ti);
2458
2459 while (VEC_length (unsigned, ti->topo_order) != 0)
2460 {
2461
2462 i = VEC_pop (unsigned, ti->topo_order);
2463
2464 /* If this variable is not a representative, skip it. */
2465 if (find (i) != i)
2466 continue;
2467
2468 /* In certain indirect cycle cases, we may merge this
2469 variable to another. */
2470 if (eliminate_indirect_cycles (i) && find (i) != i)
2471 continue;
2472
2473 /* If the node has changed, we need to process the
2474 complex constraints and outgoing edges again. */
2475 if (TEST_BIT (changed, i))
2476 {
2477 unsigned int j;
2478 constraint_t c;
2479 bitmap solution;
2480 VEC(constraint_t,heap) *complex = graph->complex[i];
2481 bool solution_empty;
2482
2483 RESET_BIT (changed, i);
2484 changed_count--;
2485
2486 /* Compute the changed set of solution bits. */
2487 bitmap_and_compl (pts, get_varinfo (i)->solution,
2488 get_varinfo (i)->oldsolution);
2489
2490 if (bitmap_empty_p (pts))
2491 continue;
2492
2493 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2494
2495 solution = get_varinfo (i)->solution;
2496 solution_empty = bitmap_empty_p (solution);
2497
2498 /* Process the complex constraints */
2499 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2500 {
2501 /* XXX: This is going to unsort the constraints in
2502 some cases, which will occasionally add duplicate
2503 constraints during unification. This does not
2504 affect correctness. */
2505 c->lhs.var = find (c->lhs.var);
2506 c->rhs.var = find (c->rhs.var);
2507
2508 /* The only complex constraint that can change our
2509 solution to non-empty, given an empty solution,
2510 is a constraint where the lhs side is receiving
2511 some set from elsewhere. */
2512 if (!solution_empty || c->lhs.type != DEREF)
2513 do_complex_constraint (graph, c, pts);
2514 }
2515
2516 solution_empty = bitmap_empty_p (solution);
2517
2518 if (!solution_empty
2519 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2520 && i != find (escaped_id)
2521 && i != find (callused_id))
2522 {
2523 bitmap_iterator bi;
2524
2525 /* Propagate solution to all successors. */
2526 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2527 0, j, bi)
2528 {
2529 bitmap tmp;
2530 bool flag;
2531
2532 unsigned int to = find (j);
2533 tmp = get_varinfo (to)->solution;
2534 flag = false;
2535
2536 /* Don't try to propagate to ourselves. */
2537 if (to == i)
2538 continue;
2539
2540 flag = set_union_with_increment (tmp, pts, 0);
2541
2542 if (flag)
2543 {
2544 get_varinfo (to)->solution = tmp;
2545 if (!TEST_BIT (changed, to))
2546 {
2547 SET_BIT (changed, to);
2548 changed_count++;
2549 }
2550 }
2551 }
2552 }
2553 }
2554 }
2555 free_topo_info (ti);
2556 bitmap_obstack_release (&iteration_obstack);
2557 }
2558
2559 BITMAP_FREE (pts);
2560 sbitmap_free (changed);
2561 bitmap_obstack_release (&oldpta_obstack);
2562 }
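Stripped of complex constraints, indirect-cycle elimination and the topological ordering, the loop above is a plain fixpoint over copy edges: whenever a node's solution grows, its successors are reconsidered. A toy version with one-word solutions follows; it is illustrative only, and unlike the real solver it re-propagates whole solutions rather than just the changed delta.

/* Illustrative fixpoint propagation over copy edges only.  */
#include <stdio.h>

#define N 4
static unsigned long sol[N] = { 0x1, 0x2, 0x0, 0x0 };
/* A copy edge a -> b means sol[b] |= sol[a].  */
static const int edge[][2] = { {0, 2}, {1, 2}, {2, 3} };
#define NEDGES ((int) (sizeof edge / sizeof edge[0]))

int
main (void)
{
  int changed = 1, i;
  while (changed)
    {
      changed = 0;
      for (i = 0; i < NEDGES; i++)
        {
          unsigned long merged = sol[edge[i][1]] | sol[edge[i][0]];
          if (merged != sol[edge[i][1]])
            {
              sol[edge[i][1]] = merged;
              changed = 1;        /* Like SET_BIT (changed, to).  */
            }
        }
    }
  for (i = 0; i < N; i++)
    printf ("sol[%d] = 0x%lx\n", i, sol[i]);   /* 0x1 0x2 0x3 0x3 */
  return 0;
}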
2563
2564 /* Map from trees to variable infos. */
2565 static struct pointer_map_t *vi_for_tree;
2566
2567
2568 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2569
2570 static void
2571 insert_vi_for_tree (tree t, varinfo_t vi)
2572 {
2573 void **slot = pointer_map_insert (vi_for_tree, t);
2574 gcc_assert (vi);
2575 gcc_assert (*slot == NULL);
2576 *slot = vi;
2577 }
2578
2579 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2580 exist in the map, return NULL; otherwise return the varinfo we found. */
2581
2582 static varinfo_t
2583 lookup_vi_for_tree (tree t)
2584 {
2585 void **slot = pointer_map_contains (vi_for_tree, t);
2586 if (slot == NULL)
2587 return NULL;
2588
2589 return (varinfo_t) *slot;
2590 }
2591
2592 /* Return a printable name for DECL */
2593
2594 static const char *
2595 alias_get_name (tree decl)
2596 {
2597 const char *res = get_name (decl);
2598 char *temp;
2599 int num_printed = 0;
2600
2601 if (res != NULL)
2602 return res;
2603
2604 res = "NULL";
2605 if (!dump_file)
2606 return res;
2607
2608 if (TREE_CODE (decl) == SSA_NAME)
2609 {
2610 num_printed = asprintf (&temp, "%s_%u",
2611 alias_get_name (SSA_NAME_VAR (decl)),
2612 SSA_NAME_VERSION (decl));
2613 }
2614 else if (DECL_P (decl))
2615 {
2616 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2617 }
2618 if (num_printed > 0)
2619 {
2620 res = ggc_strdup (temp);
2621 free (temp);
2622 }
2623 return res;
2624 }
2625
2626 /* Find the variable info for tree T in the map.
2627 If T doesn't exist in the map, create an entry for it and return it. */
2628
2629 static varinfo_t
2630 get_vi_for_tree (tree t)
2631 {
2632 void **slot = pointer_map_contains (vi_for_tree, t);
2633 if (slot == NULL)
2634 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2635
2636 return (varinfo_t) *slot;
2637 }
2638
2639 /* Get a constraint expression for a new temporary variable. */
2640
2641 static struct constraint_expr
2642 get_constraint_exp_for_temp (tree t)
2643 {
2644 struct constraint_expr cexpr;
2645
2646 gcc_assert (SSA_VAR_P (t));
2647
2648 cexpr.type = SCALAR;
2649 cexpr.var = get_vi_for_tree (t)->id;
2650 cexpr.offset = 0;
2651
2652 return cexpr;
2653 }
2654
2655 /* Get a constraint expression vector from an SSA_VAR_P node.
2656 If address_p is true, the address of the result will be taken. */
2657
2658 static void
2659 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2660 {
2661 struct constraint_expr cexpr;
2662 varinfo_t vi;
2663
2664 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2665 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2666
2667 /* For parameters, get at the points-to set for the actual parm
2668 decl. */
2669 if (TREE_CODE (t) == SSA_NAME
2670 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2671 && SSA_NAME_IS_DEFAULT_DEF (t))
2672 {
2673 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2674 return;
2675 }
2676
2677 vi = get_vi_for_tree (t);
2678 cexpr.var = vi->id;
2679 cexpr.type = SCALAR;
2680 cexpr.offset = 0;
2681 /* If we determine the result is "anything", and we know this is readonly,
2682 say it points to readonly memory instead. */
2683 if (cexpr.var == anything_id && TREE_READONLY (t))
2684 {
2685 gcc_unreachable ();
2686 cexpr.type = ADDRESSOF;
2687 cexpr.var = readonly_id;
2688 }
2689
2690 /* If we are not taking the address of the constraint expr, add all
2691 sub-fields of the variable as well. */
2692 if (!address_p)
2693 {
2694 for (; vi; vi = vi->next)
2695 {
2696 cexpr.var = vi->id;
2697 VEC_safe_push (ce_s, heap, *results, &cexpr);
2698 }
2699 return;
2700 }
2701
2702 VEC_safe_push (ce_s, heap, *results, &cexpr);
2703 }
2704
2705 /* Process constraint T, performing various simplifications and then
2706 adding it to our list of overall constraints. */
2707
2708 static void
2709 process_constraint (constraint_t t)
2710 {
2711 struct constraint_expr rhs = t->rhs;
2712 struct constraint_expr lhs = t->lhs;
2713
2714 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2715 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2716
2717 /* ANYTHING == ANYTHING is pointless. */
2718 if (lhs.var == anything_id && rhs.var == anything_id)
2719 return;
2720
2721 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2722 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2723 {
2724 rhs = t->lhs;
2725 t->lhs = t->rhs;
2726 t->rhs = rhs;
2727 process_constraint (t);
2728 }
2729 /* This can happen in our IR with things like n->a = *p */
2730 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2731 {
2732 /* Split into tmp = *rhs, *lhs = tmp */
2733 tree rhsdecl = get_varinfo (rhs.var)->decl;
2734 tree pointertype = TREE_TYPE (rhsdecl);
2735 tree pointedtotype = TREE_TYPE (pointertype);
2736 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2737 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2738
2739 process_constraint (new_constraint (tmplhs, rhs));
2740 process_constraint (new_constraint (lhs, tmplhs));
2741 }
2742 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2743 {
2744 /* Split into tmp = &rhs, *lhs = tmp */
2745 tree rhsdecl = get_varinfo (rhs.var)->decl;
2746 tree pointertype = TREE_TYPE (rhsdecl);
2747 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2748 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2749
2750 process_constraint (new_constraint (tmplhs, rhs));
2751 process_constraint (new_constraint (lhs, tmplhs));
2752 }
2753 else
2754 {
2755 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2756 VEC_safe_push (constraint_t, heap, constraints, t);
2757 }
2758 }
2759
2760 /* Return true if TYPE is a type that could contain pointers. */
2761
2762 static bool
2763 type_could_have_pointers (tree type)
2764 {
2765 if (POINTER_TYPE_P (type))
2766 return true;
2767
2768 if (TREE_CODE (type) == ARRAY_TYPE)
2769 return type_could_have_pointers (TREE_TYPE (type));
2770
2771 return AGGREGATE_TYPE_P (type);
2772 }
2773
2774 /* Return true if T is a variable of a type that could contain
2775 pointers. */
2776
2777 static bool
2778 could_have_pointers (tree t)
2779 {
2780 return type_could_have_pointers (TREE_TYPE (t));
2781 }
2782
2783 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2784 structure. */
2785
2786 static HOST_WIDE_INT
2787 bitpos_of_field (const tree fdecl)
2788 {
2789
2790 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2791 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2792 return -1;
2793
2794 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2795 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2796 }
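For a concrete instance of the computation above (with a made-up struct): a field whose DECL_FIELD_OFFSET is 4 bytes and whose DECL_FIELD_BIT_OFFSET is 0 lands at bit 4 * 8 + 0 = 32. For an ordinary non-bitfield member the same number falls out of offsetof, as in this sketch.

/* Illustrative only.  */
#include <stdio.h>
#include <stddef.h>

struct s { char c; int i; };

int
main (void)
{
  /* The bit position of a non-bitfield member is its byte offset times 8;
     with the common 4-byte alignment of int this prints 32.  */
  printf ("%lu\n", (unsigned long) (offsetof (struct s, i) * 8));
  return 0;
}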
2797
2798
2799 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2800 resulting constraint expressions in *RESULTS. */
2801
2802 static void
2803 get_constraint_for_ptr_offset (tree ptr, tree offset,
2804 VEC (ce_s, heap) **results)
2805 {
2806 struct constraint_expr *c;
2807 unsigned int j, n;
2808 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
2809
2810 /* If we do not do field-sensitive PTA, adding offsets to pointers
2811 does not change the points-to solution. */
2812 if (!use_field_sensitive)
2813 {
2814 get_constraint_for (ptr, results);
2815 return;
2816 }
2817
2818 /* If the offset is not a non-negative integer constant that fits
2819 in a HOST_WIDE_INT, we have to fall back to a conservative
2820 solution which includes all sub-fields of all pointed-to
2821 variables of ptr.
2822 ??? As we do not have the ability to express this, fall back
2823 to anything. */
2824 if (!host_integerp (offset, 1))
2825 {
2826 struct constraint_expr temp;
2827 temp.var = anything_id;
2828 temp.type = SCALAR;
2829 temp.offset = 0;
2830 VEC_safe_push (ce_s, heap, *results, &temp);
2831 return;
2832 }
2833
2834 /* Make sure the bit-offset also fits. */
2835 rhsunitoffset = TREE_INT_CST_LOW (offset);
2836 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2837 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2838 {
2839 struct constraint_expr temp;
2840 temp.var = anything_id;
2841 temp.type = SCALAR;
2842 temp.offset = 0;
2843 VEC_safe_push (ce_s, heap, *results, &temp);
2844 return;
2845 }
2846
2847 get_constraint_for (ptr, results);
2848 if (rhsoffset == 0)
2849 return;
2850
2851 /* As we are eventually appending to the solution do not use
2852 VEC_iterate here. */
2853 n = VEC_length (ce_s, *results);
2854 for (j = 0; j < n; j++)
2855 {
2856 varinfo_t curr;
2857 c = VEC_index (ce_s, *results, j);
2858 curr = get_varinfo (c->var);
2859
2860 if (c->type == ADDRESSOF
2861 && !curr->is_full_var)
2862 {
2863 varinfo_t temp, curr = get_varinfo (c->var);
2864
2865 /* Search the sub-field which overlaps with the
2866 pointed-to offset. As we deal with positive offsets
2867 only, we can start the search from the current variable. */
2868 temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
2869
2870 /* If the result is outside of the variable we have to provide
2871 a conservative result, as the variable is still reachable
2872 from the resulting pointer (even though it technically
2873 cannot point to anything). The last sub-field is such
2874 a conservative result.
2875 ??? If we always had a sub-field for &object + 1 then
2876 we could represent this in a more precise way. */
2877 if (temp == NULL)
2878 {
2879 temp = curr;
2880 while (temp->next != NULL)
2881 temp = temp->next;
2882 continue;
2883 }
2884
2885 /* If the found variable is not exactly at the pointed to
2886 result, we have to include the next variable in the
2887 solution as well. Otherwise two increments by offset / 2
2888 do not result in the same or a conservative superset
2889 solution. */
2890 if (temp->offset != curr->offset + rhsoffset
2891 && temp->next != NULL)
2892 {
2893 struct constraint_expr c2;
2894 c2.var = temp->next->id;
2895 c2.type = ADDRESSOF;
2896 c2.offset = 0;
2897 VEC_safe_push (ce_s, heap, *results, &c2);
2898 }
2899 c->var = temp->id;
2900 c->offset = 0;
2901 }
2902 else if (c->type == ADDRESSOF
2903 /* If this varinfo represents a full variable just use it. */
2904 && curr->is_full_var)
2905 c->offset = 0;
2906 else
2907 c->offset = rhsoffset;
2908 }
2909 }
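The rhsunitoffset != rhsoffset / BITS_PER_UNIT test a few lines up is the standard multiply-then-divide-back check for unsigned overflow: if the product wrapped around, dividing it by one factor no longer recovers the other. A standalone sketch of the same check, with made-up values and names:

/* Illustrative overflow check, mirroring the bit-offset test above.  */
#include <stdio.h>
#include <limits.h>

static int
mul_overflows (unsigned long a, unsigned long b)
{
  unsigned long prod = a * b;            /* Unsigned wraparound is defined.  */
  return b != 0 && prod / b != a;
}

int
main (void)
{
  printf ("%d\n", mul_overflows (10, 8));            /* 0: 80 bits fits.    */
  printf ("%d\n", mul_overflows (ULONG_MAX / 4, 8)); /* 1: product wraps.   */
  return 0;
}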
2910
2911
2912 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2913 If address_p is true, the address of the result will be taken. */
2914
2915 static void
2916 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2917 bool address_p)
2918 {
2919 tree orig_t = t;
2920 HOST_WIDE_INT bitsize = -1;
2921 HOST_WIDE_INT bitmaxsize = -1;
2922 HOST_WIDE_INT bitpos;
2923 tree forzero;
2924 struct constraint_expr *result;
2925
2926 /* Some people like to do cute things like take the address of
2927 &0->a.b */
2928 forzero = t;
2929 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2930 forzero = TREE_OPERAND (forzero, 0);
2931
2932 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2933 {
2934 struct constraint_expr temp;
2935
2936 temp.offset = 0;
2937 temp.var = integer_id;
2938 temp.type = SCALAR;
2939 VEC_safe_push (ce_s, heap, *results, &temp);
2940 return;
2941 }
2942
2943 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2944
2945 /* Pretend to take the address of the base, we'll take care of
2946 adding the required subset of sub-fields below. */
2947 get_constraint_for_1 (t, results, true);
2948 gcc_assert (VEC_length (ce_s, *results) == 1);
2949 result = VEC_last (ce_s, *results);
2950
2951 /* This can also happen due to weird offsetof type macros. */
2952 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2953 result->type = SCALAR;
2954
2955 if (result->type == SCALAR
2956 && get_varinfo (result->var)->is_full_var)
2957 /* For single-field vars do not bother about the offset. */
2958 result->offset = 0;
2959 else if (result->type == SCALAR)
2960 {
2961 /* In languages like C, you can access one past the end of an
2962 array. You aren't allowed to dereference it, so we can
2963 ignore this constraint. When we handle pointer subtraction,
2964 we may have to do something cute here. */
2965
2966 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2967 && bitmaxsize != 0)
2968 {
2969 /* It's also not true that the constraint will actually start at the
2970 right offset; it may start in some padding. We only care about
2971 setting the constraint to the first actual field it touches, so
2972 walk to find it. */
2973 struct constraint_expr cexpr = *result;
2974 varinfo_t curr;
2975 VEC_pop (ce_s, *results);
2976 cexpr.offset = 0;
2977 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2978 {
2979 if (ranges_overlap_p (curr->offset, curr->size,
2980 bitpos, bitmaxsize))
2981 {
2982 cexpr.var = curr->id;
2983 VEC_safe_push (ce_s, heap, *results, &cexpr);
2984 if (address_p)
2985 break;
2986 }
2987 }
2988 /* If we are going to take the address of this field then
2989 to be able to compute reachability correctly add at least
2990 the last field of the variable. */
2991 if (address_p
2992 && VEC_length (ce_s, *results) == 0)
2993 {
2994 curr = get_varinfo (cexpr.var);
2995 while (curr->next != NULL)
2996 curr = curr->next;
2997 cexpr.var = curr->id;
2998 VEC_safe_push (ce_s, heap, *results, &cexpr);
2999 }
3000 else
3001 /* Assert that we found *some* field there. The user couldn't be
3002 accessing *only* padding. */
3003 /* Still the user could access one past the end of an array
3004 embedded in a struct resulting in accessing *only* padding. */
3005 gcc_assert (VEC_length (ce_s, *results) >= 1
3006 || ref_contains_array_ref (orig_t));
3007 }
3008 else if (bitmaxsize == 0)
3009 {
3010 if (dump_file && (dump_flags & TDF_DETAILS))
3011 fprintf (dump_file, "Access to zero-sized part of variable, "
3012 "ignoring\n");
3013 }
3014 else
3015 if (dump_file && (dump_flags & TDF_DETAILS))
3016 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3017 }
3018 else if (bitmaxsize == -1)
3019 {
3020 /* We can't handle DEREF constraints with unknown size, we'll
3021 get the wrong answer. Punt and return anything. */
3022 result->var = anything_id;
3023 result->offset = 0;
3024 }
3025 else
3026 result->offset = bitpos;
3027 }
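The field walk above keeps a sub-field whenever its [offset, offset + size) bit range overlaps the accessed [bitpos, bitpos + bitmaxsize) range. For two half-open ranges that test reduces to the usual pair of comparisons; the sketch below uses made-up numbers, and the real ranges_overlap_p additionally copes with corner cases such as zero or unknown sizes.

/* Illustrative only: one common definition of bit-range overlap.  */
#include <stdio.h>

static int
overlaps_toy (unsigned long pos1, unsigned long size1,
              unsigned long pos2, unsigned long size2)
{
  return pos1 < pos2 + size2 && pos2 < pos1 + size1;
}

int
main (void)
{
  /* A field at bits [32,64) overlaps an access at [40,48) but not [0,32).  */
  printf ("%d %d\n", overlaps_toy (32, 32, 40, 8), overlaps_toy (32, 32, 0, 32));
  return 0;
}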
3028
3029
3030 /* Dereference the constraint expression CONS, and return the result.
3031 DEREF (ADDRESSOF) = SCALAR
3032 DEREF (SCALAR) = DEREF
3033 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3034 This is needed so that we can handle dereferencing DEREF constraints. */
3035
3036 static void
3037 do_deref (VEC (ce_s, heap) **constraints)
3038 {
3039 struct constraint_expr *c;
3040 unsigned int i = 0;
3041
3042 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3043 {
3044 if (c->type == SCALAR)
3045 c->type = DEREF;
3046 else if (c->type == ADDRESSOF)
3047 c->type = SCALAR;
3048 else if (c->type == DEREF)
3049 {
3050 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
3051 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
3052 process_constraint (new_constraint (tmplhs, *c));
3053 c->var = tmplhs.var;
3054 }
3055 else
3056 gcc_unreachable ();
3057 }
3058 }
3059
3060 /* Given a tree T, return the constraint expression for it. */
3061
3062 static void
3063 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3064 {
3065 struct constraint_expr temp;
3066
3067 /* x = integer is all glommed to a single variable, which doesn't
3068 point to anything by itself. That is, of course, unless it is an
3069 integer constant being treated as a pointer, in which case, we
3070 will return that this is really the addressof anything. This
3071 happens below, since it will fall into the default case. The only
3072 case we know something about an integer treated like a pointer is
3073 when it is the NULL pointer, and then we just say it points to
3074 NULL.
3075
3076 Do not do that if -fno-delete-null-pointer-checks though, because
3077 in that case *NULL does not fail, so it _should_ alias *anything.
3078 It is not worth adding a new option or renaming the existing one,
3079 since this case is relatively obscure. */
3080 if (flag_delete_null_pointer_checks
3081 && TREE_CODE (t) == INTEGER_CST
3082 && integer_zerop (t))
3083 {
3084 temp.var = nothing_id;
3085 temp.type = ADDRESSOF;
3086 temp.offset = 0;
3087 VEC_safe_push (ce_s, heap, *results, &temp);
3088 return;
3089 }
3090
3091 /* String constants are read-only. */
3092 if (TREE_CODE (t) == STRING_CST)
3093 {
3094 temp.var = readonly_id;
3095 temp.type = SCALAR;
3096 temp.offset = 0;
3097 VEC_safe_push (ce_s, heap, *results, &temp);
3098 return;
3099 }
3100
3101 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3102 {
3103 case tcc_expression:
3104 {
3105 switch (TREE_CODE (t))
3106 {
3107 case ADDR_EXPR:
3108 {
3109 struct constraint_expr *c;
3110 unsigned int i;
3111 tree exp = TREE_OPERAND (t, 0);
3112
3113 get_constraint_for_1 (exp, results, true);
3114
3115 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3116 {
3117 if (c->type == DEREF)
3118 c->type = SCALAR;
3119 else
3120 c->type = ADDRESSOF;
3121 }
3122 return;
3123 }
3124 break;
3125 default:;
3126 }
3127 break;
3128 }
3129 case tcc_reference:
3130 {
3131 switch (TREE_CODE (t))
3132 {
3133 case INDIRECT_REF:
3134 {
3135 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3136 do_deref (results);
3137 return;
3138 }
3139 case ARRAY_REF:
3140 case ARRAY_RANGE_REF:
3141 case COMPONENT_REF:
3142 get_constraint_for_component_ref (t, results, address_p);
3143 return;
3144 default:;
3145 }
3146 break;
3147 }
3148 case tcc_exceptional:
3149 {
3150 switch (TREE_CODE (t))
3151 {
3152 case SSA_NAME:
3153 {
3154 get_constraint_for_ssa_var (t, results, address_p);
3155 return;
3156 }
3157 default:;
3158 }
3159 break;
3160 }
3161 case tcc_declaration:
3162 {
3163 get_constraint_for_ssa_var (t, results, address_p);
3164 return;
3165 }
3166 default:;
3167 }
3168
3169 /* The default fallback is a constraint from anything. */
3170 temp.type = ADDRESSOF;
3171 temp.var = anything_id;
3172 temp.offset = 0;
3173 VEC_safe_push (ce_s, heap, *results, &temp);
3174 }
3175
3176 /* Given a gimple tree T, return the constraint expression vector for it. */
3177
3178 static void
3179 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3180 {
3181 gcc_assert (VEC_length (ce_s, *results) == 0);
3182
3183 get_constraint_for_1 (t, results, false);
3184 }
3185
3186 /* Handle the structure copy case where we have a simple structure copy
3187 between LHS and RHS that is of SIZE (in bits)
3188
3189 For each field of the lhs variable (lhsfield)
3190 For each field of the rhs variable at lhsfield.offset (rhsfield)
3191 add the constraint lhsfield = rhsfield
3192
3193 If we fail due to some kind of type unsafety or other thing we
3194 can't handle, return false. We expect the caller to collapse the
3195 variable in that case. */
3196
3197 static bool
3198 do_simple_structure_copy (const struct constraint_expr lhs,
3199 const struct constraint_expr rhs,
3200 const unsigned HOST_WIDE_INT size)
3201 {
3202 varinfo_t p = get_varinfo (lhs.var);
3203 unsigned HOST_WIDE_INT pstart, last;
3204 pstart = p->offset;
3205 last = p->offset + size;
3206 for (; p && p->offset < last; p = p->next)
3207 {
3208 varinfo_t q;
3209 struct constraint_expr templhs = lhs;
3210 struct constraint_expr temprhs = rhs;
3211 unsigned HOST_WIDE_INT fieldoffset;
3212
3213 templhs.var = p->id;
3214 q = get_varinfo (temprhs.var);
3215 fieldoffset = p->offset - pstart;
3216 q = first_vi_for_offset (q, q->offset + fieldoffset);
3217 if (!q)
3218 return false;
3219 temprhs.var = q->id;
3220 process_constraint (new_constraint (templhs, temprhs));
3221 }
3222 return true;
3223 }
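The loop above walks the LHS fields in offset order and, for each one, looks up the RHS sub-field at the same relative offset before emitting a field-to-field copy constraint. The pairing rule on its own, over made-up flat field layouts, looks like this:

/* Illustrative only: pair fields of two structures by relative offset.  */
#include <stdio.h>

#define NFIELDS 3
static const unsigned lhs_off[NFIELDS] = { 0, 32, 64 };
static const unsigned rhs_off[NFIELDS] = { 0, 32, 64 };

int
main (void)
{
  int i, j;
  for (i = 0; i < NFIELDS; i++)
    {
      unsigned rel = lhs_off[i] - lhs_off[0];
      for (j = 0; j < NFIELDS; j++)
        if (rhs_off[j] - rhs_off[0] == rel)
          printf ("lhs field at bit %u = rhs field at bit %u\n",
                  lhs_off[i], rhs_off[j]);
    }
  return 0;
}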
3224
3225
3226 /* Handle the structure copy case where we have a structure copy between an
3227 aggregate on the LHS and a dereference of a pointer on the RHS
3228 that is of SIZE (in bits)
3229
3230 For each field of the lhs variable (lhsfield)
3231 rhs.offset = lhsfield->offset
3232 add the constraint lhsfield = rhs
3233 */
3234
3235 static void
3236 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3237 const struct constraint_expr rhs,
3238 const unsigned HOST_WIDE_INT size)
3239 {
3240 varinfo_t p = get_varinfo (lhs.var);
3241 unsigned HOST_WIDE_INT pstart,last;
3242 pstart = p->offset;
3243 last = p->offset + size;
3244
3245 for (; p && p->offset < last; p = p->next)
3246 {
3247 varinfo_t q;
3248 struct constraint_expr templhs = lhs;
3249 struct constraint_expr temprhs = rhs;
3250 unsigned HOST_WIDE_INT fieldoffset;
3251
3252
3253 if (templhs.type == SCALAR)
3254 templhs.var = p->id;
3255 else
3256 templhs.offset = p->offset;
3257
3258 q = get_varinfo (temprhs.var);
3259 fieldoffset = p->offset - pstart;
3260 temprhs.offset += fieldoffset;
3261 process_constraint (new_constraint (templhs, temprhs));
3262 }
3263 }
3264
3265 /* Handle the structure copy case where we have a structure copy
3266 between an aggregate on the RHS and a dereference of a pointer on
3267 the LHS that is of SIZE (in bits)
3268
3269 For each field of the rhs variable (rhsfield)
3270 lhs.offset = rhsfield->offset
3271 add the constraint lhs = rhsfield
3272 */
3273
3274 static void
3275 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3276 const struct constraint_expr rhs,
3277 const unsigned HOST_WIDE_INT size)
3278 {
3279 varinfo_t p = get_varinfo (rhs.var);
3280 unsigned HOST_WIDE_INT pstart,last;
3281 pstart = p->offset;
3282 last = p->offset + size;
3283
3284 for (; p && p->offset < last; p = p->next)
3285 {
3286 varinfo_t q;
3287 struct constraint_expr templhs = lhs;
3288 struct constraint_expr temprhs = rhs;
3289 unsigned HOST_WIDE_INT fieldoffset;
3290
3291
3292 if (temprhs.type == SCALAR)
3293 temprhs.var = p->id;
3294 else
3295 temprhs.offset = p->offset;
3296
3297 q = get_varinfo (templhs.var);
3298 fieldoffset = p->offset - pstart;
3299 templhs.offset += fieldoffset;
3300 process_constraint (new_constraint (templhs, temprhs));
3301 }
3302 }
3303
3304 /* Sometimes, frontends like to give us bad type information. This
3305 function will collapse all the fields from VAR to the end of VAR,
3306 into VAR, so that we treat those fields as a single variable.
3307 We return the variable they were collapsed into. */
3308
3309 static unsigned int
3310 collapse_rest_of_var (unsigned int var)
3311 {
3312 varinfo_t currvar = get_varinfo (var);
3313 varinfo_t field;
3314
3315 for (field = currvar->next; field; field = field->next)
3316 {
3317 if (dump_file)
3318 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3319 field->name, currvar->name);
3320
3321 gcc_assert (field->collapsed_to == 0);
3322 field->collapsed_to = currvar->id;
3323 }
3324
3325 currvar->next = NULL;
3326 currvar->size = currvar->fullsize - currvar->offset;
3327
3328 return currvar->id;
3329 }
3330
3331 /* Handle aggregate copies by expanding into copies of the respective
3332 fields of the structures. */
3333
3334 static void
3335 do_structure_copy (tree lhsop, tree rhsop)
3336 {
3337 struct constraint_expr lhs, rhs, tmp;
3338 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3339 varinfo_t p;
3340 unsigned HOST_WIDE_INT lhssize;
3341 unsigned HOST_WIDE_INT rhssize;
3342
3343 /* Pretend we are taking the address of the constraint exprs.
3344 We deal with walking the sub-fields ourselves. */
3345 get_constraint_for_1 (lhsop, &lhsc, true);
3346 get_constraint_for_1 (rhsop, &rhsc, true);
3347 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3348 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3349 lhs = *(VEC_last (ce_s, lhsc));
3350 rhs = *(VEC_last (ce_s, rhsc));
3351
3352 VEC_free (ce_s, heap, lhsc);
3353 VEC_free (ce_s, heap, rhsc);
3354
3355 /* If we have special var = x, swap it around. */
3356 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3357 {
3358 tmp = lhs;
3359 lhs = rhs;
3360 rhs = tmp;
3361 }
3362
3363 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3364 possible it's something we could handle. However, most cases falling
3365 into this are dealing with transparent unions, which are slightly
3366 weird. */
3367 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3368 {
3369 rhs.type = ADDRESSOF;
3370 rhs.var = anything_id;
3371 }
3372
3373 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3374 that special var. */
3375 if (rhs.var <= integer_id)
3376 {
3377 for (p = get_varinfo (lhs.var); p; p = p->next)
3378 {
3379 struct constraint_expr templhs = lhs;
3380 struct constraint_expr temprhs = rhs;
3381
3382 if (templhs.type == SCALAR )
3383 templhs.var = p->id;
3384 else
3385 templhs.offset += p->offset;
3386 process_constraint (new_constraint (templhs, temprhs));
3387 }
3388 }
3389 else
3390 {
3391 tree rhstype = TREE_TYPE (rhsop);
3392 tree lhstype = TREE_TYPE (lhsop);
3393 tree rhstypesize;
3394 tree lhstypesize;
3395
3396 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3397 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3398
3399 /* If we have variably sized types on the rhs or lhs, and a deref
3400 constraint, add the constraint, lhsconstraint = &ANYTHING.
3401 This is conservatively correct because either the lhs is an unknown
3402 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3403 constraint, and every variable it can point to must be unknown sized
3404 anyway, so we don't need to worry about fields at all. */
3405 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3406 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3407 {
3408 rhs.var = anything_id;
3409 rhs.type = ADDRESSOF;
3410 rhs.offset = 0;
3411 process_constraint (new_constraint (lhs, rhs));
3412 return;
3413 }
3414
3415 /* The size only really matters insofar as we don't set more or less of
3416 the variable. If we hit an unknown size var, the size should be the
3417 whole darn thing. */
3418 if (get_varinfo (rhs.var)->is_unknown_size_var)
3419 rhssize = ~0;
3420 else
3421 rhssize = TREE_INT_CST_LOW (rhstypesize);
3422
3423 if (get_varinfo (lhs.var)->is_unknown_size_var)
3424 lhssize = ~0;
3425 else
3426 lhssize = TREE_INT_CST_LOW (lhstypesize);
3427
3428
3429 if (rhs.type == SCALAR && lhs.type == SCALAR)
3430 {
3431 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3432 {
3433 lhs.var = collapse_rest_of_var (get_varinfo_fc (lhs.var)->id);
3434 rhs.var = collapse_rest_of_var (get_varinfo_fc (rhs.var)->id);
3435 lhs.offset = 0;
3436 rhs.offset = 0;
3437 lhs.type = SCALAR;
3438 rhs.type = SCALAR;
3439 process_constraint (new_constraint (lhs, rhs));
3440 }
3441 }
3442 else if (lhs.type != DEREF && rhs.type == DEREF)
3443 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3444 else if (lhs.type == DEREF && rhs.type != DEREF)
3445 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3446 else
3447 {
3448 tree pointedtotype = lhstype;
3449 tree tmpvar;
3450
3451 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3452 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3453 do_structure_copy (tmpvar, rhsop);
3454 do_structure_copy (lhsop, tmpvar);
3455 }
3456 }
3457 }
3458
3459 /* Create a constraint ID = OP. */
3460
3461 static void
3462 make_constraint_to (unsigned id, tree op)
3463 {
3464 VEC(ce_s, heap) *rhsc = NULL;
3465 struct constraint_expr *c;
3466 struct constraint_expr includes;
3467 unsigned int j;
3468
3469 includes.var = id;
3470 includes.offset = 0;
3471 includes.type = SCALAR;
3472
3473 get_constraint_for (op, &rhsc);
3474 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3475 process_constraint (new_constraint (includes, *c));
3476 VEC_free (ce_s, heap, rhsc);
3477 }
3478
3479 /* Make constraints necessary to make OP escape. */
3480
3481 static void
3482 make_escape_constraint (tree op)
3483 {
3484 make_constraint_to (escaped_id, op);
3485 }
3486
3487 /* For non-IPA mode, generate constraints necessary for a call on the
3488 RHS. */
3489
3490 static void
3491 handle_rhs_call (gimple stmt)
3492 {
3493 unsigned i;
3494
3495 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3496 {
3497 tree arg = gimple_call_arg (stmt, i);
3498
3499 /* Find those pointers being passed, and make sure they end up
3500 pointing to anything. */
3501 if (could_have_pointers (arg))
3502 make_escape_constraint (arg);
3503 }
3504
3505 /* The static chain escapes as well. */
3506 if (gimple_call_chain (stmt))
3507 make_escape_constraint (gimple_call_chain (stmt));
3508 }
3509
3510 /* For non-IPA mode, generate constraints necessary for a call
3511 that returns a pointer and assigns it to LHS. This simply makes
3512 the LHS point to global and escaped variables. */
3513
3514 static void
3515 handle_lhs_call (tree lhs, int flags)
3516 {
3517 VEC(ce_s, heap) *lhsc = NULL;
3518 struct constraint_expr rhsc;
3519 unsigned int j;
3520 struct constraint_expr *lhsp;
3521
3522 get_constraint_for (lhs, &lhsc);
3523
3524 if (flags & ECF_MALLOC)
3525 {
3526 tree heapvar = heapvar_lookup (lhs);
3527 varinfo_t vi;
3528
3529 if (heapvar == NULL)
3530 {
3531 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3532 DECL_EXTERNAL (heapvar) = 1;
3533 get_var_ann (heapvar)->is_heapvar = 1;
3534 if (gimple_referenced_vars (cfun))
3535 add_referenced_var (heapvar);
3536 heapvar_insert (lhs, heapvar);
3537 }
3538
3539 rhsc.var = create_variable_info_for (heapvar,
3540 alias_get_name (heapvar));
3541 vi = get_varinfo (rhsc.var);
3542 vi->is_artificial_var = 1;
3543 vi->is_heap_var = 1;
3544 vi->is_unknown_size_var = true;
3545 vi->fullsize = ~0;
3546 vi->size = ~0;
3547 rhsc.type = ADDRESSOF;
3548 rhsc.offset = 0;
3549 }
3550 else
3551 {
3552 rhsc.var = escaped_id;
3553 rhsc.offset = 0;
3554 rhsc.type = ADDRESSOF;
3555 }
3556 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3557 process_constraint (new_constraint (*lhsp, rhsc));
3558 VEC_free (ce_s, heap, lhsc);
3559 }
3560
3561 /* For non-IPA mode, generate constraints necessary for a call of a
3562 const function that returns a pointer in the statement STMT. */
3563
3564 static void
3565 handle_const_call (gimple stmt)
3566 {
3567 tree lhs = gimple_call_lhs (stmt);
3568 VEC(ce_s, heap) *lhsc = NULL;
3569 struct constraint_expr rhsc;
3570 unsigned int j, k;
3571 struct constraint_expr *lhsp;
3572 tree tmpvar;
3573 struct constraint_expr tmpc;
3574
3575 get_constraint_for (lhs, &lhsc);
3576
3577 /* If this is a nested function then it can return anything. */
3578 if (gimple_call_chain (stmt))
3579 {
3580 rhsc.var = anything_id;
3581 rhsc.offset = 0;
3582 rhsc.type = ADDRESSOF;
3583 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3584 process_constraint (new_constraint (*lhsp, rhsc));
3585 VEC_free (ce_s, heap, lhsc);
3586 return;
3587 }
3588
3589 /* We always use a temporary here, otherwise we end up with a quadratic
3590 amount of constraints for
3591 large_struct = const_call (large_struct);
3592 in field-sensitive PTA. */
3593 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3594 tmpc = get_constraint_exp_for_temp (tmpvar);
3595
3596 /* May return addresses of globals. */
3597 rhsc.var = nonlocal_id;
3598 rhsc.offset = 0;
3599 rhsc.type = ADDRESSOF;
3600 process_constraint (new_constraint (tmpc, rhsc));
3601
3602 /* May return arguments. */
3603 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3604 {
3605 tree arg = gimple_call_arg (stmt, k);
3606
3607 if (could_have_pointers (arg))
3608 {
3609 VEC(ce_s, heap) *argc = NULL;
3610 struct constraint_expr *argp;
3611 int i;
3612
3613 get_constraint_for (arg, &argc);
3614 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3615 process_constraint (new_constraint (tmpc, *argp));
3616 VEC_free (ce_s, heap, argc);
3617 }
3618 }
3619
3620 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3621 process_constraint (new_constraint (*lhsp, tmpc));
3622
3623 VEC_free (ce_s, heap, lhsc);
3624 }
3625
3626 /* For non-IPA mode, generate constraints necessary for a call to a
3627 pure function in statement STMT. */
3628
3629 static void
3630 handle_pure_call (gimple stmt)
3631 {
3632 unsigned i;
3633
3634 /* Memory reached from pointer arguments is call-used. */
3635 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3636 {
3637 tree arg = gimple_call_arg (stmt, i);
3638
3639 if (could_have_pointers (arg))
3640 make_constraint_to (callused_id, arg);
3641 }
3642
3643 /* The static chain is used as well. */
3644 if (gimple_call_chain (stmt))
3645 make_constraint_to (callused_id, gimple_call_chain (stmt));
3646
3647 /* If the call returns a pointer it may point to reachable memory
3648 from the arguments. Not so for malloc functions though. */
3649 if (gimple_call_lhs (stmt)
3650 && could_have_pointers (gimple_call_lhs (stmt))
3651 && !(gimple_call_flags (stmt) & ECF_MALLOC))
3652 {
3653 tree lhs = gimple_call_lhs (stmt);
3654 VEC(ce_s, heap) *lhsc = NULL;
3655 struct constraint_expr rhsc;
3656 struct constraint_expr *lhsp;
3657 unsigned j;
3658
3659 get_constraint_for (lhs, &lhsc);
3660
3661 /* If this is a nested function then it can return anything. */
3662 if (gimple_call_chain (stmt))
3663 {
3664 rhsc.var = anything_id;
3665 rhsc.offset = 0;
3666 rhsc.type = ADDRESSOF;
3667 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3668 process_constraint (new_constraint (*lhsp, rhsc));
3669 VEC_free (ce_s, heap, lhsc);
3670 return;
3671 }
3672
3673 /* Else just add the call-used memory here. Escaped variables
3674 and globals will be dealt with in handle_lhs_call. */
3675 rhsc.var = callused_id;
3676 rhsc.offset = 0;
3677 rhsc.type = ADDRESSOF;
3678 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3679 process_constraint (new_constraint (*lhsp, rhsc));
3680 VEC_free (ce_s, heap, lhsc);
3681 }
3682 }
3683
3684 /* Walk statement T setting up aliasing constraints according to the
3685 references found in T. This function is the main part of the
3686 constraint builder. */
3688
3689 static void
3690 find_func_aliases (gimple origt)
3691 {
3692 gimple t = origt;
3693 VEC(ce_s, heap) *lhsc = NULL;
3694 VEC(ce_s, heap) *rhsc = NULL;
3695 struct constraint_expr *c;
3696 enum escape_type stmt_escape_type;
3697
3698 /* Now build constraints expressions. */
3699 if (gimple_code (t) == GIMPLE_PHI)
3700 {
3701 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3702
3703 /* Only care about pointers and structures containing
3704 pointers. */
3705 if (could_have_pointers (gimple_phi_result (t)))
3706 {
3707 size_t i;
3708 unsigned int j;
3709
3710 /* For a phi node, assign all the arguments to
3711 the result. */
3712 get_constraint_for (gimple_phi_result (t), &lhsc);
3713 for (i = 0; i < gimple_phi_num_args (t); i++)
3714 {
3715 tree rhstype;
3716 tree strippedrhs = PHI_ARG_DEF (t, i);
3717
3718 STRIP_NOPS (strippedrhs);
3719 rhstype = TREE_TYPE (strippedrhs);
3720 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3721
3722 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3723 {
3724 struct constraint_expr *c2;
3725 while (VEC_length (ce_s, rhsc) > 0)
3726 {
3727 c2 = VEC_last (ce_s, rhsc);
3728 process_constraint (new_constraint (*c, *c2));
3729 VEC_pop (ce_s, rhsc);
3730 }
3731 }
3732 }
3733 }
3734 }
3735 /* In IPA mode, we need to generate constraints to pass call
3736 arguments through their calls. There are two cases,
3737 either a GIMPLE_CALL returning a value, or just a plain
3738 GIMPLE_CALL when it does not return one.
3739
3740 In non-ipa mode, we need to generate constraints for each
3741 pointer passed by address. */
3742 else if (is_gimple_call (t))
3743 {
3744 if (!in_ipa_mode)
3745 {
3746 int flags = gimple_call_flags (t);
3747
3748 /* Const functions can return their arguments and addresses
3749 of global memory but not of escaped memory. */
3750 if (flags & ECF_CONST)
3751 {
3752 if (gimple_call_lhs (t)
3753 && could_have_pointers (gimple_call_lhs (t)))
3754 handle_const_call (t);
3755 }
3756 /* Pure functions can return addresses in and of memory
3757 reachable from their arguments, but they are not an escape
3758 point for reachable memory of their arguments. */
3759 else if (flags & ECF_PURE)
3760 {
3761 handle_pure_call (t);
3762 if (gimple_call_lhs (t)
3763 && could_have_pointers (gimple_call_lhs (t)))
3764 handle_lhs_call (gimple_call_lhs (t), flags);
3765 }
3766 else
3767 {
3768 handle_rhs_call (t);
3769 if (gimple_call_lhs (t)
3770 && could_have_pointers (gimple_call_lhs (t)))
3771 handle_lhs_call (gimple_call_lhs (t), flags);
3772 }
3773 }
3774 else
3775 {
3776 tree lhsop;
3777 varinfo_t fi;
3778 int i = 1;
3779 size_t j;
3780 tree decl;
3781
3782 lhsop = gimple_call_lhs (t);
3783 decl = gimple_call_fndecl (t);
3784
3785 /* If we can directly resolve the function being called, do so.
3786 Otherwise, it must be some sort of indirect expression that
3787 we should still be able to handle. */
3788 if (decl)
3789 fi = get_vi_for_tree (decl);
3790 else
3791 {
3792 decl = gimple_call_fn (t);
3793 fi = get_vi_for_tree (decl);
3794 }
3795
3796 /* Assign all the passed arguments to the appropriate incoming
3797 parameters of the function. */
3798 for (j = 0; j < gimple_call_num_args (t); j++)
3799 {
3800 struct constraint_expr lhs;
3801 struct constraint_expr *rhsp;
3802 tree arg = gimple_call_arg (t, j);
3803
3804 get_constraint_for (arg, &rhsc);
3805 if (TREE_CODE (decl) != FUNCTION_DECL)
3806 {
3807 lhs.type = DEREF;
3808 lhs.var = fi->id;
3809 lhs.offset = i;
3810 }
3811 else
3812 {
3813 lhs.type = SCALAR;
3814 lhs.var = first_vi_for_offset (fi, i)->id;
3815 lhs.offset = 0;
3816 }
3817 while (VEC_length (ce_s, rhsc) != 0)
3818 {
3819 rhsp = VEC_last (ce_s, rhsc);
3820 process_constraint (new_constraint (lhs, *rhsp));
3821 VEC_pop (ce_s, rhsc);
3822 }
3823 i++;
3824 }
3825
3826 /* If we are returning a value, assign it to the result. */
3827 if (lhsop)
3828 {
3829 struct constraint_expr rhs;
3830 struct constraint_expr *lhsp;
3831 unsigned int j = 0;
3832
3833 get_constraint_for (lhsop, &lhsc);
3834 if (TREE_CODE (decl) != FUNCTION_DECL)
3835 {
3836 rhs.type = DEREF;
3837 rhs.var = fi->id;
3838 rhs.offset = i;
3839 }
3840 else
3841 {
3842 rhs.type = SCALAR;
3843 rhs.var = first_vi_for_offset (fi, i)->id;
3844 rhs.offset = 0;
3845 }
3846 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3847 process_constraint (new_constraint (*lhsp, rhs));
3848 }
3849 }
3850 }
3851 /* Otherwise, just a regular assignment statement. Only care about
3852 operations with a pointer result; others are dealt with as escape
3853 points if they have pointer operands. */
3854 else if (is_gimple_assign (t)
3855 && could_have_pointers (gimple_assign_lhs (t)))
3856 {
3857 /* Otherwise, just a regular assignment statement. */
3858 tree lhsop = gimple_assign_lhs (t);
3859 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
3860
3861 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
3862 do_structure_copy (lhsop, rhsop);
3863 else
3864 {
3865 unsigned int j;
3866 struct constraint_expr temp;
3867 get_constraint_for (lhsop, &lhsc);
3868
3869 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
3870 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
3871 gimple_assign_rhs2 (t), &rhsc);
3872 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3873 && !(POINTER_TYPE_P (gimple_expr_type (t))
3874 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
3875 || gimple_assign_single_p (t))
3876 get_constraint_for (rhsop, &rhsc);
3877 else
3878 {
3879 temp.type = ADDRESSOF;
3880 temp.var = anything_id;
3881 temp.offset = 0;
3882 VEC_safe_push (ce_s, heap, rhsc, &temp);
3883 }
3884 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3885 {
3886 struct constraint_expr *c2;
3887 unsigned int k;
3888
3889 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3890 process_constraint (new_constraint (*c, *c2));
3891 }
3892 }
3893 }
3894 else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
3895 {
3896 unsigned int j;
3897
3898 get_constraint_for (gimple_cdt_location (t), &lhsc);
3899 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3900 get_varinfo (c->var)->no_tbaa_pruning = true;
3901 }
3902
3903 stmt_escape_type = is_escape_site (t);
3904 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3905 {
3906 gcc_assert (is_gimple_assign (t));
3907 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
3908 {
3909 tree rhs = gimple_assign_rhs1 (t);
3910 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3911 if (base
3912 && (!DECL_P (base)
3913 || !is_global_var (base)))
3914 make_escape_constraint (rhs);
3915 }
3916 else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
3917 == GIMPLE_SINGLE_RHS)
3918 {
3919 if (could_have_pointers (gimple_assign_rhs1 (t)))
3920 make_escape_constraint (gimple_assign_rhs1 (t));
3921 }
3922 else
3923 gcc_unreachable ();
3924 }
3925 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3926 {
3927 gcc_assert (is_gimple_assign (t));
3928 gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3929 || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
3930 make_escape_constraint (gimple_assign_rhs1 (t));
3931 }
3932 else if (stmt_escape_type == ESCAPE_TO_ASM)
3933 {
3934 unsigned i;
3935 for (i = 0; i < gimple_asm_noutputs (t); ++i)
3936 {
3937 tree op = TREE_VALUE (gimple_asm_output_op (t, i));
3938 if (op && could_have_pointers (op))
3939 /* Strictly we'd only need the constraints from ESCAPED and
3940 NONLOCAL. */
3941 make_escape_constraint (op);
3942 }
3943 for (i = 0; i < gimple_asm_ninputs (t); ++i)
3944 {
3945 tree op = TREE_VALUE (gimple_asm_input_op (t, i));
3946 if (op && could_have_pointers (op))
3947 /* Strictly we'd only need the constraint to ESCAPED. */
3948 make_escape_constraint (op);
3949 }
3950 }
3951
3952 /* After promoting variables and computing aliasing we will
3953 need to re-scan most statements. FIXME: Try to minimize the
3954 number of statements re-scanned. It's not really necessary to
3955 re-scan *all* statements. */
3956 if (!in_ipa_mode)
3957 gimple_set_modified (origt, true);
3958 VEC_free (ce_s, heap, rhsc);
3959 VEC_free (ce_s, heap, lhsc);
3960 }
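/* Editorial illustration (a sketch, not compiler output): for the
   hypothetical C statements below, the non-IPA path of find_func_aliases
   would produce roughly these constraints, using the SCALAR / DEREF /
   ADDRESSOF expression types:

       p = &a;    =>  p = &a     ({SCALAR,p,0} = {ADDRESSOF,a,0})
       p = q;     =>  p = q      ({SCALAR,p,0} = {SCALAR,q,0})
       p = *q;    =>  p = *q     ({SCALAR,p,0} = {DEREF,q,0})
       *p = q;    =>  *p = q     ({DEREF,p,0}  = {SCALAR,q,0})
       p = q + 4; =>  handled by get_constraint_for_ptr_offset

   The variable names are hypothetical; the actual constraints are built
   through get_constraint_for and process_constraint above.  */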
3961
3962
3963 /* Find the first varinfo in the same variable as START that overlaps with
3964 OFFSET.
3965 Effectively, walk the chain of fields for the variable START to find the
3966 first field that overlaps with OFFSET.
3967 Return NULL if we can't find one. */
3968
3969 static varinfo_t
3970 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3971 {
3972 varinfo_t curr = start;
3973 while (curr)
3974 {
3975 /* We may not find a variable in the field list with the actual
3976 offset when we have glommed a structure to a variable.
3977 In that case, however, offset should still be within the size
3978 of the variable. */
3979 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3980 return curr;
3981 curr = curr->next;
3982 }
3983 return NULL;
3984 }
3985
3986
3987 /* Insert the varinfo FIELD into the field list for BASE, at the front
3988 of the list. */
3989
3990 static void
3991 insert_into_field_list (varinfo_t base, varinfo_t field)
3992 {
3993 varinfo_t prev = base;
3994 varinfo_t curr = base->next;
3995
3996 field->next = curr;
3997 prev->next = field;
3998 }
3999
4000 /* Insert the varinfo FIELD into the field list for BASE, ordered by
4001 offset. */
4002
4003 static void
4004 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
4005 {
4006 varinfo_t prev = base;
4007 varinfo_t curr = base->next;
4008
4009 if (curr == NULL)
4010 {
4011 prev->next = field;
4012 field->next = NULL;
4013 }
4014 else
4015 {
4016 while (curr)
4017 {
4018 if (field->offset <= curr->offset)
4019 break;
4020 prev = curr;
4021 curr = curr->next;
4022 }
4023 field->next = prev->next;
4024 prev->next = field;
4025 }
4026 }
4027
4028 /* This structure is used during pushing fields onto the fieldstack
4029 to track the offset of the field, since bitpos_of_field gives it
4030 relative to its immediate containing type, and we want it relative
4031 to the ultimate containing object. */
4032
4033 struct fieldoff
4034 {
4035 /* Offset from the base of the outermost containing object to this field. */
4036 HOST_WIDE_INT offset;
4037
4038 /* Size, in bits, of the field. */
4039 unsigned HOST_WIDE_INT size;
4040
4041 unsigned has_unknown_size : 1;
4042
4043 unsigned may_have_pointers : 1;
4044 };
4045 typedef struct fieldoff fieldoff_s;
4046
4047 DEF_VEC_O(fieldoff_s);
4048 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4049
4050 /* qsort comparison function for two fieldoff's PA and PB */
4051
4052 static int
4053 fieldoff_compare (const void *pa, const void *pb)
4054 {
4055 const fieldoff_s *foa = (const fieldoff_s *)pa;
4056 const fieldoff_s *fob = (const fieldoff_s *)pb;
4057 unsigned HOST_WIDE_INT foasize, fobsize;
4058
4059 if (foa->offset < fob->offset)
4060 return -1;
4061 else if (foa->offset > fob->offset)
4062 return 1;
4063
4064 foasize = foa->size;
4065 fobsize = fob->size;
4066 if (foasize < fobsize)
4067 return -1;
4068 else if (foasize > fobsize)
4069 return 1;
4070 return 0;
4071 }
4072
4073 /* Sort a fieldstack according to the field offset and sizes. */
4074 static void
4075 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4076 {
4077 qsort (VEC_address (fieldoff_s, fieldstack),
4078 VEC_length (fieldoff_s, fieldstack),
4079 sizeof (fieldoff_s),
4080 fieldoff_compare);
4081 }
4082
4083 /* Return true if V is a tree that we can have subvars for.
4084 Normally, this is any aggregate type. Also complex
4085 types which are not gimple registers can have subvars. */
4086
4087 static inline bool
4088 var_can_have_subvars (const_tree v)
4089 {
4090 /* Volatile variables should never have subvars. */
4091 if (TREE_THIS_VOLATILE (v))
4092 return false;
4093
4094 /* Non decls or memory tags can never have subvars. */
4095 if (!DECL_P (v) || MTAG_P (v))
4096 return false;
4097
4098 /* Aggregates without overlapping fields can have subvars. */
4099 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4100 return true;
4101
4102 return false;
4103 }
4104
4105 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4106 the fields of TYPE onto fieldstack, recording their offsets along
4107 the way.
4108
4109 OFFSET is used to keep track of the offset in this entire
4110 structure, rather than just the immediately containing structure.
4111 Returns the number of fields pushed. */
4112
4113 static int
4114 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4115 HOST_WIDE_INT offset)
4116 {
4117 tree field;
4118 int count = 0;
4119
4120 if (TREE_CODE (type) != RECORD_TYPE)
4121 return 0;
4122
4123 /* If the vector of fields is growing too big, bail out early.
4124 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
4125 make sure that check fails. */
4126 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4127 return 0;
4128
4129 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4130 if (TREE_CODE (field) == FIELD_DECL)
4131 {
4132 bool push = false;
4133 int pushed = 0;
4134 HOST_WIDE_INT foff = bitpos_of_field (field);
4135
4136 if (!var_can_have_subvars (field)
4137 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4138 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4139 push = true;
4140 else if (!(pushed = push_fields_onto_fieldstack
4141 (TREE_TYPE (field), fieldstack, offset + foff))
4142 && (DECL_SIZE (field)
4143 && !integer_zerop (DECL_SIZE (field))))
4144 /* Empty structures may have an actual size, like in C++. So
4145 if we did not push any subfields and the size is nonzero,
4146 push the field itself onto the stack. */
4147 push = true;
4148
4149 if (push)
4150 {
4151 fieldoff_s *pair = NULL;
4152 bool has_unknown_size = false;
4153
4154 if (!VEC_empty (fieldoff_s, *fieldstack))
4155 pair = VEC_last (fieldoff_s, *fieldstack);
4156
4157 if (!DECL_SIZE (field)
4158 || !host_integerp (DECL_SIZE (field), 1))
4159 has_unknown_size = true;
4160
4161 /* If adjacent fields do not contain pointers, merge them. */
4162 if (pair
4163 && !pair->may_have_pointers
4164 && !could_have_pointers (field)
4165 && !pair->has_unknown_size
4166 && !has_unknown_size
4167 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4168 {
4169 pair = VEC_last (fieldoff_s, *fieldstack);
4170 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4171 }
4172 else
4173 {
4174 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4175 pair->offset = offset + foff;
4176 pair->has_unknown_size = has_unknown_size;
4177 if (!has_unknown_size)
4178 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4179 else
4180 pair->size = -1;
4181 pair->may_have_pointers = could_have_pointers (field);
4182 count++;
4183 }
4184 }
4185 else
4186 count += pushed;
4187 }
4188
4189 return count;
4190 }
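/* Editorial illustration: for a hypothetical type
       struct S { int *p; char a; char b; };
   on a 64-bit LP64 target, push_fields_onto_fieldstack would push an
   entry {offset 0, size 64, may_have_pointers 1} for P, and a single
   merged entry {offset 64, size 16, may_have_pointers 0} covering both
   A and B, because adjacent fields without pointers are combined by the
   merging step above.  Offsets and sizes are in bits and depend on the
   target's layout; the struct is an assumption, not from the sources.  */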
4191
4192 /* Create a constraint ID = &FROM. */
4193
4194 static void
4195 make_constraint_from (varinfo_t vi, int from)
4196 {
4197 struct constraint_expr lhs, rhs;
4198
4199 lhs.var = vi->id;
4200 lhs.offset = 0;
4201 lhs.type = SCALAR;
4202
4203 rhs.var = from;
4204 rhs.offset = 0;
4205 rhs.type = ADDRESSOF;
4206 process_constraint (new_constraint (lhs, rhs));
4207 }
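/* Editorial note: e.g. make_constraint_from (vi, escaped_id) records
   VI = &ESCAPED, i.e. lhs {SCALAR, vi->id, 0} and rhs {ADDRESSOF,
   escaped_id, 0}.  This is how global variables are conservatively made
   to point to escaped memory in create_variable_info_for below.  */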
4208
4209 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4210 if it is a varargs function. */
4211
4212 static unsigned int
4213 count_num_arguments (tree decl, bool *is_varargs)
4214 {
4215 unsigned int i = 0;
4216 tree t;
4217
4218 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4219 t;
4220 t = TREE_CHAIN (t))
4221 {
4222 if (TREE_VALUE (t) == void_type_node)
4223 break;
4224 i++;
4225 }
4226
4227 if (!t)
4228 *is_varargs = true;
4229 return i;
4230 }
4231
4232 /* Create a function info node for DECL, using NAME, and return the
4233 index of the variable we've created for the function. */
4234
4235 static unsigned int
4236 create_function_info_for (tree decl, const char *name)
4237 {
4238 unsigned int index = VEC_length (varinfo_t, varmap);
4239 varinfo_t vi;
4240 tree arg;
4241 unsigned int i;
4242 bool is_varargs = false;
4243
4244 /* Create the variable info. */
4245
4246 vi = new_var_info (decl, index, name);
4247 vi->decl = decl;
4248 vi->offset = 0;
4249 vi->size = 1;
4250 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4251 insert_vi_for_tree (vi->decl, vi);
4252 VEC_safe_push (varinfo_t, heap, varmap, vi);
4253
4254 stats.total_vars++;
4255
4256 /* If it's varargs, we don't know how many arguments it has, so we
4257 can't do much. */
4258 if (is_varargs)
4259 {
4260 vi->fullsize = ~0;
4261 vi->size = ~0;
4262 vi->is_unknown_size_var = true;
4263 return index;
4264 }
4265
4266
4267 arg = DECL_ARGUMENTS (decl);
4268
4269 /* Set up variables for each argument. */
4270 for (i = 1; i < vi->fullsize; i++)
4271 {
4272 varinfo_t argvi;
4273 const char *newname;
4274 char *tempname;
4275 unsigned int newindex;
4276 tree argdecl = decl;
4277
4278 if (arg)
4279 argdecl = arg;
4280
4281 newindex = VEC_length (varinfo_t, varmap);
4282 asprintf (&tempname, "%s.arg%d", name, i-1);
4283 newname = ggc_strdup (tempname);
4284 free (tempname);
4285
4286 argvi = new_var_info (argdecl, newindex, newname);
4287 argvi->decl = argdecl;
4288 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4289 argvi->offset = i;
4290 argvi->size = 1;
4291 argvi->is_full_var = true;
4292 argvi->fullsize = vi->fullsize;
4293 insert_into_field_list_sorted (vi, argvi);
4294 stats.total_vars ++;
4295 if (arg)
4296 {
4297 insert_vi_for_tree (arg, argvi);
4298 arg = TREE_CHAIN (arg);
4299 }
4300 }
4301
4302 /* Create a variable for the return var. */
4303 if (DECL_RESULT (decl) != NULL
4304 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4305 {
4306 varinfo_t resultvi;
4307 const char *newname;
4308 char *tempname;
4309 unsigned int newindex;
4310 tree resultdecl = decl;
4311
4312 vi->fullsize ++;
4313
4314 if (DECL_RESULT (decl))
4315 resultdecl = DECL_RESULT (decl);
4316
4317 newindex = VEC_length (varinfo_t, varmap);
4318 asprintf (&tempname, "%s.result", name);
4319 newname = ggc_strdup (tempname);
4320 free (tempname);
4321
4322 resultvi = new_var_info (resultdecl, newindex, newname);
4323 resultvi->decl = resultdecl;
4324 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4325 resultvi->offset = i;
4326 resultvi->size = 1;
4327 resultvi->fullsize = vi->fullsize;
4328 resultvi->is_full_var = true;
4329 insert_into_field_list_sorted (vi, resultvi);
4330 stats.total_vars ++;
4331 if (DECL_RESULT (decl))
4332 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4333 }
4334 return index;
4335 }
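/* Editorial sketch: for a hypothetical declaration
       int *foo (int *a, int *b);
   create_function_info_for creates the varinfos "foo" (offset 0),
   "foo.arg0" (offset 1), "foo.arg1" (offset 2) and "foo.result"
   (offset 3), chained as fields of a single variable, so that in IPA
   mode argument passing and returned values can be modeled as accesses
   at fixed offsets from "foo".  The declaration is an assumption used
   only for illustration.  */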
4336
4337
4338 /* Return true if FIELDSTACK contains fields that overlap.
4339 FIELDSTACK is assumed to be sorted by offset. */
4340
4341 static bool
4342 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4343 {
4344 fieldoff_s *fo = NULL;
4345 unsigned int i;
4346 HOST_WIDE_INT lastoffset = -1;
4347
4348 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4349 {
4350 if (fo->offset == lastoffset)
4351 return true;
4352 lastoffset = fo->offset;
4353 }
4354 return false;
4355 }
4356
4357 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4358 This will also create any varinfo structures necessary for fields
4359 of DECL. */
4360
4361 static unsigned int
4362 create_variable_info_for (tree decl, const char *name)
4363 {
4364 unsigned int index = VEC_length (varinfo_t, varmap);
4365 varinfo_t vi;
4366 tree decl_type = TREE_TYPE (decl);
4367 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
4368 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4369 VEC (fieldoff_s,heap) *fieldstack = NULL;
4370
4371 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4372 return create_function_info_for (decl, name);
4373
4374 if (var_can_have_subvars (decl) && use_field_sensitive
4375 && (!var_ann (decl)
4376 || var_ann (decl)->noalias_state == 0)
4377 && (!var_ann (decl)
4378 || !var_ann (decl)->is_heapvar))
4379 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
4380
4381 /* If the variable doesn't have subvars, we may end up needing to
4382 sort the field list and create fake variables for all the
4383 fields. */
4384 vi = new_var_info (decl, index, name);
4385 vi->decl = decl;
4386 vi->offset = 0;
4387 vi->may_have_pointers = could_have_pointers (decl);
4388 if (!declsize
4389 || !host_integerp (declsize, 1))
4390 {
4391 vi->is_unknown_size_var = true;
4392 vi->fullsize = ~0;
4393 vi->size = ~0;
4394 }
4395 else
4396 {
4397 vi->fullsize = TREE_INT_CST_LOW (declsize);
4398 vi->size = vi->fullsize;
4399 }
4400
4401 insert_vi_for_tree (vi->decl, vi);
4402 VEC_safe_push (varinfo_t, heap, varmap, vi);
4403 if (is_global && (!flag_whole_program || !in_ipa_mode)
4404 && vi->may_have_pointers)
4405 {
4406 if (var_ann (decl)
4407 && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
4408 make_constraint_from (vi, vi->id);
4409 else
4410 make_constraint_from (vi, escaped_id);
4411 }
4412
4413 stats.total_vars++;
4414 if (use_field_sensitive
4415 && !vi->is_unknown_size_var
4416 && var_can_have_subvars (decl)
4417 && VEC_length (fieldoff_s, fieldstack) > 1
4418 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4419 {
4420 unsigned int newindex = VEC_length (varinfo_t, varmap);
4421 fieldoff_s *fo = NULL;
4422 bool notokay = false;
4423 unsigned int i;
4424
4425 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4426 {
4427 if (fo->has_unknown_size
4428 || fo->offset < 0)
4429 {
4430 notokay = true;
4431 break;
4432 }
4433 }
4434
4435 /* We can't sort them if we have a field with a variable sized type,
4436 which will make notokay = true. In that case, we are going to return
4437 without creating varinfos for the fields anyway, so sorting them is a
4438 waste to boot. */
4439 if (!notokay)
4440 {
4441 sort_fieldstack (fieldstack);
4442 /* Due to some C++ FE issues, like PR 22488, we might end up with
4443 what appear to be overlapping fields even though they,
4444 in reality, do not overlap. Until the C++ FE is fixed,
4445 we will simply disable field-sensitivity for these cases. */
4446 notokay = check_for_overlaps (fieldstack);
4447 }
4448
4449
4450 if (VEC_length (fieldoff_s, fieldstack) != 0)
4451 fo = VEC_index (fieldoff_s, fieldstack, 0);
4452
4453 if (fo == NULL || notokay)
4454 {
4455 vi->is_unknown_size_var = 1;
4456 vi->fullsize = ~0;
4457 vi->size = ~0;
4458 vi->is_full_var = true;
4459 VEC_free (fieldoff_s, heap, fieldstack);
4460 return index;
4461 }
4462
4463 vi->size = fo->size;
4464 vi->offset = fo->offset;
4465 vi->may_have_pointers = fo->may_have_pointers;
4466 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4467 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4468 i--)
4469 {
4470 varinfo_t newvi;
4471 const char *newname = "NULL";
4472 char *tempname;
4473
4474 newindex = VEC_length (varinfo_t, varmap);
4475 if (dump_file)
4476 {
4477 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
4478 "+" HOST_WIDE_INT_PRINT_DEC,
4479 vi->name, fo->offset, fo->size);
4480 newname = ggc_strdup (tempname);
4481 free (tempname);
4482 }
4483 newvi = new_var_info (decl, newindex, newname);
4484 newvi->offset = fo->offset;
4485 newvi->size = fo->size;
4486 newvi->fullsize = vi->fullsize;
4487 newvi->may_have_pointers = fo->may_have_pointers;
4488 insert_into_field_list (vi, newvi);
4489 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4490 if (is_global && (!flag_whole_program || !in_ipa_mode)
4491 && newvi->may_have_pointers)
4492 make_constraint_from (newvi, escaped_id);
4493
4494 stats.total_vars++;
4495 }
4496 }
4497 else
4498 vi->is_full_var = true;
4499
4500 VEC_free (fieldoff_s, heap, fieldstack);
4501
4502 return index;
4503 }
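/* Editorial sketch: for a hypothetical file-scope variable
       struct S { int *p; float f; } s;
   compiled with field-sensitivity enabled, create_variable_info_for
   creates one varinfo per field (the first reuses the name "s"), and,
   because S is global and may contain pointers, the pointer-bearing
   varinfos additionally get the constraint VAR = &ESCAPED via
   make_constraint_from.  The declaration is an assumption, not taken
   from the sources.  */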
4504
4505 /* Print out the points-to solution for VAR to FILE. */
4506
4507 void
4508 dump_solution_for_var (FILE *file, unsigned int var)
4509 {
4510 varinfo_t vi = get_varinfo (var);
4511 unsigned int i;
4512 bitmap_iterator bi;
4513
4514 if (find (var) != var)
4515 {
4516 varinfo_t vipt = get_varinfo (find (var));
4517 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4518 }
4519 else
4520 {
4521 fprintf (file, "%s = { ", vi->name);
4522 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4523 {
4524 fprintf (file, "%s ", get_varinfo (i)->name);
4525 }
4526 fprintf (file, "}");
4527 if (vi->no_tbaa_pruning)
4528 fprintf (file, " no-tbaa-pruning");
4529 fprintf (file, "\n");
4530 }
4531 }
4532
4533 /* Print the points-to solution for VAR to stdout. */
4534
4535 void
4536 debug_solution_for_var (unsigned int var)
4537 {
4538 dump_solution_for_var (stdout, var);
4539 }
4540
4541 /* Create varinfo structures for all of the variables in the
4542 function for intraprocedural mode. */
4543
4544 static void
4545 intra_create_variable_infos (void)
4546 {
4547 tree t;
4548 struct constraint_expr lhs, rhs;
4549
4550 /* For each incoming pointer argument ARG, create the constraint ARG
4551 = NONLOCAL, or ARG = &PARM_NOALIAS (a dummy variable) if flag_argument_noalias is set. */
4552 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4553 {
4554 varinfo_t p;
4555
4556 if (!could_have_pointers (t))
4557 continue;
4558
4559 /* If flag_argument_noalias is set, then the function's pointer
4560 arguments are guaranteed not to point to each other. In that
4561 case, create an artificial variable PARM_NOALIAS and the
4562 constraint ARG = &PARM_NOALIAS. */
4563 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4564 {
4565 varinfo_t vi;
4566 tree heapvar = heapvar_lookup (t);
4567
4568 lhs.offset = 0;
4569 lhs.type = SCALAR;
4570 lhs.var = get_vi_for_tree (t)->id;
4571
4572 if (heapvar == NULL_TREE)
4573 {
4574 var_ann_t ann;
4575 heapvar = create_tmp_var_raw (ptr_type_node,
4576 "PARM_NOALIAS");
4577 DECL_EXTERNAL (heapvar) = 1;
4578 if (gimple_referenced_vars (cfun))
4579 add_referenced_var (heapvar);
4580
4581 heapvar_insert (t, heapvar);
4582
4583 ann = get_var_ann (heapvar);
4584 ann->is_heapvar = 1;
4585 if (flag_argument_noalias == 1)
4586 ann->noalias_state = NO_ALIAS;
4587 else if (flag_argument_noalias == 2)
4588 ann->noalias_state = NO_ALIAS_GLOBAL;
4589 else if (flag_argument_noalias == 3)
4590 ann->noalias_state = NO_ALIAS_ANYTHING;
4591 else
4592 gcc_unreachable ();
4593 }
4594
4595 vi = get_vi_for_tree (heapvar);
4596 vi->is_artificial_var = 1;
4597 vi->is_heap_var = 1;
4598 vi->is_unknown_size_var = true;
4599 vi->fullsize = ~0;
4600 vi->size = ~0;
4601 rhs.var = vi->id;
4602 rhs.type = ADDRESSOF;
4603 rhs.offset = 0;
4604 for (p = get_varinfo (lhs.var); p; p = p->next)
4605 {
4606 struct constraint_expr temp = lhs;
4607 temp.var = p->id;
4608 process_constraint (new_constraint (temp, rhs));
4609 }
4610 }
4611 else
4612 {
4613 varinfo_t arg_vi = get_vi_for_tree (t);
4614
4615 for (p = arg_vi; p; p = p->next)
4616 make_constraint_from (p, nonlocal_id);
4617 }
4618 }
4619
4620 /* Add a constraint for a result decl that is passed by reference. */
4621 if (DECL_RESULT (cfun->decl)
4622 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
4623 {
4624 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
4625
4626 for (p = result_vi; p; p = p->next)
4627 make_constraint_from (p, nonlocal_id);
4628 }
4629
4630 /* Add a constraint for the incoming static chain parameter. */
4631 if (cfun->static_chain_decl != NULL_TREE)
4632 {
4633 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
4634
4635 for (p = chain_vi; p; p = p->next)
4636 make_constraint_from (p, nonlocal_id);
4637 }
4638 }
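/* Editorial illustration: for a hypothetical function "int foo (int *p)",
   this generates P = &NONLOCAL for the incoming argument by default;
   with -fargument-noalias in effect it instead creates an artificial
   PARM_NOALIAS heap variable and generates P = &PARM_NOALIAS, with the
   precise guarantee recorded in the noalias_state set above.  */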
4639
4640 /* Structure used to put solution bitmaps in a hashtable so they can
4641 be shared among variables with the same points-to set. */
4642
4643 typedef struct shared_bitmap_info
4644 {
4645 bitmap pt_vars;
4646 hashval_t hashcode;
4647 } *shared_bitmap_info_t;
4648 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4649
4650 static htab_t shared_bitmap_table;
4651
4652 /* Hash function for a shared_bitmap_info_t */
4653
4654 static hashval_t
4655 shared_bitmap_hash (const void *p)
4656 {
4657 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4658 return bi->hashcode;
4659 }
4660
4661 /* Equality function for two shared_bitmap_info_t's. */
4662
4663 static int
4664 shared_bitmap_eq (const void *p1, const void *p2)
4665 {
4666 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4667 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4668 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4669 }
4670
4671 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4672 existing instance if there is one, NULL otherwise. */
4673
4674 static bitmap
4675 shared_bitmap_lookup (bitmap pt_vars)
4676 {
4677 void **slot;
4678 struct shared_bitmap_info sbi;
4679
4680 sbi.pt_vars = pt_vars;
4681 sbi.hashcode = bitmap_hash (pt_vars);
4682
4683 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4684 sbi.hashcode, NO_INSERT);
4685 if (!slot)
4686 return NULL;
4687 else
4688 return ((shared_bitmap_info_t) *slot)->pt_vars;
4689 }
4690
4691
4692 /* Add a bitmap to the shared bitmap hashtable. */
4693
4694 static void
4695 shared_bitmap_add (bitmap pt_vars)
4696 {
4697 void **slot;
4698 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4699
4700 sbi->pt_vars = pt_vars;
4701 sbi->hashcode = bitmap_hash (pt_vars);
4702
4703 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4704 sbi->hashcode, INSERT);
4705 gcc_assert (!*slot);
4706 *slot = (void *) sbi;
4707 }
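/* Editorial note on usage: callers (see find_what_p_points_to below)
   first call shared_bitmap_lookup and only call shared_bitmap_add when
   no equal bitmap already exists, so that identical points-to sets end
   up sharing a single bitmap.  */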
4708
4709
4710 /* Set bits in INTO corresponding to the variable uids in solution set
4711 FROM, which came from variable PTR.
4712 For variables that are actually dereferenced, we also use type
4713 based alias analysis to prune the points-to sets.
4714 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4715 help determine whether we are allowed to prune using TBAA.
4716 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4717 the from set. Returns the number of pruned variables. */
4718
4719 static unsigned
4720 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4721 bool no_tbaa_pruning)
4722 {
4723 unsigned int i;
4724 bitmap_iterator bi;
4725 unsigned pruned = 0;
4726
4727 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4728
4729 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4730 {
4731 varinfo_t vi = get_varinfo (i);
4732
4733 /* The only artificial variables that are allowed in a may-alias
4734 set are heap variables. */
4735 if (vi->is_artificial_var && !vi->is_heap_var)
4736 continue;
4737
4738 if (TREE_CODE (vi->decl) == VAR_DECL
4739 || TREE_CODE (vi->decl) == PARM_DECL
4740 || TREE_CODE (vi->decl) == RESULT_DECL)
4741 {
4742 /* Just add VI->DECL to the alias set.
4743 Don't type prune artificial vars or points-to sets
4744 for pointers that have not been dereferenced or with
4745 type-based pruning disabled. */
4746 if (vi->is_artificial_var
4747 || !is_derefed
4748 || no_tbaa_pruning
4749 || vi->no_tbaa_pruning)
4750 bitmap_set_bit (into, DECL_UID (vi->decl));
4751 else
4752 {
4753 alias_set_type var_alias_set, mem_alias_set;
4754 var_alias_set = get_alias_set (vi->decl);
4755 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4756 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4757 vi->decl, var_alias_set, true))
4758 bitmap_set_bit (into, DECL_UID (vi->decl));
4759 else
4760 ++pruned;
4761 }
4762 }
4763 }
4764
4765 return pruned;
4766 }
4767
4768
4769 static bool have_alias_info = false;
4770
4771 /* Emit notes for the initialization points of the pointer PTR, using VISITED to avoid cycles through PHI nodes. */
4772
4773 static void
4774 emit_pointer_definition (tree ptr, bitmap visited)
4775 {
4776 gimple def = SSA_NAME_DEF_STMT (ptr);
4777 if (gimple_code (def) == GIMPLE_PHI)
4778 {
4779 use_operand_p argp;
4780 ssa_op_iter oi;
4781
4782 FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE)
4783 {
4784 tree arg = USE_FROM_PTR (argp);
4785 if (TREE_CODE (arg) == SSA_NAME)
4786 {
4787 if (bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
4788 emit_pointer_definition (arg, visited);
4789 }
4790 else
4791 inform (0, "initialized from %qE", arg);
4792 }
4793 }
4794 else if (!gimple_nop_p (def))
4795 inform (gimple_location (def), "initialized from here");
4796 }
4797
4798 /* Emit a strict aliasing warning for dereferencing the pointer PTR. */
4799
4800 static void
4801 emit_alias_warning (tree ptr)
4802 {
4803 gimple use;
4804 imm_use_iterator ui;
4805 bool warned = false;
4806
4807 FOR_EACH_IMM_USE_STMT (use, ui, ptr)
4808 {
4809 tree deref = NULL_TREE;
4810
4811 if (gimple_has_lhs (use))
4812 {
4813 tree lhs = get_base_address (gimple_get_lhs (use));
4814 if (lhs
4815 && INDIRECT_REF_P (lhs)
4816 && TREE_OPERAND (lhs, 0) == ptr)
4817 deref = lhs;
4818 }
4819 if (gimple_assign_single_p (use))
4820 {
4821 tree rhs = get_base_address (gimple_assign_rhs1 (use));
4822 if (rhs
4823 && INDIRECT_REF_P (rhs)
4824 && TREE_OPERAND (rhs, 0) == ptr)
4825 deref = rhs;
4826 }
4827 else if (is_gimple_call (use))
4828 {
4829 unsigned i;
4830 for (i = 0; i < gimple_call_num_args (use); ++i)
4831 {
4832 tree op = get_base_address (gimple_call_arg (use, i));
4833 if (op
4834 && INDIRECT_REF_P (op)
4835 && TREE_OPERAND (op, 0) == ptr)
4836 deref = op;
4837 }
4838 }
4839 if (deref
4840 && !TREE_NO_WARNING (deref))
4841 {
4842 TREE_NO_WARNING (deref) = 1;
4843 warned |= warning_at (gimple_location (use), OPT_Wstrict_aliasing,
4844 "dereferencing pointer %qD does break "
4845 "strict-aliasing rules", SSA_NAME_VAR (ptr));
4846 }
4847 }
4848 if (warned)
4849 {
4850 bitmap visited = BITMAP_ALLOC (NULL);
4851 emit_pointer_definition (ptr, visited);
4852 BITMAP_FREE (visited);
4853 }
4854 }
4855
4856 /* Given a pointer variable P, fill in its points-to set, or return
4857 false if we can't.
4858 Rather than return false for variables that point to anything, we
4859 instead find the corresponding SMT, and merge in its aliases. In
4860 addition to these aliases, we also set the bits for the SMT's
4861 themselves and their subsets, as SMT's are still in use by
4862 non-SSA_NAME's, and pruning may eliminate every one of their
4863 aliases. In such a case, if we did not include the right set of
4864 SMT's in the points-to set of the variable, we'd end up with
4865 statements that do not conflict but should. */
4866
4867 bool
4868 find_what_p_points_to (tree p)
4869 {
4870 tree lookup_p = p;
4871 varinfo_t vi;
4872
4873 if (!have_alias_info)
4874 return false;
4875
4876 /* For parameters, get at the points-to set for the actual parm
4877 decl. */
4878 if (TREE_CODE (p) == SSA_NAME
4879 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4880 && SSA_NAME_IS_DEFAULT_DEF (p))
4881 lookup_p = SSA_NAME_VAR (p);
4882
4883 vi = lookup_vi_for_tree (lookup_p);
4884 if (vi)
4885 {
4886 if (vi->is_artificial_var)
4887 return false;
4888
4889 /* See if this is a field or a structure. */
4890 if (vi->size != vi->fullsize)
4891 {
4892 /* Nothing currently asks about structure fields directly,
4893 but when they do, we need code here to hand back the
4894 points-to set. */
4895 return false;
4896 }
4897 else
4898 {
4899 struct ptr_info_def *pi = get_ptr_info (p);
4900 unsigned int i, pruned;
4901 bitmap_iterator bi;
4902 bool was_pt_anything = false;
4903 bitmap finished_solution;
4904 bitmap result;
4905
4906 if (!pi->memory_tag_needed)
4907 return false;
4908
4909 /* This variable may have been collapsed, let's get the real
4910 variable. */
4911 vi = get_varinfo (find (vi->id));
4912
4913 /* Translate artificial variables into SSA_NAME_PTR_INFO
4914 attributes. */
4915 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4916 {
4917 varinfo_t vi = get_varinfo (i);
4918
4919 if (vi->is_artificial_var)
4920 {
4921 /* FIXME. READONLY should be handled better so that
4922 flow insensitive aliasing can disregard writable
4923 aliases. */
4924 if (vi->id == nothing_id)
4925 pi->pt_null = 1;
4926 else if (vi->id == anything_id
4927 || vi->id == nonlocal_id
4928 || vi->id == escaped_id
4929 || vi->id == callused_id)
4930 was_pt_anything = 1;
4931 else if (vi->id == readonly_id)
4932 was_pt_anything = 1;
4933 else if (vi->id == integer_id)
4934 was_pt_anything = 1;
4935 else if (vi->is_heap_var)
4936 pi->pt_global_mem = 1;
4937 }
4938 }
4939
4940 /* Instead of doing extra work, simply do not create
4941 points-to information for pt_anything pointers. This
4942 will cause the operand scanner to fall back to the
4943 type-based SMT and its aliases. Which is the best
4944 we could do here for the points-to set as well. */
4945 if (was_pt_anything)
4946 return false;
4947
4948 /* Share the final set of variables when possible. */
4949 finished_solution = BITMAP_GGC_ALLOC ();
4950 stats.points_to_sets_created++;
4951
4952 pruned = set_uids_in_ptset (p, finished_solution, vi->solution,
4953 pi->is_dereferenced,
4954 vi->no_tbaa_pruning);
4955 result = shared_bitmap_lookup (finished_solution);
4956
4957 if (!result)
4958 {
4959 shared_bitmap_add (finished_solution);
4960 pi->pt_vars = finished_solution;
4961 }
4962 else
4963 {
4964 pi->pt_vars = result;
4965 bitmap_clear (finished_solution);
4966 }
4967
4968 if (bitmap_empty_p (pi->pt_vars))
4969 {
4970 pi->pt_vars = NULL;
4971 if (pruned > 0
4972 && !pi->pt_null
4973 && pi->is_dereferenced
4974 && warn_strict_aliasing > 0
4975 && !SSA_NAME_IS_DEFAULT_DEF (p))
4976 {
4977 if (dump_file && dump_flags & TDF_DETAILS)
4978 {
4979 fprintf (dump_file, "alias warning for ");
4980 print_generic_expr (dump_file, p, 0);
4981 fprintf (dump_file, "\n");
4982 }
4983 emit_alias_warning (p);
4984 }
4985 }
4986
4987 return true;
4988 }
4989 }
4990
4991 return false;
4992 }
4993
4994 /* Mark the ESCAPED solution as call clobbered. Returns false if
4995 pt_anything escaped which needs all locals that have their address
4996 taken marked call clobbered as well. */
4997
4998 bool
4999 clobber_what_escaped (void)
5000 {
5001 varinfo_t vi;
5002 unsigned int i;
5003 bitmap_iterator bi;
5004
5005 if (!have_alias_info)
5006 return false;
5007
5008 /* This variable may have been collapsed, let's get the real
5009 variable for escaped_id. */
5010 vi = get_varinfo (find (escaped_id));
5011
5012 /* If call-used memory escapes we need to include it in the
5013 set of escaped variables. This can happen if a pure
5014 function returns a pointer and this pointer escapes. */
5015 if (bitmap_bit_p (vi->solution, callused_id))
5016 {
5017 varinfo_t cu_vi = get_varinfo (find (callused_id));
5018 bitmap_ior_into (vi->solution, cu_vi->solution);
5019 }
5020
5021 /* Mark variables in the solution call-clobbered. */
5022 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5023 {
5024 varinfo_t vi = get_varinfo (i);
5025
5026 if (vi->is_artificial_var)
5027 {
5028 /* nothing_id and readonly_id do not cause any
5029 call clobber ops. For anything_id and integer_id
5030 we need to clobber all addressable vars. */
5031 if (vi->id == anything_id
5032 || vi->id == integer_id)
5033 return false;
5034 }
5035
5036 /* Only artificial heap-vars are further interesting. */
5037 if (vi->is_artificial_var && !vi->is_heap_var)
5038 continue;
5039
5040 if ((TREE_CODE (vi->decl) == VAR_DECL
5041 || TREE_CODE (vi->decl) == PARM_DECL
5042 || TREE_CODE (vi->decl) == RESULT_DECL)
5043 && !unmodifiable_var_p (vi->decl))
5044 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
5045 }
5046
5047 return true;
5048 }
5049
5050 /* Compute the call-used variables. */
5051
5052 void
5053 compute_call_used_vars (void)
5054 {
5055 varinfo_t vi;
5056 unsigned int i;
5057 bitmap_iterator bi;
5058 bool has_anything_id = false;
5059
5060 if (!have_alias_info)
5061 return;
5062
5063 /* This variable may have been collapsed, let's get the real
5064 variable for callused_id. */
5065 vi = get_varinfo (find (callused_id));
5066
5067 /* Mark variables in the solution call-used. */
5068 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5069 {
5070 varinfo_t vi = get_varinfo (i);
5071
5072 if (vi->is_artificial_var)
5073 {
5074 /* For anything_id and integer_id we need to make
5075 all local addressable vars call-used. */
5076 if (vi->id == anything_id
5077 || vi->id == integer_id)
5078 has_anything_id = true;
5079 }
5080
5081 /* Only artificial heap-vars are further interesting. */
5082 if (vi->is_artificial_var && !vi->is_heap_var)
5083 continue;
5084
5085 if ((TREE_CODE (vi->decl) == VAR_DECL
5086 || TREE_CODE (vi->decl) == PARM_DECL
5087 || TREE_CODE (vi->decl) == RESULT_DECL)
5088 && !unmodifiable_var_p (vi->decl))
5089 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
5090 }
5091
5092 /* If anything is call-used, add all addressable locals to the set. */
5093 if (has_anything_id)
5094 bitmap_ior_into (gimple_call_used_vars (cfun),
5095 gimple_addressable_vars (cfun));
5096 }
5097
5098
5099 /* Dump points-to information to OUTFILE. */
5100
5101 void
5102 dump_sa_points_to_info (FILE *outfile)
5103 {
5104 unsigned int i;
5105
5106 fprintf (outfile, "\nPoints-to sets\n\n");
5107
5108 if (dump_flags & TDF_STATS)
5109 {
5110 fprintf (outfile, "Stats:\n");
5111 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5112 fprintf (outfile, "Non-pointer vars: %d\n",
5113 stats.nonpointer_vars);
5114 fprintf (outfile, "Statically unified vars: %d\n",
5115 stats.unified_vars_static);
5116 fprintf (outfile, "Dynamically unified vars: %d\n",
5117 stats.unified_vars_dynamic);
5118 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5119 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5120 fprintf (outfile, "Number of implicit edges: %d\n",
5121 stats.num_implicit_edges);
5122 }
5123
5124 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5125 dump_solution_for_var (outfile, i);
5126 }
5127
5128
5129 /* Debug points-to information to stderr. */
5130
5131 void
5132 debug_sa_points_to_info (void)
5133 {
5134 dump_sa_points_to_info (stderr);
5135 }
5136
5137
5138 /* Initialize the always-existing constraint variables NULL, ANYTHING,
5139 READONLY, ESCAPED, NONLOCAL, CALLUSED, STOREDANYTHING and INTEGER. */
5140
5141 static void
5142 init_base_vars (void)
5143 {
5144 struct constraint_expr lhs, rhs;
5145
5146 /* Create the NULL variable, used to represent that a variable points
5147 to NULL. */
5148 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5149 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5150 insert_vi_for_tree (nothing_tree, var_nothing);
5151 var_nothing->is_artificial_var = 1;
5152 var_nothing->offset = 0;
5153 var_nothing->size = ~0;
5154 var_nothing->fullsize = ~0;
5155 var_nothing->is_special_var = 1;
5156 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5157
5158 /* Create the ANYTHING variable, used to represent that a variable
5159 points to some unknown piece of memory. */
5160 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5161 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5162 insert_vi_for_tree (anything_tree, var_anything);
5163 var_anything->is_artificial_var = 1;
5164 var_anything->size = ~0;
5165 var_anything->offset = 0;
5166 var_anything->next = NULL;
5167 var_anything->fullsize = ~0;
5168 var_anything->is_special_var = 1;
5169
5170 /* Anything points to anything. This makes deref constraints just
5171 work in the presence of linked lists and other p = *p type loops,
5172 by saying that *ANYTHING = ANYTHING. */
5173 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5174 lhs.type = SCALAR;
5175 lhs.var = anything_id;
5176 lhs.offset = 0;
5177 rhs.type = ADDRESSOF;
5178 rhs.var = anything_id;
5179 rhs.offset = 0;
5180
5181 /* This specifically does not use process_constraint because
5182 process_constraint ignores all anything = anything constraints, since all
5183 but this one are redundant. */
5184 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5185
5186 /* Create the READONLY variable, used to represent that a variable
5187 points to readonly memory. */
5188 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5189 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5190 var_readonly->is_artificial_var = 1;
5191 var_readonly->offset = 0;
5192 var_readonly->size = ~0;
5193 var_readonly->fullsize = ~0;
5194 var_readonly->next = NULL;
5195 var_readonly->is_special_var = 1;
5196 insert_vi_for_tree (readonly_tree, var_readonly);
5197 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5198
5199 /* readonly memory points to anything, in order to make deref
5200 easier. In reality, it points to anything the particular
5201 readonly variable can point to, but we don't track this
5202 separately. */
5203 lhs.type = SCALAR;
5204 lhs.var = readonly_id;
5205 lhs.offset = 0;
5206 rhs.type = ADDRESSOF;
5207 rhs.var = readonly_id; /* FIXME */
5208 rhs.offset = 0;
5209 process_constraint (new_constraint (lhs, rhs));
5210
5211 /* Create the ESCAPED variable, used to represent the set of escaped
5212 memory. */
5213 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5214 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5215 insert_vi_for_tree (escaped_tree, var_escaped);
5216 var_escaped->is_artificial_var = 1;
5217 var_escaped->offset = 0;
5218 var_escaped->size = ~0;
5219 var_escaped->fullsize = ~0;
5220 var_escaped->is_special_var = 0;
5221 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5222 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5223
5224 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5225 lhs.type = SCALAR;
5226 lhs.var = escaped_id;
5227 lhs.offset = 0;
5228 rhs.type = DEREF;
5229 rhs.var = escaped_id;
5230 rhs.offset = 0;
5231 process_constraint (new_constraint (lhs, rhs));
5232
5233 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5234 memory. */
5235 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5236 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5237 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5238 var_nonlocal->is_artificial_var = 1;
5239 var_nonlocal->offset = 0;
5240 var_nonlocal->size = ~0;
5241 var_nonlocal->fullsize = ~0;
5242 var_nonlocal->is_special_var = 1;
5243 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5244
5245 /* Nonlocal memory points to escaped (which includes nonlocal),
5246 in order to make deref easier. */
5247 lhs.type = SCALAR;
5248 lhs.var = nonlocal_id;
5249 lhs.offset = 0;
5250 rhs.type = ADDRESSOF;
5251 rhs.var = escaped_id;
5252 rhs.offset = 0;
5253 process_constraint (new_constraint (lhs, rhs));
5254
5255 /* Create the CALLUSED variable, used to represent the set of call-used
5256 memory. */
5257 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5258 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5259 insert_vi_for_tree (callused_tree, var_callused);
5260 var_callused->is_artificial_var = 1;
5261 var_callused->offset = 0;
5262 var_callused->size = ~0;
5263 var_callused->fullsize = ~0;
5264 var_callused->is_special_var = 0;
5265 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5266
5267 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5268 lhs.type = SCALAR;
5269 lhs.var = callused_id;
5270 lhs.offset = 0;
5271 rhs.type = DEREF;
5272 rhs.var = callused_id;
5273 rhs.offset = 0;
5274 process_constraint (new_constraint (lhs, rhs));
5275
5276 /* Create the STOREDANYTHING variable, used to represent the set of
5277 variables stored to *ANYTHING. */
5278 storedanything_tree = create_tmp_var_raw (ptr_type_node, "STOREDANYTHING");
5279 var_storedanything = new_var_info (storedanything_tree, storedanything_id,
5280 "STOREDANYTHING");
5281 insert_vi_for_tree (storedanything_tree, var_storedanything);
5282 var_storedanything->is_artificial_var = 1;
5283 var_storedanything->offset = 0;
5284 var_storedanything->size = ~0;
5285 var_storedanything->fullsize = ~0;
5286 var_storedanything->is_special_var = 0;
5287 VEC_safe_push (varinfo_t, heap, varmap, var_storedanything);
5288
5289 /* Create the INTEGER variable, used to represent that a variable points
5290 to an INTEGER. */
5291 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5292 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5293 insert_vi_for_tree (integer_tree, var_integer);
5294 var_integer->is_artificial_var = 1;
5295 var_integer->size = ~0;
5296 var_integer->fullsize = ~0;
5297 var_integer->offset = 0;
5298 var_integer->next = NULL;
5299 var_integer->is_special_var = 1;
5300 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5301
5302 /* INTEGER = ANYTHING, because we don't know where a dereference of
5303 a random integer will point to. */
5304 lhs.type = SCALAR;
5305 lhs.var = integer_id;
5306 lhs.offset = 0;
5307 rhs.type = ADDRESSOF;
5308 rhs.var = anything_id;
5309 rhs.offset = 0;
5310 process_constraint (new_constraint (lhs, rhs));
5311
5312 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5313 everything pointed to by escaped can also point to escaped. */
5314 lhs.type = DEREF;
5315 lhs.var = escaped_id;
5316 lhs.offset = 0;
5317 rhs.type = ADDRESSOF;
5318 rhs.var = escaped_id;
5319 rhs.offset = 0;
5320 process_constraint (new_constraint (lhs, rhs));
5321
5322 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5323 everything pointed to by escaped can also point to nonlocal. */
5324 lhs.type = DEREF;
5325 lhs.var = escaped_id;
5326 lhs.offset = 0;
5327 rhs.type = ADDRESSOF;
5328 rhs.var = nonlocal_id;
5329 rhs.offset = 0;
5330 process_constraint (new_constraint (lhs, rhs));
5331 }
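/* Editorial summary of the base constraints set up above, in the
   constraint notation used throughout this file:

       ANYTHING = &ANYTHING      READONLY = &READONLY   (see FIXME above)
       ESCAPED  = *ESCAPED       NONLOCAL = &ESCAPED
       CALLUSED = *CALLUSED      INTEGER  = &ANYTHING
       *ESCAPED = &ESCAPED       *ESCAPED = &NONLOCAL  */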
5332
5333 /* Initialize things necessary to perform PTA. */
5334
5335 static void
5336 init_alias_vars (void)
5337 {
5338 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
5339
5340 bitmap_obstack_initialize (&pta_obstack);
5341 bitmap_obstack_initialize (&oldpta_obstack);
5342 bitmap_obstack_initialize (&predbitmap_obstack);
5343
5344 constraint_pool = create_alloc_pool ("Constraint pool",
5345 sizeof (struct constraint), 30);
5346 variable_info_pool = create_alloc_pool ("Variable info pool",
5347 sizeof (struct variable_info), 30);
5348 constraints = VEC_alloc (constraint_t, heap, 8);
5349 varmap = VEC_alloc (varinfo_t, heap, 8);
5350 vi_for_tree = pointer_map_create ();
5351
5352 memset (&stats, 0, sizeof (stats));
5353 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5354 shared_bitmap_eq, free);
5355 init_base_vars ();
5356 }
5357
5358 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5359 predecessor edges. */
5360
5361 static void
5362 remove_preds_and_fake_succs (constraint_graph_t graph)
5363 {
5364 unsigned int i;
5365
5366 /* Clear the implicit ref and address nodes from the successor
5367 lists. */
5368 for (i = 0; i < FIRST_REF_NODE; i++)
5369 {
5370 if (graph->succs[i])
5371 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5372 FIRST_REF_NODE * 2);
5373 }
5374
5375 /* Free the successor list for the non-ref nodes. */
5376 for (i = FIRST_REF_NODE; i < graph->size; i++)
5377 {
5378 if (graph->succs[i])
5379 BITMAP_FREE (graph->succs[i]);
5380 }
5381
5382 /* Now reallocate the successor list to its new size, and blow away
5383 the predecessor bitmaps. */
5384 graph->size = VEC_length (varinfo_t, varmap);
5385 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5386
5387 free (graph->implicit_preds);
5388 graph->implicit_preds = NULL;
5389 free (graph->preds);
5390 graph->preds = NULL;
5391 bitmap_obstack_release (&predbitmap_obstack);
5392 }
5393
5394 /* Compute the set of variables we can't TBAA prune. */
5395
5396 static void
5397 compute_tbaa_pruning (void)
5398 {
5399 unsigned int size = VEC_length (varinfo_t, varmap);
5400 unsigned int i;
5401 bool any;
5402
5403 changed_count = 0;
5404 changed = sbitmap_alloc (size);
5405 sbitmap_zero (changed);
5406
5407 /* Mark all initial no_tbaa_pruning nodes as changed. */
5408 any = false;
5409 for (i = 0; i < size; ++i)
5410 {
5411 varinfo_t ivi = get_varinfo (i);
5412
5413 if (find (i) == i && ivi->no_tbaa_pruning)
5414 {
5415 any = true;
5416 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5417 || VEC_length (constraint_t, graph->complex[i]) > 0)
5418 {
5419 SET_BIT (changed, i);
5420 ++changed_count;
5421 }
5422 }
5423 }
5424
5425 while (changed_count > 0)
5426 {
5427 struct topo_info *ti = init_topo_info ();
5428 ++stats.iterations;
5429
5430 compute_topo_order (graph, ti);
5431
5432 while (VEC_length (unsigned, ti->topo_order) != 0)
5433 {
5434 bitmap_iterator bi;
5435
5436 i = VEC_pop (unsigned, ti->topo_order);
5437
5438 /* If this variable is not a representative, skip it. */
5439 if (find (i) != i)
5440 continue;
5441
5442 /* If the node has changed, we need to process the complex
5443 constraints and outgoing edges again. */
5444 if (TEST_BIT (changed, i))
5445 {
5446 unsigned int j;
5447 constraint_t c;
5448 VEC(constraint_t,heap) *complex = graph->complex[i];
5449
5450 RESET_BIT (changed, i);
5451 --changed_count;
5452
5453 /* Process the complex copy constraints. */
5454 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5455 {
5456 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5457 {
5458 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5459
5460 if (!lhsvi->no_tbaa_pruning)
5461 {
5462 lhsvi->no_tbaa_pruning = true;
5463 if (!TEST_BIT (changed, lhsvi->id))
5464 {
5465 SET_BIT (changed, lhsvi->id);
5466 ++changed_count;
5467 }
5468 }
5469 }
5470 }
5471
5472 /* Propagate to all successors. */
5473 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5474 {
5475 unsigned int to = find (j);
5476 varinfo_t tovi = get_varinfo (to);
5477
5478 /* Don't propagate to ourselves. */
5479 if (to == i)
5480 continue;
5481
5482 if (!tovi->no_tbaa_pruning)
5483 {
5484 tovi->no_tbaa_pruning = true;
5485 if (!TEST_BIT (changed, to))
5486 {
5487 SET_BIT (changed, to);
5488 ++changed_count;
5489 }
5490 }
5491 }
5492 }
5493 }
5494
5495 free_topo_info (ti);
5496 }
5497
5498 sbitmap_free (changed);
5499
5500 if (any)
5501 {
5502 for (i = 0; i < size; ++i)
5503 {
5504 varinfo_t ivi = get_varinfo (i);
5505 varinfo_t ivip = get_varinfo (find (i));
5506
5507 if (ivip->no_tbaa_pruning)
5508 {
5509 tree var = ivi->decl;
5510
5511 if (TREE_CODE (var) == SSA_NAME)
5512 var = SSA_NAME_VAR (var);
5513
5514 if (POINTER_TYPE_P (TREE_TYPE (var)))
5515 {
5516 DECL_NO_TBAA_P (var) = 1;
5517
5518 /* Tell the RTL layer that this pointer can alias
5519 anything. */
5520 DECL_POINTER_ALIAS_SET (var) = 0;
5521 }
5522 }
5523 }
5524 }
5525 }
5526
5527 /* Create points-to sets for the current function. See the comments
5528 at the start of the file for an algorithmic overview. */
5529
5530 void
5531 compute_points_to_sets (void)
5532 {
5533 struct scc_info *si;
5534 basic_block bb;
5535
5536 timevar_push (TV_TREE_PTA);
5537
5538 init_alias_vars ();
5539 init_alias_heapvars ();
5540
5541 intra_create_variable_infos ();
5542
5543 /* Now walk all statements and derive aliases. */
5544 FOR_EACH_BB (bb)
5545 {
5546 gimple_stmt_iterator gsi;
5547
5548 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5549 {
5550 gimple phi = gsi_stmt (gsi);
5551
5552 if (is_gimple_reg (gimple_phi_result (phi)))
5553 find_func_aliases (phi);
5554 }
5555
5556 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5557 find_func_aliases (gsi_stmt (gsi));
5558 }
5559
5560
5561 if (dump_file)
5562 {
5563 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5564 dump_constraints (dump_file);
5565 }
5566
5567 if (dump_file)
5568 fprintf (dump_file,
5569 "\nCollapsing static cycles and doing variable "
5570 "substitution\n");
5571
5572 init_graph (VEC_length (varinfo_t, varmap) * 2);
5573
5574 if (dump_file)
5575 fprintf (dump_file, "Building predecessor graph\n");
5576 build_pred_graph ();
5577
5578 if (dump_file)
5579 fprintf (dump_file, "Detecting pointer and location "
5580 "equivalences\n");
5581 si = perform_var_substitution (graph);
5582
5583 if (dump_file)
5584 fprintf (dump_file, "Rewriting constraints and unifying "
5585 "variables\n");
5586 rewrite_constraints (graph, si);
5587
5588 build_succ_graph ();
5589 free_var_substitution_info (si);
5590
5591 if (dump_file && (dump_flags & TDF_GRAPH))
5592 dump_constraint_graph (dump_file);
5593
5594 move_complex_constraints (graph);
5595
5596 if (dump_file)
5597 fprintf (dump_file, "Uniting pointer but not location equivalent "
5598 "variables\n");
5599 unite_pointer_equivalences (graph);
5600
5601 if (dump_file)
5602 fprintf (dump_file, "Finding indirect cycles\n");
5603 find_indirect_cycles (graph);
5604
5605 /* Implicit nodes and predecessors are no longer necessary at this
5606 point. */
5607 remove_preds_and_fake_succs (graph);
5608
5609 if (dump_file)
5610 fprintf (dump_file, "Solving graph\n");
5611
5612 solve_graph (graph);
5613
5614 compute_tbaa_pruning ();
5615
5616 if (dump_file)
5617 dump_sa_points_to_info (dump_file);
5618
5619 have_alias_info = true;
5620
5621 timevar_pop (TV_TREE_PTA);
5622 }
5623
5624
5625 /* Delete created points-to sets. */
5626
5627 void
5628 delete_points_to_sets (void)
5629 {
5630 unsigned int i;
5631
5632 htab_delete (shared_bitmap_table);
5633 if (dump_file && (dump_flags & TDF_STATS))
5634 fprintf (dump_file, "Points to sets created:%d\n",
5635 stats.points_to_sets_created);
5636
5637 pointer_map_destroy (vi_for_tree);
5638 bitmap_obstack_release (&pta_obstack);
5639 VEC_free (constraint_t, heap, constraints);
5640
5641 for (i = 0; i < graph->size; i++)
5642 VEC_free (constraint_t, heap, graph->complex[i]);
5643 free (graph->complex);
5644
5645 free (graph->rep);
5646 free (graph->succs);
5647 free (graph->pe);
5648 free (graph->pe_rep);
5649 free (graph->indirect_cycles);
5650 free (graph);
5651
5652 VEC_free (varinfo_t, heap, varmap);
5653 free_alloc_pool (variable_info_pool);
5654 free_alloc_pool (constraint_pool);
5655 have_alias_info = false;
5656 }
5657
5658 /* Return true if we should execute IPA PTA. */
5659 static bool
5660 gate_ipa_pta (void)
5661 {
5662 return (flag_ipa_pta
5663 /* Don't bother doing anything if the program has errors. */
5664 && !(errorcount || sorrycount));
5665 }
5666
5667 /* Execute the driver for IPA PTA. */
5668 static unsigned int
5669 ipa_pta_execute (void)
5670 {
5671 struct cgraph_node *node;
5672 struct scc_info *si;
5673
5674 in_ipa_mode = 1;
5675 init_alias_heapvars ();
5676 init_alias_vars ();
5677
5678 for (node = cgraph_nodes; node; node = node->next)
5679 {
5680 if (!node->analyzed || cgraph_is_master_clone (node))
5681 {
5682 unsigned int varid;
5683
5684 varid = create_function_info_for (node->decl,
5685 cgraph_node_name (node));
5686 if (node->local.externally_visible)
5687 {
5688 varinfo_t fi = get_varinfo (varid);
5689 for (; fi; fi = fi->next)
5690 make_constraint_from (fi, anything_id);
5691 }
5692 }
5693 }
5694 for (node = cgraph_nodes; node; node = node->next)
5695 {
5696 if (node->analyzed && cgraph_is_master_clone (node))
5697 {
5698 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5699 basic_block bb;
5700 tree old_func_decl = current_function_decl;
5701 if (dump_file)
5702 fprintf (dump_file,
5703 "Generating constraints for %s\n",
5704 cgraph_node_name (node));
5705 push_cfun (func);
5706 current_function_decl = node->decl;
5707
5708 FOR_EACH_BB_FN (bb, func)
5709 {
5710 gimple_stmt_iterator gsi;
5711
5712 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5713 gsi_next (&gsi))
5714 {
5715 gimple phi = gsi_stmt (gsi);
5716
5717 if (is_gimple_reg (gimple_phi_result (phi)))
5718 find_func_aliases (phi);
5719 }
5720
5721 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5722 find_func_aliases (gsi_stmt (gsi));
5723 }
5724 current_function_decl = old_func_decl;
5725 pop_cfun ();
5726 }
5727 else
5728 {
5729 /* Make it point to anything. */
5730 }
5731 }
5732
5733 if (dump_file)
5734 {
5735 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5736 dump_constraints (dump_file);
5737 }
5738
5739 if (dump_file)
5740 fprintf (dump_file,
5741 "\nCollapsing static cycles and doing variable "
5742 "substitution:\n");
5743
5744 init_graph (VEC_length (varinfo_t, varmap) * 2);
5745 build_pred_graph ();
5746 si = perform_var_substitution (graph);
5747 rewrite_constraints (graph, si);
5748
5749 build_succ_graph ();
5750 free_var_substitution_info (si);
5751 move_complex_constraints (graph);
5752 unite_pointer_equivalences (graph);
5753 find_indirect_cycles (graph);
5754
5755 /* Implicit nodes and predecessors are no longer necessary at this
5756 point. */
5757 remove_preds_and_fake_succs (graph);
5758
5759 if (dump_file)
5760 fprintf (dump_file, "\nSolving graph\n");
5761
5762 solve_graph (graph);
5763
5764 if (dump_file)
5765 dump_sa_points_to_info (dump_file);
5766
5767 in_ipa_mode = 0;
5768 delete_alias_heapvars ();
5769 delete_points_to_sets ();
5770 return 0;
5771 }
5772
5773 struct simple_ipa_opt_pass pass_ipa_pta =
5774 {
5775 {
5776 SIMPLE_IPA_PASS,
5777 "pta", /* name */
5778 gate_ipa_pta, /* gate */
5779 ipa_pta_execute, /* execute */
5780 NULL, /* sub */
5781 NULL, /* next */
5782 0, /* static_pass_number */
5783 TV_IPA_PTA, /* tv_id */
5784 0, /* properties_required */
5785 0, /* properties_provided */
5786 0, /* properties_destroyed */
5787 0, /* todo_flags_start */
5788 TODO_update_ssa /* todo_flags_finish */
5789 }
5790 };
5791
5792 /* Initialize the heapvar-for-statement mapping. */
5793 void
5794 init_alias_heapvars (void)
5795 {
5796 if (!heapvar_for_stmt)
5797 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5798 NULL);
5799 }
5800
5801 void
5802 delete_alias_heapvars (void)
5803 {
5804 htab_delete (heapvar_for_stmt);
5805 heapvar_for_stmt = NULL;
5806 }
5807
5808 #include "gt-tree-ssa-structalias.h"