111
|
1 /* Basic IPA utilities for type inheritance graph construction and
|
|
2 devirtualization.
|
145
|
3 Copyright (C) 2013-2020 Free Software Foundation, Inc.
|
111
|
4 Contributed by Jan Hubicka
|
|
5
|
|
6 This file is part of GCC.
|
|
7
|
|
8 GCC is free software; you can redistribute it and/or modify it under
|
|
9 the terms of the GNU General Public License as published by the Free
|
|
10 Software Foundation; either version 3, or (at your option) any later
|
|
11 version.
|
|
12
|
|
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
16 for more details.
|
|
17
|
|
18 You should have received a copy of the GNU General Public License
|
|
19 along with GCC; see the file COPYING3. If not see
|
|
20 <http://www.gnu.org/licenses/>. */
|
|
21
|
|
22 /* Brief vocabulary:
|
|
23 ODR = One Definition Rule
|
|
24 In short, the ODR states that:
|
|
25 1 In any translation unit, a template, type, function, or object can
|
|
26 have no more than one definition. Some of these can have any number
|
|
27 of declarations. A definition provides an instance.
|
|
28 2 In the entire program, an object or non-inline function cannot have
|
|
29 more than one definition; if an object or function is used, it must
|
|
30 have exactly one definition. You can declare an object or function
|
|
31 that is never used, in which case you don't have to provide
|
|
32 a definition. In no event can there be more than one definition.
|
|
33 3 Some things, like types, templates, and extern inline functions, can
|
|
34 be defined in more than one translation unit. For a given entity,
|
|
35 each definition must be the same. Non-extern objects and functions
|
|
36 in different translation units are different entities, even if their
|
|
37 names and types are the same.
|
|
38
|
|
39 OTR = OBJ_TYPE_REF
|
|
40 This is the Gimple representation of type information of a polymorphic call.
|
|
41 It contains two parameters:
|
|
42 otr_type is a type of class whose method is called.
|
|
43 otr_token is the index into virtual table where address is taken.
|
|
44
|
|
45 BINFO
|
|
46 This is the type inheritance information attached to each tree
|
|
47 RECORD_TYPE by the C++ frontend. It provides information about base
|
|
48 types and virtual tables.
|
|
49
|
|
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
|
|
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
|
|
52 BINFO_VTABLE.
|
|
53
|
|
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
|
|
55 vector. Members of this vectors are not BINFOs associated
|
|
56 with a base type. Rather they are new copies of BINFOs
|
|
57 (base BINFOs). Their virtual tables may differ from
|
|
58 virtual table of the base type. Also BINFO_OFFSET specifies
|
|
59 offset of the base within the type.
|
|
60
|
|
61 In the case of single inheritance, the virtual table is shared
|
|
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
|
|
63 inheritance the individual virtual tables are pointer to by
|
|
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
|
|
65 binfo associated to the base type).
|
|
66
|
|
67 BINFO lookup for a given base type and offset can be done by
|
|
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
|
|
69 can be used for lookup of virtual methods associated with the
|
|
70 base type.
|
|
71
|
|
72 token
|
|
73 This is an index of virtual method in virtual table associated
|
|
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
|
|
75 or from DECL_VINDEX of a given virtual table.
|
|
76
|
|
77 polymorphic (indirect) call
|
|
78 This is callgraph representation of virtual method call. Every
|
|
79 polymorphic call contains otr_type and otr_token taken from
|
|
80 original OBJ_TYPE_REF at callgraph construction time.
|
|
81
|
|
82 What we do here:
|
|
83
|
|
84 build_type_inheritance_graph triggers a construction of the type inheritance
|
|
85 graph.
|
|
86
|
|
87 We reconstruct it based on types of methods we see in the unit.
|
|
88 This means that the graph is not complete. Types with no methods are not
|
|
89 inserted into the graph. Also types without virtual methods are not
|
|
90 represented at all, though it may be easy to add this.
|
|
91
|
|
92 The inheritance graph is represented as follows:
|
|
93
|
|
94 Vertices are structures odr_type. Every odr_type may correspond
|
|
95 to one or more tree type nodes that are equivalent by ODR rule.
|
|
96 (the multiple type nodes appear only with linktime optimization)
|
|
97
|
|
98 Edges are represented by odr_type->base and odr_type->derived_types.
|
|
99 At the moment we do not track offsets of types for multiple inheritance.
|
|
100 Adding this is easy.
|
|
101
|
|
102 possible_polymorphic_call_targets returns, given an parameters found in
|
|
103 indirect polymorphic edge all possible polymorphic call targets of the call.
|
|
104
|
|
105 pass_ipa_devirt performs simple speculative devirtualization.
|
|
106 */
|
|
107
|
|
108 #include "config.h"
|
|
109 #include "system.h"
|
|
110 #include "coretypes.h"
|
|
111 #include "backend.h"
|
|
112 #include "rtl.h"
|
|
113 #include "tree.h"
|
|
114 #include "gimple.h"
|
|
115 #include "alloc-pool.h"
|
|
116 #include "tree-pass.h"
|
|
117 #include "cgraph.h"
|
|
118 #include "lto-streamer.h"
|
|
119 #include "fold-const.h"
|
|
120 #include "print-tree.h"
|
|
121 #include "calls.h"
|
|
122 #include "ipa-utils.h"
|
|
123 #include "gimple-fold.h"
|
|
124 #include "symbol-summary.h"
|
|
125 #include "tree-vrp.h"
|
|
126 #include "ipa-prop.h"
|
|
127 #include "ipa-fnsummary.h"
|
|
128 #include "demangle.h"
|
|
129 #include "dbgcnt.h"
|
|
130 #include "gimple-pretty-print.h"
|
|
131 #include "intl.h"
|
|
132 #include "stringpool.h"
|
|
133 #include "attribs.h"
|
|
134
|
|
135 /* Hash based set of pairs of types. */
|
|
136 struct type_pair
|
|
137 {
|
|
138 tree first;
|
|
139 tree second;
|
|
140 };
|
|
141
|
|
142 template <>
|
|
143 struct default_hash_traits <type_pair>
|
|
144 : typed_noop_remove <type_pair>
|
|
145 {
|
|
146 GTY((skip)) typedef type_pair value_type;
|
|
147 GTY((skip)) typedef type_pair compare_type;
|
|
148 static hashval_t
|
|
149 hash (type_pair p)
|
|
150 {
|
|
151 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
|
|
152 }
|
145
|
153 static const bool empty_zero_p = true;
|
111
|
154 static bool
|
|
155 is_empty (type_pair p)
|
|
156 {
|
|
157 return p.first == NULL;
|
|
158 }
|
|
159 static bool
|
|
160 is_deleted (type_pair p ATTRIBUTE_UNUSED)
|
|
161 {
|
|
162 return false;
|
|
163 }
|
|
164 static bool
|
|
165 equal (const type_pair &a, const type_pair &b)
|
|
166 {
|
|
167 return a.first==b.first && a.second == b.second;
|
|
168 }
|
|
169 static void
|
|
170 mark_empty (type_pair &e)
|
|
171 {
|
|
172 e.first = NULL;
|
|
173 }
|
|
174 };
|
|
175
|
145
|
176 /* HACK alert: this is used to communicate with ipa-inline-transform that
|
|
177 thunk is being expanded and there is no need to clear the polymorphic
|
|
178 call target cache. */
|
|
179 bool thunk_expansion;
|
|
180
|
111
|
181 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
|
|
182 hash_set<type_pair> *,
|
|
183 location_t, location_t);
|
145
|
184 static void warn_odr (tree t1, tree t2, tree st1, tree st2,
|
|
185 bool warn, bool *warned, const char *reason);
|
111
|
186
|
|
187 static bool odr_violation_reported = false;
|
|
188
|
|
189
|
|
190 /* Pointer set of all call targets appearing in the cache. */
|
|
191 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
|
|
192
|
|
193 /* The node of type inheritance graph. For each type unique in
|
|
194 One Definition Rule (ODR) sense, we produce one node linking all
|
|
195 main variants of types equivalent to it, bases and derived types. */
|
|
196
|
|
197 struct GTY(()) odr_type_d
|
|
198 {
|
|
199 /* leader type. */
|
|
200 tree type;
|
|
201 /* All bases; built only for main variants of types. */
|
|
202 vec<odr_type> GTY((skip)) bases;
|
|
203 /* All derived types with virtual methods seen in unit;
|
|
204 built only for main variants of types. */
|
|
205 vec<odr_type> GTY((skip)) derived_types;
|
|
206
|
|
207 /* All equivalent types, if more than one. */
|
|
208 vec<tree, va_gc> *types;
|
|
209 /* Set of all equivalent types, if NON-NULL. */
|
|
210 hash_set<tree> * GTY((skip)) types_set;
|
|
211
|
|
212 /* Unique ID indexing the type in odr_types array. */
|
|
213 int id;
|
|
214 /* Is it in anonymous namespace? */
|
|
215 bool anonymous_namespace;
|
|
216 /* Do we know about all derivations of given type? */
|
|
217 bool all_derivations_known;
|
|
218 /* Did we report ODR violation here? */
|
|
219 bool odr_violated;
|
145
|
220 /* Set when virtual table without RTTI prevailed table with. */
|
111
|
221 bool rtti_broken;
|
145
|
222 /* Set when the canonical type is determined using the type name. */
|
|
223 bool tbaa_enabled;
|
111
|
224 };
|
|
225
|
|
226 /* Return TRUE if all derived types of T are known and thus
|
|
227 we may consider the walk of derived type complete.
|
|
228
|
|
229 This is typically true only for final anonymous namespace types and types
|
|
230 defined within functions (that may be COMDAT and thus shared across units,
|
|
231 but with the same set of derived types). */
|
|
232
|
|
233 bool
|
|
234 type_all_derivations_known_p (const_tree t)
|
|
235 {
|
|
236 if (TYPE_FINAL_P (t))
|
|
237 return true;
|
|
238 if (flag_ltrans)
|
|
239 return false;
|
|
240 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
|
|
241 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
|
|
242 return true;
|
|
243 if (type_in_anonymous_namespace_p (t))
|
|
244 return true;
|
|
245 return (decl_function_context (TYPE_NAME (t)) != NULL);
|
|
246 }
|
|
247
|
|
248 /* Return TRUE if type's constructors are all visible. */
|
|
249
|
|
250 static bool
|
|
251 type_all_ctors_visible_p (tree t)
|
|
252 {
|
|
253 return !flag_ltrans
|
|
254 && symtab->state >= CONSTRUCTION
|
145
|
255 /* We cannot always use type_all_derivations_known_p.
|
111
|
256 For function local types we must assume case where
|
|
257 the function is COMDAT and shared in between units.
|
|
258
|
|
259 TODO: These cases are quite easy to get, but we need
|
|
260 to keep track of C++ privatizing via -Wno-weak
|
|
261 as well as the IPA privatizing. */
|
|
262 && type_in_anonymous_namespace_p (t);
|
|
263 }
|
|
264
|
|
265 /* Return TRUE if type may have instance. */
|
|
266
|
|
267 static bool
|
|
268 type_possibly_instantiated_p (tree t)
|
|
269 {
|
|
270 tree vtable;
|
|
271 varpool_node *vnode;
|
|
272
|
|
273 /* TODO: Add abstract types here. */
|
|
274 if (!type_all_ctors_visible_p (t))
|
|
275 return true;
|
|
276
|
|
277 vtable = BINFO_VTABLE (TYPE_BINFO (t));
|
|
278 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
|
|
279 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
|
|
280 vnode = varpool_node::get (vtable);
|
|
281 return vnode && vnode->definition;
|
|
282 }
|
|
283
|
|
284 /* Hash used to unify ODR types based on their mangled name and for anonymous
|
|
285 namespace types. */
|
|
286
|
|
287 struct odr_name_hasher : pointer_hash <odr_type_d>
|
|
288 {
|
|
289 typedef union tree_node *compare_type;
|
|
290 static inline hashval_t hash (const odr_type_d *);
|
|
291 static inline bool equal (const odr_type_d *, const tree_node *);
|
|
292 static inline void remove (odr_type_d *);
|
|
293 };
|
|
294
|
|
295 static bool
|
|
296 can_be_name_hashed_p (tree t)
|
|
297 {
|
|
298 return (!in_lto_p || odr_type_p (t));
|
|
299 }
|
|
300
|
|
301 /* Hash type by its ODR name. */
|
|
302
|
|
303 static hashval_t
|
|
304 hash_odr_name (const_tree t)
|
|
305 {
|
145
|
306 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
|
111
|
307
|
|
308 /* If not in LTO, all main variants are unique, so we can do
|
|
309 pointer hash. */
|
|
310 if (!in_lto_p)
|
|
311 return htab_hash_pointer (t);
|
|
312
|
|
313 /* Anonymous types are unique. */
|
|
314 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
|
|
315 return htab_hash_pointer (t);
|
|
316
|
|
317 gcc_checking_assert (TYPE_NAME (t)
|
|
318 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
|
|
319 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
|
|
320 }
|
|
321
|
|
322 /* Return the computed hashcode for ODR_TYPE. */
|
|
323
|
|
324 inline hashval_t
|
|
325 odr_name_hasher::hash (const odr_type_d *odr_type)
|
|
326 {
|
|
327 return hash_odr_name (odr_type->type);
|
|
328 }
|
|
329
|
|
330 /* For languages with One Definition Rule, work out if
|
|
331 types are the same based on their name.
|
|
332
|
|
333 This is non-trivial for LTO where minor differences in
|
|
334 the type representation may have prevented type merging
|
|
335 to merge two copies of otherwise equivalent type.
|
|
336
|
|
337 Until we start streaming mangled type names, this function works
|
|
338 only for polymorphic types.
|
|
339 */
|
|
340
|
|
341 bool
|
145
|
342 types_same_for_odr (const_tree type1, const_tree type2)
|
111
|
343 {
|
|
344 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
|
|
345
|
145
|
346 type1 = TYPE_MAIN_VARIANT (type1);
|
|
347 type2 = TYPE_MAIN_VARIANT (type2);
|
111
|
348
|
|
349 if (type1 == type2)
|
|
350 return true;
|
|
351
|
|
352 if (!in_lto_p)
|
|
353 return false;
|
|
354
|
145
|
355 /* Anonymous namespace types are never duplicated. */
|
111
|
356 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
|
|
357 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
|
|
358 return false;
|
|
359
|
145
|
360 /* If both type has mangled defined check if they are same.
|
|
361 Watch for anonymous types which are all mangled as "<anon">. */
|
|
362 if (!type_with_linkage_p (type1) || !type_with_linkage_p (type2))
|
|
363 return false;
|
|
364 if (type_in_anonymous_namespace_p (type1)
|
|
365 || type_in_anonymous_namespace_p (type2))
|
|
366 return false;
|
111
|
367 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
|
|
368 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
|
|
369 }
|
|
370
|
|
371 /* Return true if we can decide on ODR equivalency.
|
|
372
|
|
373 In non-LTO it is always decide, in LTO however it depends in the type has
|
145
|
374 ODR info attached. */
|
111
|
375
|
|
376 bool
|
145
|
377 types_odr_comparable (tree t1, tree t2)
|
111
|
378 {
|
|
379 return (!in_lto_p
|
145
|
380 || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
|
|
381 || (odr_type_p (TYPE_MAIN_VARIANT (t1))
|
|
382 && odr_type_p (TYPE_MAIN_VARIANT (t2))));
|
111
|
383 }
|
|
384
|
|
385 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
|
|
386 known, be conservative and return false. */
|
|
387
|
|
388 bool
|
|
389 types_must_be_same_for_odr (tree t1, tree t2)
|
|
390 {
|
|
391 if (types_odr_comparable (t1, t2))
|
|
392 return types_same_for_odr (t1, t2);
|
|
393 else
|
|
394 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
|
|
395 }
|
|
396
|
|
397 /* If T is compound type, return type it is based on. */
|
|
398
|
|
399 static tree
|
|
400 compound_type_base (const_tree t)
|
|
401 {
|
|
402 if (TREE_CODE (t) == ARRAY_TYPE
|
|
403 || POINTER_TYPE_P (t)
|
|
404 || TREE_CODE (t) == COMPLEX_TYPE
|
|
405 || VECTOR_TYPE_P (t))
|
|
406 return TREE_TYPE (t);
|
|
407 if (TREE_CODE (t) == METHOD_TYPE)
|
|
408 return TYPE_METHOD_BASETYPE (t);
|
|
409 if (TREE_CODE (t) == OFFSET_TYPE)
|
|
410 return TYPE_OFFSET_BASETYPE (t);
|
|
411 return NULL_TREE;
|
|
412 }
|
|
413
|
|
414 /* Return true if T is either ODR type or compound type based from it.
|
|
415 If the function return true, we know that T is a type originating from C++
|
|
416 source even at link-time. */
|
|
417
|
|
418 bool
|
|
419 odr_or_derived_type_p (const_tree t)
|
|
420 {
|
|
421 do
|
|
422 {
|
145
|
423 if (odr_type_p (TYPE_MAIN_VARIANT (t)))
|
111
|
424 return true;
|
|
425 /* Function type is a tricky one. Basically we can consider it
|
|
426 ODR derived if return type or any of the parameters is.
|
|
427 We need to check all parameters because LTO streaming merges
|
|
428 common types (such as void) and they are not considered ODR then. */
|
|
429 if (TREE_CODE (t) == FUNCTION_TYPE)
|
|
430 {
|
|
431 if (TYPE_METHOD_BASETYPE (t))
|
|
432 t = TYPE_METHOD_BASETYPE (t);
|
|
433 else
|
|
434 {
|
|
435 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
|
|
436 return true;
|
|
437 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
|
145
|
438 if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
|
111
|
439 return true;
|
|
440 return false;
|
|
441 }
|
|
442 }
|
|
443 else
|
|
444 t = compound_type_base (t);
|
|
445 }
|
|
446 while (t);
|
|
447 return t;
|
|
448 }
|
|
449
|
|
450 /* Compare types T1 and T2 and return true if they are
|
|
451 equivalent. */
|
|
452
|
|
453 inline bool
|
|
454 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
|
|
455 {
|
|
456 tree t1 = o1->type;
|
|
457
|
145
|
458 gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
|
|
459 gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
|
111
|
460 if (t1 == t2)
|
|
461 return true;
|
|
462 if (!in_lto_p)
|
|
463 return false;
|
145
|
464 /* Check for anonymous namespaces. */
|
111
|
465 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
|
|
466 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
|
|
467 return false;
|
|
468 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
|
|
469 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
|
|
470 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
|
|
471 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
|
|
472 }
|
|
473
|
|
474 /* Free ODR type V. */
|
|
475
|
|
476 inline void
|
|
477 odr_name_hasher::remove (odr_type_d *v)
|
|
478 {
|
|
479 v->bases.release ();
|
|
480 v->derived_types.release ();
|
|
481 if (v->types_set)
|
|
482 delete v->types_set;
|
|
483 ggc_free (v);
|
|
484 }
|
|
485
|
|
486 /* ODR type hash used to look up ODR type based on tree type node. */
|
|
487
|
|
488 typedef hash_table<odr_name_hasher> odr_hash_type;
|
|
489 static odr_hash_type *odr_hash;
|
|
490
|
|
491 /* ODR types are also stored into ODR_TYPE vector to allow consistent
|
|
492 walking. Bases appear before derived types. Vector is garbage collected
|
|
493 so we won't end up visiting empty types. */
|
|
494
|
|
495 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
|
|
496 #define odr_types (*odr_types_ptr)
|
|
497
|
|
498 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
|
|
499 void
|
|
500 set_type_binfo (tree type, tree binfo)
|
|
501 {
|
|
502 for (; type; type = TYPE_NEXT_VARIANT (type))
|
|
503 if (COMPLETE_TYPE_P (type))
|
|
504 TYPE_BINFO (type) = binfo;
|
|
505 else
|
|
506 gcc_assert (!TYPE_BINFO (type));
|
|
507 }
|
|
508
|
145
|
509 /* Return true if type variants match.
|
|
510 This assumes that we already verified that T1 and T2 are variants of the
|
|
511 same type. */
|
|
512
|
|
513 static bool
|
|
514 type_variants_equivalent_p (tree t1, tree t2)
|
|
515 {
|
|
516 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
|
|
517 return false;
|
|
518
|
|
519 if (comp_type_attributes (t1, t2) != 1)
|
|
520 return false;
|
|
521
|
|
522 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
|
|
523 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
|
|
524 return false;
|
|
525
|
|
526 return true;
|
|
527 }
|
|
528
|
131
|
529 /* Compare T1 and T2 based on name or structure. */
|
111
|
530
|
|
531 static bool
|
|
532 odr_subtypes_equivalent_p (tree t1, tree t2,
|
|
533 hash_set<type_pair> *visited,
|
|
534 location_t loc1, location_t loc2)
|
|
535 {
|
|
536
|
|
537 /* This can happen in incomplete types that should be handled earlier. */
|
|
538 gcc_assert (t1 && t2);
|
|
539
|
|
540 if (t1 == t2)
|
|
541 return true;
|
|
542
|
|
543 /* Anonymous namespace types must match exactly. */
|
145
|
544 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
|
|
545 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
|
|
546 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
|
|
547 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
|
111
|
548 return false;
|
|
549
|
|
550 /* For ODR types be sure to compare their names.
|
131
|
551 To support -Wno-odr-type-merging we allow one type to be non-ODR
|
111
|
552 and other ODR even though it is a violation. */
|
145
|
553 if (types_odr_comparable (t1, t2))
|
111
|
554 {
|
145
|
555 if (t1 != t2
|
|
556 && odr_type_p (TYPE_MAIN_VARIANT (t1))
|
|
557 && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
|
|
558 return false;
|
|
559 if (!types_same_for_odr (t1, t2))
|
111
|
560 return false;
|
145
|
561 if (!type_variants_equivalent_p (t1, t2))
|
|
562 return false;
|
111
|
563 /* Limit recursion: If subtypes are ODR types and we know
|
131
|
564 that they are same, be happy. */
|
145
|
565 if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
|
111
|
566 return true;
|
|
567 }
|
|
568
|
|
569 /* Component types, builtins and possibly violating ODR types
|
|
570 have to be compared structurally. */
|
|
571 if (TREE_CODE (t1) != TREE_CODE (t2))
|
|
572 return false;
|
|
573 if (AGGREGATE_TYPE_P (t1)
|
|
574 && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
|
|
575 return false;
|
|
576
|
145
|
577 type_pair pair={TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
|
|
578 if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
|
111
|
579 {
|
145
|
580 pair.first = TYPE_MAIN_VARIANT (t2);
|
|
581 pair.second = TYPE_MAIN_VARIANT (t1);
|
111
|
582 }
|
|
583 if (visited->add (pair))
|
|
584 return true;
|
145
|
585 if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
|
|
586 false, NULL, visited, loc1, loc2))
|
|
587 return false;
|
|
588 if (!type_variants_equivalent_p (t1, t2))
|
|
589 return false;
|
|
590 return true;
|
111
|
591 }
|
|
592
|
|
593 /* Return true if DECL1 and DECL2 are identical methods. Consider
|
|
594 name equivalent to name.localalias.xyz. */
|
|
595
|
|
596 static bool
|
|
597 methods_equal_p (tree decl1, tree decl2)
|
|
598 {
|
|
599 if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
|
|
600 return true;
|
|
601 const char sep = symbol_table::symbol_suffix_separator ();
|
|
602
|
|
603 const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
|
|
604 const char *ptr1 = strchr (name1, sep);
|
|
605 int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
|
|
606
|
|
607 const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
|
|
608 const char *ptr2 = strchr (name2, sep);
|
|
609 int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
|
|
610
|
|
611 if (len1 != len2)
|
|
612 return false;
|
|
613 return !strncmp (name1, name2, len1);
|
|
614 }
|
|
615
|
|
616 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
|
|
617 violation warnings. */
|
|
618
|
|
619 void
|
|
620 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
|
|
621 {
|
|
622 int n1, n2;
|
|
623
|
|
624 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
|
|
625 {
|
|
626 odr_violation_reported = true;
|
|
627 if (DECL_VIRTUAL_P (prevailing->decl))
|
|
628 {
|
|
629 varpool_node *tmp = prevailing;
|
|
630 prevailing = vtable;
|
|
631 vtable = tmp;
|
|
632 }
|
131
|
633 auto_diagnostic_group d;
|
111
|
634 if (warning_at (DECL_SOURCE_LOCATION
|
|
635 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
|
|
636 OPT_Wodr,
|
|
637 "virtual table of type %qD violates one definition rule",
|
|
638 DECL_CONTEXT (vtable->decl)))
|
|
639 inform (DECL_SOURCE_LOCATION (prevailing->decl),
|
|
640 "variable of same assembler name as the virtual table is "
|
|
641 "defined in another translation unit");
|
|
642 return;
|
|
643 }
|
|
644 if (!prevailing->definition || !vtable->definition)
|
|
645 return;
|
|
646
|
|
647 /* If we do not stream ODR type info, do not bother to do useful compare. */
|
|
648 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
|
|
649 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
|
|
650 return;
|
|
651
|
|
652 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
|
|
653
|
|
654 if (class_type->odr_violated)
|
|
655 return;
|
|
656
|
|
657 for (n1 = 0, n2 = 0; true; n1++, n2++)
|
|
658 {
|
|
659 struct ipa_ref *ref1, *ref2;
|
|
660 bool end1, end2;
|
|
661
|
|
662 end1 = !prevailing->iterate_reference (n1, ref1);
|
|
663 end2 = !vtable->iterate_reference (n2, ref2);
|
|
664
|
|
665 /* !DECL_VIRTUAL_P means RTTI entry;
|
145
|
666 We warn when RTTI is lost because non-RTTI prevails; we silently
|
111
|
667 accept the other case. */
|
|
668 while (!end2
|
|
669 && (end1
|
|
670 || (methods_equal_p (ref1->referred->decl,
|
|
671 ref2->referred->decl)
|
|
672 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
|
|
673 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
|
|
674 {
|
131
|
675 if (!class_type->rtti_broken)
|
111
|
676 {
|
131
|
677 auto_diagnostic_group d;
|
|
678 if (warning_at (DECL_SOURCE_LOCATION
|
|
679 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
|
|
680 OPT_Wodr,
|
|
681 "virtual table of type %qD contains RTTI "
|
|
682 "information",
|
|
683 DECL_CONTEXT (vtable->decl)))
|
|
684 {
|
|
685 inform (DECL_SOURCE_LOCATION
|
|
686 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
687 "but is prevailed by one without from other"
|
|
688 " translation unit");
|
|
689 inform (DECL_SOURCE_LOCATION
|
|
690 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
691 "RTTI will not work on this type");
|
|
692 class_type->rtti_broken = true;
|
|
693 }
|
111
|
694 }
|
|
695 n2++;
|
|
696 end2 = !vtable->iterate_reference (n2, ref2);
|
|
697 }
|
|
698 while (!end1
|
|
699 && (end2
|
|
700 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
|
|
701 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
|
|
702 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
|
|
703 {
|
|
704 n1++;
|
|
705 end1 = !prevailing->iterate_reference (n1, ref1);
|
|
706 }
|
|
707
|
|
708 /* Finished? */
|
|
709 if (end1 && end2)
|
|
710 {
|
|
711 /* Extra paranoia; compare the sizes. We do not have information
|
|
712 about virtual inheritance offsets, so just be sure that these
|
|
713 match.
|
|
714 Do this as very last check so the not very informative error
|
|
715 is not output too often. */
|
|
716 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
|
|
717 {
|
|
718 class_type->odr_violated = true;
|
131
|
719 auto_diagnostic_group d;
|
145
|
720 tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
|
|
721 if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
|
111
|
722 "virtual table of type %qD violates "
|
145
|
723 "one definition rule",
|
111
|
724 DECL_CONTEXT (vtable->decl)))
|
|
725 {
|
145
|
726 ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
|
|
727 inform (DECL_SOURCE_LOCATION (ctx),
|
|
728 "the conflicting type defined in another translation"
|
|
729 " unit has virtual table of different size");
|
111
|
730 }
|
|
731 }
|
|
732 return;
|
|
733 }
|
|
734
|
|
735 if (!end1 && !end2)
|
|
736 {
|
|
737 if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
|
|
738 continue;
|
|
739
|
|
740 class_type->odr_violated = true;
|
|
741
|
|
742 /* If the loops above stopped on non-virtual pointer, we have
|
|
743 mismatch in RTTI information mangling. */
|
|
744 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
|
|
745 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
|
|
746 {
|
131
|
747 auto_diagnostic_group d;
|
111
|
748 if (warning_at (DECL_SOURCE_LOCATION
|
|
749 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
|
|
750 OPT_Wodr,
|
|
751 "virtual table of type %qD violates "
|
145
|
752 "one definition rule",
|
111
|
753 DECL_CONTEXT (vtable->decl)))
|
|
754 {
|
|
755 inform (DECL_SOURCE_LOCATION
|
|
756 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
757 "the conflicting type defined in another translation "
|
|
758 "unit with different RTTI information");
|
|
759 }
|
|
760 return;
|
|
761 }
|
|
762 /* At this point both REF1 and REF2 points either to virtual table
|
|
763 or virtual method. If one points to virtual table and other to
|
|
764 method we can complain the same way as if one table was shorter
|
|
765 than other pointing out the extra method. */
|
|
766 if (TREE_CODE (ref1->referred->decl)
|
|
767 != TREE_CODE (ref2->referred->decl))
|
|
768 {
|
|
769 if (VAR_P (ref1->referred->decl))
|
|
770 end1 = true;
|
|
771 else if (VAR_P (ref2->referred->decl))
|
|
772 end2 = true;
|
|
773 }
|
|
774 }
|
|
775
|
|
776 class_type->odr_violated = true;
|
|
777
|
145
|
778 /* Complain about size mismatch. Either we have too many virtual
|
111
|
779 functions or too many virtual table pointers. */
|
|
780 if (end1 || end2)
|
|
781 {
|
|
782 if (end1)
|
|
783 {
|
|
784 varpool_node *tmp = prevailing;
|
|
785 prevailing = vtable;
|
|
786 vtable = tmp;
|
|
787 ref1 = ref2;
|
|
788 }
|
131
|
789 auto_diagnostic_group d;
|
111
|
790 if (warning_at (DECL_SOURCE_LOCATION
|
|
791 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
|
|
792 OPT_Wodr,
|
|
793 "virtual table of type %qD violates "
|
|
794 "one definition rule",
|
|
795 DECL_CONTEXT (vtable->decl)))
|
|
796 {
|
|
797 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
|
|
798 {
|
|
799 inform (DECL_SOURCE_LOCATION
|
|
800 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
801 "the conflicting type defined in another translation "
|
|
802 "unit");
|
|
803 inform (DECL_SOURCE_LOCATION
|
|
804 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
|
|
805 "contains additional virtual method %qD",
|
|
806 ref1->referred->decl);
|
|
807 }
|
|
808 else
|
|
809 {
|
|
810 inform (DECL_SOURCE_LOCATION
|
|
811 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
812 "the conflicting type defined in another translation "
|
|
813 "unit has virtual table with more entries");
|
|
814 }
|
|
815 }
|
|
816 return;
|
|
817 }
|
|
818
|
145
|
819 /* And in the last case we have either mismatch in between two virtual
|
111
|
820 methods or two virtual table pointers. */
|
131
|
821 auto_diagnostic_group d;
|
111
|
822 if (warning_at (DECL_SOURCE_LOCATION
|
|
823 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
|
|
824 "virtual table of type %qD violates "
|
145
|
825 "one definition rule",
|
111
|
826 DECL_CONTEXT (vtable->decl)))
|
|
827 {
|
|
828 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
|
|
829 {
|
|
830 inform (DECL_SOURCE_LOCATION
|
|
831 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
832 "the conflicting type defined in another translation "
|
|
833 "unit");
|
|
834 gcc_assert (TREE_CODE (ref2->referred->decl)
|
|
835 == FUNCTION_DECL);
|
|
836 inform (DECL_SOURCE_LOCATION
|
|
837 (ref1->referred->ultimate_alias_target ()->decl),
|
|
838 "virtual method %qD",
|
|
839 ref1->referred->ultimate_alias_target ()->decl);
|
|
840 inform (DECL_SOURCE_LOCATION
|
|
841 (ref2->referred->ultimate_alias_target ()->decl),
|
|
842 "ought to match virtual method %qD but does not",
|
|
843 ref2->referred->ultimate_alias_target ()->decl);
|
|
844 }
|
|
845 else
|
|
846 inform (DECL_SOURCE_LOCATION
|
|
847 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
|
|
848 "the conflicting type defined in another translation "
|
|
849 "unit has virtual table with different contents");
|
|
850 return;
|
|
851 }
|
|
852 }
|
|
853 }
|
|
854
|
|
855 /* Output ODR violation warning about T1 and T2 with REASON.
|
|
856 Display location of ST1 and ST2 if REASON speaks about field or
|
|
857 method of the type.
|
|
858 If WARN is false, do nothing. Set WARNED if warning was indeed
|
|
859 output. */
|
|
860
|
145
|
861 static void
|
111
|
862 warn_odr (tree t1, tree t2, tree st1, tree st2,
|
|
863 bool warn, bool *warned, const char *reason)
|
|
864 {
|
145
|
865 tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
|
111
|
866 if (warned)
|
|
867 *warned = false;
|
|
868
|
145
|
869 if (!warn || !TYPE_NAME(TYPE_MAIN_VARIANT (t1)))
|
111
|
870 return;
|
|
871
|
145
|
872 /* ODR warnings are output during LTO streaming; we must apply location
|
111
|
873 cache for potential warnings to be output correctly. */
|
|
874 if (lto_location_cache::current_cache)
|
|
875 lto_location_cache::current_cache->apply_location_cache ();
|
|
876
|
131
|
877 auto_diagnostic_group d;
|
145
|
878 if (t1 != TYPE_MAIN_VARIANT (t1)
|
|
879 && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
|
|
880 {
|
|
881 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
|
|
882 OPT_Wodr, "type %qT (typedef of %qT) violates the "
|
|
883 "C++ One Definition Rule",
|
|
884 t1, TYPE_MAIN_VARIANT (t1)))
|
|
885 return;
|
|
886 }
|
|
887 else
|
|
888 {
|
|
889 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
|
|
890 OPT_Wodr, "type %qT violates the C++ One Definition Rule",
|
|
891 t1))
|
|
892 return;
|
|
893 }
|
111
|
894 if (!st1 && !st2)
|
|
895 ;
|
|
896 /* For FIELD_DECL support also case where one of fields is
|
|
897 NULL - this is used when the structures have mismatching number of
|
|
898 elements. */
|
|
899 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
|
|
900 {
|
|
901 inform (DECL_SOURCE_LOCATION (decl2),
|
|
902 "a different type is defined in another translation unit");
|
|
903 if (!st1)
|
|
904 {
|
|
905 st1 = st2;
|
|
906 st2 = NULL;
|
|
907 }
|
|
908 inform (DECL_SOURCE_LOCATION (st1),
|
|
909 "the first difference of corresponding definitions is field %qD",
|
|
910 st1);
|
|
911 if (st2)
|
|
912 decl2 = st2;
|
|
913 }
|
|
914 else if (TREE_CODE (st1) == FUNCTION_DECL)
|
|
915 {
|
|
916 inform (DECL_SOURCE_LOCATION (decl2),
|
|
917 "a different type is defined in another translation unit");
|
|
918 inform (DECL_SOURCE_LOCATION (st1),
|
|
919 "the first difference of corresponding definitions is method %qD",
|
|
920 st1);
|
|
921 decl2 = st2;
|
|
922 }
|
|
923 else
|
|
924 return;
|
|
925 inform (DECL_SOURCE_LOCATION (decl2), reason);
|
|
926
|
|
927 if (warned)
|
|
928 *warned = true;
|
|
929 }
|
|
930
|
145
|
931 /* Return true if T1 and T2 are incompatible and we want to recursively
|
111
|
932 dive into them from warn_type_mismatch to give sensible answer. */
|
|
933
|
|
934 static bool
|
|
935 type_mismatch_p (tree t1, tree t2)
|
|
936 {
|
|
937 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
|
|
938 && !odr_types_equivalent_p (t1, t2))
|
|
939 return true;
|
|
940 return !types_compatible_p (t1, t2);
|
|
941 }
|
|
942
|
|
943
|
|
/* Types T1 and T2 were found to be incompatible in a context where they
   are required to agree (they either declare symbols of the same assembler
   name or are unified by the ODR rule).  We already output a warning about
   this, but if possible, output extra information on how the types
   mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the types
   themselves do not have one.  */
|
111
|
953
|
|
954 void
|
|
955 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
|
|
956 {
|
|
957 /* Location of type is known only if it has TYPE_NAME and the name is
|
|
958 TYPE_DECL. */
|
|
959 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
|
|
960 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
|
|
961 : UNKNOWN_LOCATION;
|
|
962 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
|
|
963 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
|
|
964 : UNKNOWN_LOCATION;
|
|
965 bool loc_t2_useful = false;
|
|
966
|
|
967 /* With LTO it is a common case that the location of both types match.
|
|
968 See if T2 has a location that is different from T1. If so, we will
|
|
969 inform user about the location.
|
|
970 Do not consider the location passed to us in LOC1/LOC2 as those are
|
|
971 already output. */
|
|
972 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
|
|
973 {
|
|
974 if (loc_t1 <= BUILTINS_LOCATION)
|
|
975 loc_t2_useful = true;
|
|
976 else
|
|
977 {
|
|
978 expanded_location xloc1 = expand_location (loc_t1);
|
|
979 expanded_location xloc2 = expand_location (loc_t2);
|
|
980
|
|
981 if (strcmp (xloc1.file, xloc2.file)
|
|
982 || xloc1.line != xloc2.line
|
|
983 || xloc1.column != xloc2.column)
|
|
984 loc_t2_useful = true;
|
|
985 }
|
|
986 }
|
|
987
|
|
988 if (loc_t1 <= BUILTINS_LOCATION)
|
|
989 loc_t1 = loc1;
|
|
990 if (loc_t2 <= BUILTINS_LOCATION)
|
|
991 loc_t2 = loc2;
|
|
992
|
|
993 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
|
|
994
|
|
995 /* It is a quite common bug to reference anonymous namespace type in
|
|
996 non-anonymous namespace class. */
|
145
|
997 tree mt1 = TYPE_MAIN_VARIANT (t1);
|
|
998 tree mt2 = TYPE_MAIN_VARIANT (t2);
|
|
999 if ((type_with_linkage_p (mt1)
|
|
1000 && type_in_anonymous_namespace_p (mt1))
|
|
1001 || (type_with_linkage_p (mt2)
|
|
1002 && type_in_anonymous_namespace_p (mt2)))
|
111
|
1003 {
|
145
|
1004 if (!type_with_linkage_p (mt1)
|
|
1005 || !type_in_anonymous_namespace_p (mt1))
|
111
|
1006 {
|
|
1007 std::swap (t1, t2);
|
145
|
1008 std::swap (mt1, mt2);
|
111
|
1009 std::swap (loc_t1, loc_t2);
|
|
1010 }
|
145
|
1011 gcc_assert (TYPE_NAME (mt1)
|
|
1012 && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL);
|
|
1013 tree n1 = TYPE_NAME (mt1);
|
|
1014 tree n2 = TYPE_NAME (mt2) ? TYPE_NAME (mt2) : NULL;
|
|
1015
|
|
1016 if (TREE_CODE (n1) == TYPE_DECL)
|
|
1017 n1 = DECL_NAME (n1);
|
|
1018 if (n2 && TREE_CODE (n2) == TYPE_DECL)
|
|
1019 n2 = DECL_NAME (n2);
|
|
1020 /* Most of the time, the type names will match, do not be unnecessarily
|
111
|
1021 verbose. */
|
145
|
1022 if (n1 != n2)
|
111
|
1023 inform (loc_t1,
|
145
|
1024 "type %qT defined in anonymous namespace cannot match "
|
111
|
1025 "type %qT across the translation unit boundary",
|
|
1026 t1, t2);
|
|
1027 else
|
|
1028 inform (loc_t1,
|
145
|
1029 "type %qT defined in anonymous namespace cannot match "
|
111
|
1030 "across the translation unit boundary",
|
|
1031 t1);
|
|
1032 if (loc_t2_useful)
|
|
1033 inform (loc_t2,
|
|
1034 "the incompatible type defined in another translation unit");
|
|
1035 return;
|
|
1036 }
|
|
1037 /* If types have mangled ODR names and they are different, it is most
|
|
1038 informative to output those.
|
|
1039 This also covers types defined in different namespaces. */
|
145
|
1040 const char *odr1 = get_odr_name_for_type (mt1);
|
|
1041 const char *odr2 = get_odr_name_for_type (mt2);
|
|
1042 if (odr1 != NULL && odr2 != NULL && odr1 != odr2)
|
111
|
1043 {
|
145
|
1044 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
|
|
1045 char *name1 = xstrdup (cplus_demangle (odr1, opts));
|
|
1046 char *name2 = cplus_demangle (odr2, opts);
|
111
|
1047 if (name1 && name2 && strcmp (name1, name2))
|
|
1048 {
|
|
1049 inform (loc_t1,
|
|
1050 "type name %qs should match type name %qs",
|
|
1051 name1, name2);
|
|
1052 if (loc_t2_useful)
|
|
1053 inform (loc_t2,
|
|
1054 "the incompatible type is defined here");
|
|
1055 free (name1);
|
|
1056 return;
|
|
1057 }
|
|
1058 free (name1);
|
|
1059 }
|
|
1060 /* A tricky case are compound types. Often they appear the same in source
|
|
1061 code and the mismatch is dragged in by type they are build from.
|
|
1062 Look for those differences in subtypes and try to be informative. In other
|
|
1063 cases just output nothing because the source code is probably different
|
|
1064 and in this case we already output a all necessary info. */
|
|
1065 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
|
|
1066 {
|
|
1067 if (TREE_CODE (t1) == TREE_CODE (t2))
|
|
1068 {
|
|
1069 if (TREE_CODE (t1) == ARRAY_TYPE
|
|
1070 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
|
|
1071 {
|
|
1072 tree i1 = TYPE_DOMAIN (t1);
|
|
1073 tree i2 = TYPE_DOMAIN (t2);
|
|
1074
|
|
1075 if (i1 && i2
|
|
1076 && TYPE_MAX_VALUE (i1)
|
|
1077 && TYPE_MAX_VALUE (i2)
|
|
1078 && !operand_equal_p (TYPE_MAX_VALUE (i1),
|
|
1079 TYPE_MAX_VALUE (i2), 0))
|
|
1080 {
|
|
1081 inform (loc,
|
|
1082 "array types have different bounds");
|
|
1083 return;
|
|
1084 }
|
|
1085 }
|
|
1086 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
|
|
1087 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
|
|
1088 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
|
|
1089 else if (TREE_CODE (t1) == METHOD_TYPE
|
|
1090 || TREE_CODE (t1) == FUNCTION_TYPE)
|
|
1091 {
|
|
1092 tree parms1 = NULL, parms2 = NULL;
|
|
1093 int count = 1;
|
|
1094
|
|
1095 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
|
|
1096 {
|
|
1097 inform (loc, "return value type mismatch");
|
|
1098 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
|
|
1099 loc_t2);
|
|
1100 return;
|
|
1101 }
|
|
1102 if (prototype_p (t1) && prototype_p (t2))
|
|
1103 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
|
|
1104 parms1 && parms2;
|
|
1105 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
|
|
1106 count++)
|
|
1107 {
|
|
1108 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
|
|
1109 {
|
|
1110 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
|
|
1111 inform (loc,
|
|
1112 "implicit this pointer type mismatch");
|
|
1113 else
|
|
1114 inform (loc,
|
|
1115 "type mismatch in parameter %i",
|
|
1116 count - (TREE_CODE (t1) == METHOD_TYPE));
|
|
1117 warn_types_mismatch (TREE_VALUE (parms1),
|
|
1118 TREE_VALUE (parms2),
|
|
1119 loc_t1, loc_t2);
|
|
1120 return;
|
|
1121 }
|
|
1122 }
|
|
1123 if (parms1 || parms2)
|
|
1124 {
|
|
1125 inform (loc,
|
|
1126 "types have different parameter counts");
|
|
1127 return;
|
|
1128 }
|
|
1129 }
|
|
1130 }
|
|
1131 return;
|
|
1132 }
|
|
1133
|
145
|
1134 if (types_odr_comparable (t1, t2)
|
|
1135 /* We make assign integers mangled names to be able to handle
|
|
1136 signed/unsigned chars. Accepting them here would however lead to
|
|
1137 confusing message like
|
|
1138 "type ‘const int’ itself violates the C++ One Definition Rule" */
|
|
1139 && TREE_CODE (t1) != INTEGER_TYPE
|
|
1140 && types_same_for_odr (t1, t2))
|
111
|
1141 inform (loc_t1,
|
|
1142 "type %qT itself violates the C++ One Definition Rule", t1);
|
|
1143 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
|
|
1144 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
|
|
1145 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
|
|
1146 return;
|
|
1147 else
|
|
1148 inform (loc_t1, "type %qT should match type %qT",
|
|
1149 t1, t2);
|
|
1150 if (loc_t2_useful)
|
|
1151 inform (loc_t2, "the incompatible type is defined here");
|
|
1152 }
|
|
1153
|
145
|
1154 /* Return true if T should be ignored in TYPE_FIELDS for ODR comparison. */
|
|
1155
|
|
1156 static bool
|
|
1157 skip_in_fields_list_p (tree t)
|
|
1158 {
|
|
1159 if (TREE_CODE (t) != FIELD_DECL)
|
|
1160 return true;
|
|
1161 /* C++ FE introduces zero sized fields depending on -std setting, see
|
|
1162 PR89358. */
|
|
1163 if (DECL_SIZE (t)
|
|
1164 && integer_zerop (DECL_SIZE (t))
|
|
1165 && DECL_ARTIFICIAL (t)
|
|
1166 && DECL_IGNORED_P (t)
|
|
1167 && !DECL_NAME (t))
|
|
1168 return true;
|
|
1169 return false;
|
|
1170 }
|
|
1171
|
111
|
/* Compare T1 and T2, report ODR violations if WARN is true and set
   *WARNED to true if anything is reported.  Return true if the types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION, they may be used to output a
   warning about the type if the type itself does not have a location.  */
|
|
1178
|
|
static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
			hash_set<type_pair> *visited,
			location_t loc1, location_t loc2)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a different type is defined in another translation unit"));
      return false;
    }

  /* Anonymous-namespace types can never match across translation units.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    {
      /* We cannot trip this when comparing ODR types, only when trying to
	 match different ODR derivations from different declarations.
	 So WARN should be always false.  */
      gcc_assert (!warn);
      return false;
    }

  /* For enums, enumerator names and values must agree pairwise and the
     lists must have the same length.  */
  if (TREE_CODE (t1) == ENUMERAL_TYPE
      && TYPE_VALUES (t1) && TYPE_VALUES (t2))
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	{
	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different value name"
			   " is defined in another translation unit"));
	      return false;
	    }
	  if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), 0))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different values is defined"
			   " in another translation unit"));
	      return false;
	    }
	}
      if (v1 || v2)
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("an enum with mismatching number of values "
		       "is defined in another translation unit"));
	  return false;
	}
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* char WRT uint_8?  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					  visited, loc1, loc2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      /* Only emit the detailed explanation when the primary warning
		 was actually output.  */
	      if (warn && warned)
		warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
				     loc1, loc2);
	      return false;
	    }
	}

      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					 visited, loc1, loc2))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}
    }
  /* Do type-specific comparisons.  */
  else switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements are the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					visited, loc1, loc2))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return type_variants_equivalent_p (t1, t2);

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, minimums should be always 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
      }
    break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
				      visited, loc1, loc2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}

      /* Unprototyped functions (or identical argument lists) need no
	 per-parameter walk.  */
      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
	  || !prototype_p (t1) || !prototype_p (t2))
	return type_variants_equivalent_p (t1, t2);
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      visited, loc1, loc2))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2), loc1, loc2);
		  return false;
		}
	    }

	  /* One list ran out before the other: differing arity.  */
	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return type_variants_equivalent_p (t1, t2);
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    /* One type polymorphic, the other not, is an ODR violation on
	       its own.  */
	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
		&& polymorphic_type_binfo_p (TYPE_BINFO (t1))
		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
	      {
		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is not polymorphic"));
		else
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is polymorphic"));
		return false;
	      }
	    /* Walk both field lists in lockstep, skipping entries that are
	       irrelevant for the ODR comparison.  */
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && skip_in_fields_list_p (f1))
		  f1 = TREE_CHAIN (f1);
		while (f2 && skip_in_fields_list_p (f2))
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different virtual table pointers"
				 " is defined in another translation unit"));
		    return false;
		  }
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different bases is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
						TREE_TYPE (f2),
						visited, loc1, loc2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("fields have different layout "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("one field is a bitfield while the other "
				 "is not"));
		    return false;
		  }
		else
		  gcc_assert (DECL_NONADDRESSABLE_P (f1)
			      == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different virtual table pointers"
			       " is defined in another translation unit"));
		else if ((f1 && DECL_ARTIFICIAL (f1))
			 || (f2 && DECL_ARTIFICIAL (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different bases is defined "
			       "in another translation unit"));
		else
		  warn_odr (t1, t2, f1, f2, warn, warned,
			    G_("a type with different number of fields "
			       "is defined in another translation unit"));

		return false;
	      }
	  }
	break;
      }
    case VOID_TYPE:
    case NULLPTR_TYPE:
      break;

    default:
      debug_tree (t1);
      gcc_unreachable ();
    }

  /* Those are better to come last as they are utterly uninformative.  */
  if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
      && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different size "
		   "is defined in another translation unit"));
      return false;
    }

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("one type needs to be constructed while other not"));
      gcc_checking_assert (RECORD_OR_UNION_TYPE_P (t1));
      return false;
    }
  /* There is no really good user facing warning for this.
     Either the original reason for modes being different is lost during
     streaming or we should catch earlier warnings.  We however must detect
     the mismatch to avoid type verifier from complaining on mismatched
     types between type and canonical type. See PR91576.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("memory layout mismatch"));
      return false;
    }

  gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
				  TYPE_SIZE_UNIT (t2), 0));
  return type_variants_equivalent_p (t1, t2);
}
|
|
1573
|
|
1574 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
|
|
1575
|
|
1576 bool
|
|
1577 odr_types_equivalent_p (tree type1, tree type2)
|
|
1578 {
|
|
1579 gcc_checking_assert (odr_or_derived_type_p (type1)
|
|
1580 && odr_or_derived_type_p (type2));
|
|
1581
|
|
1582 hash_set<type_pair> visited;
|
|
1583 return odr_types_equivalent_p (type1, type2, false, NULL,
|
|
1584 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
|
|
1585 }
|
|
1586
|
|
1587 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
|
|
1588 from VAL->type. This may happen in LTO where tree merging did not merge
|
|
1589 all variants of the same type or due to ODR violation.
|
|
1590
|
|
1591 Analyze and report ODR violations and add type to duplicate list.
|
|
1592 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
|
|
1593 this is first time we see definition of a class return true so the
|
|
1594 base types are analyzed. */
|
|
1595
|
|
1596 static bool
|
|
1597 add_type_duplicate (odr_type val, tree type)
|
|
1598 {
|
|
1599 bool build_bases = false;
|
|
1600 bool prevail = false;
|
|
1601 bool odr_must_violate = false;
|
|
1602
|
|
1603 if (!val->types_set)
|
|
1604 val->types_set = new hash_set<tree>;
|
|
1605
|
|
1606 /* Chose polymorphic type as leader (this happens only in case of ODR
|
|
1607 violations. */
|
|
1608 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
|
|
1609 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
|
|
1610 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
|
|
1611 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
|
|
1612 {
|
|
1613 prevail = true;
|
|
1614 build_bases = true;
|
|
1615 }
|
|
1616 /* Always prefer complete type to be the leader. */
|
|
1617 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
|
|
1618 {
|
|
1619 prevail = true;
|
145
|
1620 if (TREE_CODE (type) == RECORD_TYPE)
|
|
1621 build_bases = TYPE_BINFO (type);
|
111
|
1622 }
|
|
1623 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
|
|
1624 ;
|
|
1625 else if (TREE_CODE (val->type) == ENUMERAL_TYPE
|
|
1626 && TREE_CODE (type) == ENUMERAL_TYPE
|
|
1627 && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
|
|
1628 prevail = true;
|
|
1629 else if (TREE_CODE (val->type) == RECORD_TYPE
|
|
1630 && TREE_CODE (type) == RECORD_TYPE
|
|
1631 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
|
|
1632 {
|
|
1633 gcc_assert (!val->bases.length ());
|
|
1634 build_bases = true;
|
|
1635 prevail = true;
|
|
1636 }
|
|
1637
|
|
1638 if (prevail)
|
|
1639 std::swap (val->type, type);
|
|
1640
|
|
1641 val->types_set->add (type);
|
|
1642
|
145
|
1643 if (!odr_hash)
|
|
1644 return false;
|
|
1645
|
|
1646 gcc_checking_assert (can_be_name_hashed_p (type)
|
|
1647 && can_be_name_hashed_p (val->type));
|
111
|
1648
|
|
1649 bool merge = true;
|
|
1650 bool base_mismatch = false;
|
|
1651 unsigned int i;
|
|
1652 bool warned = false;
|
|
1653 hash_set<type_pair> visited;
|
|
1654
|
|
1655 gcc_assert (in_lto_p);
|
|
1656 vec_safe_push (val->types, type);
|
|
1657
|
|
1658 /* If both are class types, compare the bases. */
|
|
1659 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
|
|
1660 && TREE_CODE (val->type) == RECORD_TYPE
|
|
1661 && TREE_CODE (type) == RECORD_TYPE
|
|
1662 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
|
|
1663 {
|
|
1664 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
|
|
1665 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
|
|
1666 {
|
|
1667 if (!flag_ltrans && !warned && !val->odr_violated)
|
|
1668 {
|
|
1669 tree extra_base;
|
|
1670 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
|
|
1671 "a type with the same name but different "
|
|
1672 "number of polymorphic bases is "
|
|
1673 "defined in another translation unit");
|
|
1674 if (warned)
|
|
1675 {
|
|
1676 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
|
|
1677 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
|
|
1678 extra_base = BINFO_BASE_BINFO
|
|
1679 (TYPE_BINFO (type),
|
|
1680 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
|
|
1681 else
|
|
1682 extra_base = BINFO_BASE_BINFO
|
|
1683 (TYPE_BINFO (val->type),
|
|
1684 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
|
|
1685 tree extra_base_type = BINFO_TYPE (extra_base);
|
|
1686 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
|
|
1687 "the extra base is defined here");
|
|
1688 }
|
|
1689 }
|
|
1690 base_mismatch = true;
|
|
1691 }
|
|
1692 else
|
|
1693 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
|
|
1694 {
|
|
1695 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
|
|
1696 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
|
|
1697 tree type1 = BINFO_TYPE (base1);
|
|
1698 tree type2 = BINFO_TYPE (base2);
|
|
1699
|
|
1700 if (types_odr_comparable (type1, type2))
|
|
1701 {
|
|
1702 if (!types_same_for_odr (type1, type2))
|
|
1703 base_mismatch = true;
|
|
1704 }
|
|
1705 else
|
|
1706 if (!odr_types_equivalent_p (type1, type2))
|
|
1707 base_mismatch = true;
|
|
1708 if (base_mismatch)
|
|
1709 {
|
|
1710 if (!warned && !val->odr_violated)
|
|
1711 {
|
|
1712 warn_odr (type, val->type, NULL, NULL,
|
|
1713 !warned, &warned,
|
|
1714 "a type with the same name but different base "
|
|
1715 "type is defined in another translation unit");
|
|
1716 if (warned)
|
|
1717 warn_types_mismatch (type1, type2,
|
|
1718 UNKNOWN_LOCATION, UNKNOWN_LOCATION);
|
|
1719 }
|
|
1720 break;
|
|
1721 }
|
|
1722 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
|
|
1723 {
|
|
1724 base_mismatch = true;
|
|
1725 if (!warned && !val->odr_violated)
|
|
1726 warn_odr (type, val->type, NULL, NULL,
|
|
1727 !warned, &warned,
|
|
1728 "a type with the same name but different base "
|
|
1729 "layout is defined in another translation unit");
|
|
1730 break;
|
|
1731 }
|
|
1732 /* One of bases is not of complete type. */
|
|
1733 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
|
|
1734 {
|
|
1735 /* If we have a polymorphic type info specified for TYPE1
|
|
1736 but not for TYPE2 we possibly missed a base when recording
|
|
1737 VAL->type earlier.
|
|
1738 Be sure this does not happen. */
|
|
1739 if (TYPE_BINFO (type1)
|
|
1740 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
|
|
1741 && !build_bases)
|
|
1742 odr_must_violate = true;
|
|
1743 break;
|
|
1744 }
|
|
1745 /* One base is polymorphic and the other not.
|
|
1746 This ought to be diagnosed earlier, but do not ICE in the
|
|
1747 checking bellow. */
|
|
1748 else if (TYPE_BINFO (type1)
|
|
1749 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
|
|
1750 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
|
|
1751 {
|
|
1752 if (!warned && !val->odr_violated)
|
|
1753 warn_odr (type, val->type, NULL, NULL,
|
|
1754 !warned, &warned,
|
|
1755 "a base of the type is polymorphic only in one "
|
|
1756 "translation unit");
|
|
1757 base_mismatch = true;
|
|
1758 break;
|
|
1759 }
|
|
1760 }
|
|
1761 if (base_mismatch)
|
|
1762 {
|
|
1763 merge = false;
|
|
1764 odr_violation_reported = true;
|
|
1765 val->odr_violated = true;
|
|
1766
|
|
1767 if (symtab->dump_file)
|
|
1768 {
|
|
1769 fprintf (symtab->dump_file, "ODR base violation\n");
|
|
1770
|
|
1771 print_node (symtab->dump_file, "", val->type, 0);
|
|
1772 putc ('\n',symtab->dump_file);
|
|
1773 print_node (symtab->dump_file, "", type, 0);
|
|
1774 putc ('\n',symtab->dump_file);
|
|
1775 }
|
|
1776 }
|
|
1777 }
|
|
1778
|
131
|
1779 /* Next compare memory layout.
|
|
1780 The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
|
|
1781 We must apply the location cache to ensure that they are valid
|
|
1782 before we can pass them to odr_types_equivalent_p (PR lto/83121). */
|
|
1783 if (lto_location_cache::current_cache)
|
|
1784 lto_location_cache::current_cache->apply_location_cache ();
|
145
|
1785 /* As a special case we stream mangles names of integer types so we can see
|
|
1786 if they are believed to be same even though they have different
|
|
1787 representation. Avoid bogus warning on mismatches in these. */
|
|
1788 if (TREE_CODE (type) != INTEGER_TYPE
|
|
1789 && TREE_CODE (val->type) != INTEGER_TYPE
|
|
1790 && !odr_types_equivalent_p (val->type, type,
|
111
|
1791 !flag_ltrans && !val->odr_violated && !warned,
|
|
1792 &warned, &visited,
|
|
1793 DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
|
|
1794 DECL_SOURCE_LOCATION (TYPE_NAME (type))))
|
|
1795 {
|
|
1796 merge = false;
|
|
1797 odr_violation_reported = true;
|
|
1798 val->odr_violated = true;
|
|
1799 }
|
|
1800 gcc_assert (val->odr_violated || !odr_must_violate);
|
|
1801 /* Sanity check that all bases will be build same way again. */
|
|
1802 if (flag_checking
|
|
1803 && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
|
|
1804 && TREE_CODE (val->type) == RECORD_TYPE
|
|
1805 && TREE_CODE (type) == RECORD_TYPE
|
|
1806 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
|
|
1807 && !val->odr_violated
|
|
1808 && !base_mismatch && val->bases.length ())
|
|
1809 {
|
|
1810 unsigned int num_poly_bases = 0;
|
|
1811 unsigned int j;
|
|
1812
|
|
1813 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
|
|
1814 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
|
|
1815 (TYPE_BINFO (type), i)))
|
|
1816 num_poly_bases++;
|
|
1817 gcc_assert (num_poly_bases == val->bases.length ());
|
|
1818 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
|
|
1819 i++)
|
|
1820 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
|
|
1821 (TYPE_BINFO (type), i)))
|
|
1822 {
|
|
1823 odr_type base = get_odr_type
|
|
1824 (BINFO_TYPE
|
|
1825 (BINFO_BASE_BINFO (TYPE_BINFO (type),
|
|
1826 i)),
|
|
1827 true);
|
|
1828 gcc_assert (val->bases[j] == base);
|
|
1829 j++;
|
|
1830 }
|
|
1831 }
|
|
1832
|
|
1833
|
|
1834 /* Regularize things a little. During LTO same types may come with
|
|
1835 different BINFOs. Either because their virtual table was
|
|
1836 not merged by tree merging and only later at decl merging or
|
|
1837 because one type comes with external vtable, while other
|
|
1838 with internal. We want to merge equivalent binfos to conserve
|
|
1839 memory and streaming overhead.
|
|
1840
|
|
1841 The external vtables are more harmful: they contain references
|
|
1842 to external declarations of methods that may be defined in the
|
|
1843 merged LTO unit. For this reason we absolutely need to remove
|
|
1844 them and replace by internal variants. Not doing so will lead
|
|
1845 to incomplete answers from possible_polymorphic_call_targets.
|
|
1846
|
|
1847 FIXME: disable for now; because ODR types are now build during
|
|
1848 streaming in, the variants do not need to be linked to the type,
|
|
1849 yet. We need to do the merging in cleanup pass to be implemented
|
|
1850 soon. */
|
|
1851 if (!flag_ltrans && merge
|
|
1852 && 0
|
|
1853 && TREE_CODE (val->type) == RECORD_TYPE
|
|
1854 && TREE_CODE (type) == RECORD_TYPE
|
|
1855 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
|
|
1856 && TYPE_MAIN_VARIANT (type) == type
|
|
1857 && TYPE_MAIN_VARIANT (val->type) == val->type
|
|
1858 && BINFO_VTABLE (TYPE_BINFO (val->type))
|
|
1859 && BINFO_VTABLE (TYPE_BINFO (type)))
|
|
1860 {
|
|
1861 tree master_binfo = TYPE_BINFO (val->type);
|
|
1862 tree v1 = BINFO_VTABLE (master_binfo);
|
|
1863 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
|
|
1864
|
|
1865 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
|
|
1866 {
|
|
1867 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
|
|
1868 && operand_equal_p (TREE_OPERAND (v1, 1),
|
|
1869 TREE_OPERAND (v2, 1), 0));
|
|
1870 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
|
|
1871 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
|
|
1872 }
|
|
1873 gcc_assert (DECL_ASSEMBLER_NAME (v1)
|
|
1874 == DECL_ASSEMBLER_NAME (v2));
|
|
1875
|
|
1876 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
|
|
1877 {
|
|
1878 unsigned int i;
|
|
1879
|
|
1880 set_type_binfo (val->type, TYPE_BINFO (type));
|
|
1881 for (i = 0; i < val->types->length (); i++)
|
|
1882 {
|
|
1883 if (TYPE_BINFO ((*val->types)[i])
|
|
1884 == master_binfo)
|
|
1885 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
|
|
1886 }
|
|
1887 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
|
|
1888 }
|
|
1889 else
|
|
1890 set_type_binfo (type, master_binfo);
|
|
1891 }
|
|
1892 return build_bases;
|
|
1893 }
|
|
1894
|
145
|
1895 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
|
|
1896
|
|
1897 tree
|
|
1898 obj_type_ref_class (const_tree ref)
|
|
1899 {
|
|
1900 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
|
|
1901 ref = TREE_TYPE (ref);
|
|
1902 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
|
|
1903 ref = TREE_TYPE (ref);
|
|
1904 /* We look for type THIS points to. ObjC also builds
|
|
1905 OBJ_TYPE_REF with non-method calls, Their first parameter
|
|
1906 ID however also corresponds to class type. */
|
|
1907 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
|
|
1908 || TREE_CODE (ref) == FUNCTION_TYPE);
|
|
1909 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
|
|
1910 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
|
|
1911 tree ret = TREE_TYPE (ref);
|
|
1912 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
|
|
1913 ret = TYPE_CANONICAL (ret);
|
|
1914 else
|
|
1915 ret = get_odr_type (ret)->type;
|
|
1916 return ret;
|
|
1917 }
|
|
1918
|
111
|
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  Returns NULL only when INSERT is false and
   TYPE has no entry yet.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* Normalize to the main variant; outside LTO, types with canonical
     information are further normalized to their canonical type so all
     spellings map to one entry.  */
  type = TYPE_MAIN_VARIANT (type);
  if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
    type = TYPE_CANONICAL (type);

  gcc_checking_assert (can_be_name_hashed_p (type));

  hash = hash_odr_name (type);
  slot = odr_hash->find_slot_with_hash (type, hash,
					insert ? INSERT : NO_INSERT);

  /* With NO_INSERT a missing entry yields a NULL slot.  */
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* TYPE is an ODR duplicate of the recorded type; merge it in
	 (once per distinct tree — types_set tracks what we saw).
	 add_type_duplicate tells us whether bases still need to be
	 recorded below.  */
      if (val->type != type && insert
	  && (!val->types_set || !val->types_set->add (type)))
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      /* Fresh entry; GC-allocated because odr_type_d points into trees.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be recorded once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      *slot = val;
    }

  /* Record polymorphic bases, linking this entry into the inheritance
     graph (bases <-> derived_types).  */
  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    /* Recursive call registers the base (and its bases) first,
	       keeping odr_types ordered bases-before-derived.  */
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the largest base id to detect ordering violations.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base got a higher id than us (it was registered just now);
	 move this entry to the end of the array to restore ordering.  */
      odr_types[val->id] = 0;
      /* Be sure we did not recorded any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
|
|
2012
|
145
|
2013 /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
|
|
2014 on ODR violations. */
|
|
2015
|
|
2016 tree
|
|
2017 prevailing_odr_type (tree type)
|
|
2018 {
|
|
2019 odr_type t = get_odr_type (type, false);
|
|
2020 if (!t || t->odr_violated)
|
|
2021 return type;
|
|
2022 return t->type;
|
|
2023 }
|
|
2024
|
|
2025 /* Set tbaa_enabled flag for TYPE. */
|
|
2026
|
|
2027 void
|
|
2028 enable_odr_based_tbaa (tree type)
|
|
2029 {
|
|
2030 odr_type t = get_odr_type (type, true);
|
|
2031 t->tbaa_enabled = true;
|
|
2032 }
|
|
2033
|
|
2034 /* True if canonical type of TYPE is determined using ODR name. */
|
|
2035
|
|
2036 bool
|
|
2037 odr_based_tbaa_p (const_tree type)
|
|
2038 {
|
|
2039 if (!RECORD_OR_UNION_TYPE_P (type))
|
|
2040 return false;
|
|
2041 odr_type t = get_odr_type (const_cast <tree> (type), false);
|
|
2042 if (!t || !t->tbaa_enabled)
|
|
2043 return false;
|
|
2044 return true;
|
|
2045 }
|
|
2046
|
|
/* Set TYPE_CANONICAL of type and all its variants and duplicates
   to CANONICAL.  */

void
set_type_canonical_for_odr_type (tree type, tree canonical)
{
  odr_type t = get_odr_type (type, false);
  unsigned int i;
  tree tt;

  /* Update the recorded type and all its variants.  */
  for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
    TYPE_CANONICAL (t2) = canonical;
  /* Also update every recorded ODR duplicate and its variants so all
     trees of the same ODR type share one canonical type.  */
  if (t->types)
    FOR_EACH_VEC_ELT (*t->types, i, tt)
      for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
	TYPE_CANONICAL (t2) = canonical;
}
|
|
2064
|
|
2065 /* Return true if we reported some ODR violation on TYPE. */
|
|
2066
|
|
2067 bool
|
|
2068 odr_type_violation_reported_p (tree type)
|
|
2069 {
|
|
2070 return get_odr_type (type, false)->odr_violated;
|
|
2071 }
|
|
2072
|
|
/* Add TYPE to ODR type hash.  Recursively registers field and base
   types first so that ODR warnings are emitted in a sensible order.  */

void
register_odr_type (tree type)
{
  if (!odr_hash)
    odr_hash = new odr_hash_type (23);
  /* Only main variants are entered into the hash.  */
  if (type == TYPE_MAIN_VARIANT (type))
    {
      /* To get ODR warnings right, first register all sub-types.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && COMPLETE_TYPE_P (type))
	{
	  /* Limit recursion on types which are already registered.  */
	  odr_type ot = get_odr_type (type, false);
	  if (ot
	      && (ot->type == type
		  || (ot->types_set
		      && ot->types_set->contains (type))))
	    return;
	  /* Register field types, stripping array dimensions to reach
	     the element type.  */
	  for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	    if (TREE_CODE (f) == FIELD_DECL)
	      {
		tree subtype = TREE_TYPE (f);

		while (TREE_CODE (subtype) == ARRAY_TYPE)
		  subtype = TREE_TYPE (subtype);
		if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
		  register_odr_type (TYPE_MAIN_VARIANT (subtype));
	      }
	  /* Register base types, too.  */
	  if (TYPE_BINFO (type))
	    for (unsigned int i = 0;
		 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	      register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
					     (TYPE_BINFO (type), i)));
	}
      /* Finally enter TYPE itself.  */
      get_odr_type (type, true);
    }
}
|
|
2112
|
|
2113 /* Return true if type is known to have no derivations. */
|
|
2114
|
|
2115 bool
|
|
2116 type_known_to_have_no_derivations_p (tree t)
|
|
2117 {
|
|
2118 return (type_all_derivations_known_p (t)
|
|
2119 && (TYPE_FINAL_P (t)
|
|
2120 || (odr_hash
|
|
2121 && !get_odr_type (t, true)->derived_types.length())));
|
|
2122 }
|
|
2123
|
|
/* Dump ODR type T and all its derived types.  INDENT specifies indentation for
   recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  /* Print the mangled name when one was assigned.  */
  if (TYPE_NAME (t->type))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
	fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
		 IDENTIFIER_POINTER
		   (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
    }
  /* List ids of recorded (polymorphic) bases.  */
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  /* Recurse into derived types with increased indentation.  */
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}
|
|
2157
|
|
/* Dump the type inheritance graph.  Prints each inheritance tree starting
   from root (base-less) types, then dumps duplicate-tree statistics.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  /* Dump from the roots; dump_odr_type recurses into derived types.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  /* Second pass: report duplicate trees of the same ODR type.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      /* Entries can be NULL after renumbering in get_odr_type.  */
      if (!odr_types[i])
	continue;

      num_all_types++;
      if (!odr_types[i]->types || !odr_types[i]->types->length ())
	continue;

      /* To aid ODR warnings we also mangle integer constants but do
	 not consider duplicates there.  */
      if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
	continue;

      /* It is normal to have one duplicate and one normal variant.  */
      if (odr_types[i]->types->length () == 1
	  && COMPLETE_TYPE_P (odr_types[i]->type)
	  && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
	continue;

      num_types ++;

      unsigned int j;
      fprintf (f, "Duplicate tree types for odr type %i\n", i);
      print_node (f, "", odr_types[i]->type, 0);
      print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
      putc ('\n',f);
      for (j = 0; j < odr_types[i]->types->length (); j++)
	{
	  tree t;
	  num_duplicates ++;
	  fprintf (f, "duplicate #%i\n", j);
	  print_node (f, "", (*odr_types[i]->types)[j], 0);
	  t = (*odr_types[i]->types)[j];
	  /* Also dump the enclosing context chain to help identify
	     where the duplicate came from.  */
	  while (TYPE_P (t) && TYPE_CONTEXT (t))
	    {
	      t = TYPE_CONTEXT (t);
	      print_node (f, "", t, 0);
	    }
	  print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
	  putc ('\n',f);
	}
    }
  fprintf (f, "Out of %i types there are %i types with duplicates; "
	   "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
}
|
|
2219
|
|
2220 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
|
|
2221 ODR warnings.
|
|
2222 We free TYPE_VALUES of enums and also make TYPE_DECLs to not point back
|
|
2223 to the type (which is needed to keep them in the same SCC and preserve
|
|
2224 location information to output warnings) and subsequently we make all
|
|
2225 TYPE_DECLS of same assembler name equivalent. */
|
|
2226
|
|
2227 static void
|
|
2228 free_odr_warning_data ()
|
|
2229 {
|
|
2230 static bool odr_data_freed = false;
|
|
2231
|
|
2232 if (odr_data_freed || !flag_wpa || !odr_types_ptr)
|
|
2233 return;
|
|
2234
|
|
2235 odr_data_freed = true;
|
|
2236
|
|
2237 for (unsigned int i = 0; i < odr_types.length (); i++)
|
|
2238 if (odr_types[i])
|
|
2239 {
|
|
2240 tree t = odr_types[i]->type;
|
|
2241
|
|
2242 if (TREE_CODE (t) == ENUMERAL_TYPE)
|
|
2243 TYPE_VALUES (t) = NULL;
|
|
2244 TREE_TYPE (TYPE_NAME (t)) = void_type_node;
|
|
2245
|
|
2246 if (odr_types[i]->types)
|
|
2247 for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
|
|
2248 {
|
|
2249 tree td = (*odr_types[i]->types)[j];
|
|
2250
|
|
2251 if (TREE_CODE (td) == ENUMERAL_TYPE)
|
|
2252 TYPE_VALUES (td) = NULL;
|
|
2253 TYPE_NAME (td) = TYPE_NAME (t);
|
|
2254 }
|
|
2255 }
|
|
2256 odr_data_freed = true;
|
111
|
2257 }
|
|
2258
|
|
/* Initialize IPA devirt and build inheritance tree graph.  Idempotent:
   a second call only re-runs the ODR warning data cleanup.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  dump_flags_t flags;

  /* Already built; just make sure warning-only data is released.  */
  if (odr_hash)
    {
      free_odr_warning_data ();
      return;
    }
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting of types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  free_odr_warning_data ();
  timevar_pop (TV_IPA_INHERITANCE);
}
|
|
2318
|
|
/* Return true if N has reference from live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when callgraph is not built or
   if the method may be referred externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  /* Externally visible symbols may be referenced from vtables we
     cannot see; answer conservatively.  */
  if (node->externally_visible
      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (symtab->state <= CONSTRUCTION)
    return true;

  /* Scan referring symbols: an ADDR reference from a virtual variable
     means the vtable points at us; aliases are looked through
     recursively.  */
  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
	|| (ref->use == IPA_REF_ADDR
	    && VAR_P (ref->referring->decl)
	    && DECL_VIRTUAL_P (ref->referring->decl)))
      {
	found = true;
	break;
      }
  return found;
}
|
|
2359
|
|
2360 /* Return if TARGET is cxa_pure_virtual. */
|
|
2361
|
|
2362 static bool
|
|
2363 is_cxa_pure_virtual_p (tree target)
|
|
2364 {
|
|
2365 return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
|
|
2366 && DECL_NAME (target)
|
|
2367 && id_equal (DECL_NAME (target),
|
|
2368 "__cxa_pure_virtual");
|
|
2369 }
|
|
2370
|
|
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;
  bool pure_virtual = is_cxa_pure_virtual_p (target);

  /* __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && !pure_virtual
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* When sanitizing, do not assume that __cxa_pure_virtual is not called
	 by valid program.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	;
      /* Only add pure virtual if it is the only possible target.  This way
	 we will preserve the diagnostics about pure virtual called in many
	 cases without disabling optimization in other.  */
      else if (pure_virtual)
	{
	  if (nodes.length ())
	    return;
	}
      /* If we found a real target, take away cxa_pure_virtual.  */
      else if (!pure_virtual && nodes.length () == 1
	       && is_cxa_pure_virtual_p (nodes[0]->decl))
	nodes.pop ();
      if (pure_virtual && nodes.length ())
	return;
      /* Record the node once; INSERTED de-duplicates by decl.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Caller did not ask for completeness tracking; nothing to update.  */
  else if (!completep)
    ;
  /* We have definition of __cxa_pure_virtual that is not accessible (it is
     optimized out or partitioned to other unit) so we cannot add it.  When
     not sanitizing, there is nothing to do.
     Otherwise declare the list incomplete.  */
  else if (pure_virtual)
    {
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	*completep = false;
    }
  else if (flag_ltrans
	   || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
    *completep = false;
}
|
|
2486
|
|
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
   */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Push BINFO on the vtable stack on entry; every return path below
     must pop it again to keep the stack balanced.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* With BASES_TO_CONSIDER we only peek (contains); otherwise we
	 mark the vtable as visited (add).  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
|
|
2597
|
|
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into all types derived from TYPE in the inheritance graph.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
|
|
2650
|
|
2651 /* Cache of queries for polymorphic call targets.
|
|
2652
|
|
2653 Enumerating all call targets may get expensive when there are many
|
|
2654 polymorphic calls in the program, so we memoize all the previous
|
|
2655 queries and avoid duplicated work. */
|
|
2656
|
145
|
class polymorphic_call_target_d
{
public:
  /* Token (vtable slot) of the virtual call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Canonicalized context of the call; part of the cache key.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call (OTR_TYPE).  */
  odr_type type;
  /* Cached answer: the possible target nodes.  Released by
     polymorphic_call_target_hasher::remove.  */
  vec <cgraph_node *> targets;
  /* Non-NULL when this entry contributed to a -Wsuggest-final-methods
     style statistic: the decl used as key in decl_warnings.  */
  tree decl_warning;
  /* Non-zero when this entry contributed to a -Wsuggest-final-types
     style statistic; stored as outer type id + 1 so 0 means "none".  */
  int type_warning;
  /* Number of ODR types known when the entry was computed.  Adding a
     new type may change the outcome, so this is part of the key.  */
  unsigned int n_odr_types;
  /* True when TARGETS is known to be a complete list.  */
  bool complete;
  /* True when the query asked for speculative targets.  */
  bool speculative;
};
|
|
2670
|
|
2671 /* Polymorphic call target cache helpers. */
|
|
2672
|
|
/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  /* Hash a query; must mix every field EQUAL compares.  */
  static inline hashval_t hash (const polymorphic_call_target_d *);
  /* Field-wise comparison of two queries.  */
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  /* Free an entry, including its target vector.  */
  static inline void remove (polymorphic_call_target_d *);
};
|
|
2681
|
|
2682 /* Return the computed hashcode for ODR_QUERY. */
|
|
2683
|
|
2684 inline hashval_t
|
|
2685 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
|
|
2686 {
|
|
2687 inchash::hash hstate (odr_query->otr_token);
|
|
2688
|
|
2689 hstate.add_hwi (odr_query->type->id);
|
|
2690 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
|
|
2691 hstate.add_hwi (odr_query->context.offset);
|
145
|
2692 hstate.add_hwi (odr_query->n_odr_types);
|
111
|
2693
|
|
2694 if (odr_query->context.speculative_outer_type)
|
|
2695 {
|
|
2696 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
|
|
2697 hstate.add_hwi (odr_query->context.speculative_offset);
|
|
2698 }
|
|
2699 hstate.add_flag (odr_query->speculative);
|
|
2700 hstate.add_flag (odr_query->context.maybe_in_construction);
|
|
2701 hstate.add_flag (odr_query->context.maybe_derived_type);
|
|
2702 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
|
|
2703 hstate.commit_flag ();
|
|
2704 return hstate.end ();
|
|
2705 }
|
|
2706
|
|
2707 /* Compare cache entries T1 and T2. */
|
|
2708
|
|
2709 inline bool
|
|
2710 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
|
|
2711 const polymorphic_call_target_d *t2)
|
|
2712 {
|
|
2713 return (t1->type == t2->type && t1->otr_token == t2->otr_token
|
|
2714 && t1->speculative == t2->speculative
|
|
2715 && t1->context.offset == t2->context.offset
|
|
2716 && t1->context.speculative_offset == t2->context.speculative_offset
|
|
2717 && t1->context.outer_type == t2->context.outer_type
|
|
2718 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
|
|
2719 && t1->context.maybe_in_construction
|
|
2720 == t2->context.maybe_in_construction
|
|
2721 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
|
|
2722 && (t1->context.speculative_maybe_derived_type
|
145
|
2723 == t2->context.speculative_maybe_derived_type)
|
|
2724 /* Adding new type may affect outcome of target search. */
|
|
2725 && t1->n_odr_types == t2->n_odr_types);
|
111
|
2726 }
|
|
2727
|
|
/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  /* The entry owns its target vector and itself (allocated with XCNEW
     in possible_polymorphic_call_targets); free both.  */
  v->targets.release ();
  free (v);
}
|
|
2736
|
|
/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
/* Allocated lazily by possible_polymorphic_call_targets and flushed by
   free_polymorphic_call_targets_hash.  */
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
|
|
2742
|
|
2743 /* Destroy polymorphic call target query cache. */
|
|
2744
|
|
2745 static void
|
|
2746 free_polymorphic_call_targets_hash ()
|
|
2747 {
|
|
2748 if (cached_polymorphic_call_targets)
|
|
2749 {
|
|
2750 delete polymorphic_call_target_hash;
|
|
2751 polymorphic_call_target_hash = NULL;
|
|
2752 delete cached_polymorphic_call_targets;
|
|
2753 cached_polymorphic_call_targets = NULL;
|
|
2754 }
|
|
2755 }
|
|
2756
|
131
|
2757 /* Force rebuilding type inheritance graph from scratch.
|
|
2758 This is use to make sure that we do not keep references to types
|
|
2759 which was not visible to free_lang_data. */
|
|
2760
|
|
2761 void
|
|
2762 rebuild_type_inheritance_graph ()
|
|
2763 {
|
|
2764 if (!odr_hash)
|
|
2765 return;
|
|
2766 delete odr_hash;
|
|
2767 odr_hash = NULL;
|
|
2768 odr_types_ptr = NULL;
|
|
2769 free_polymorphic_call_targets_hash ();
|
|
2770 }
|
|
2771
|
111
|
2772 /* When virtual function is removed, we may need to flush the cache. */
|
|
2773
|
|
2774 static void
|
|
2775 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
|
|
2776 {
|
|
2777 if (cached_polymorphic_call_targets
|
145
|
2778 && !thunk_expansion
|
111
|
2779 && cached_polymorphic_call_targets->contains (n))
|
|
2780 free_polymorphic_call_targets_hash ();
|
|
2781 }
|
|
2782
|
|
2783 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
|
|
2784
|
|
2785 tree
|
|
2786 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
|
|
2787 tree vtable)
|
|
2788 {
|
|
2789 tree v = BINFO_VTABLE (binfo);
|
|
2790 int i;
|
|
2791 tree base_binfo;
|
|
2792 unsigned HOST_WIDE_INT this_offset;
|
|
2793
|
|
2794 if (v)
|
|
2795 {
|
|
2796 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
|
|
2797 gcc_unreachable ();
|
|
2798
|
|
2799 if (offset == this_offset
|
|
2800 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
|
|
2801 return binfo;
|
|
2802 }
|
|
2803
|
|
2804 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
|
|
2805 if (polymorphic_type_binfo_p (base_binfo))
|
|
2806 {
|
|
2807 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
|
|
2808 if (base_binfo)
|
|
2809 return base_binfo;
|
|
2810 }
|
|
2811 return NULL;
|
|
2812 }
|
|
2813
|
|
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* Matched the &MEM_REF form: operand 0/0/0 is the vtable
	 VAR_DECL, operand 0/1 the constant byte offset.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* After stripping the optional offset we must be left with the
     address of the virtual table itself.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
|
|
2861
|
|
2862 /* T is known constant value of virtual table pointer. Return BINFO of the
|
|
2863 instance type. */
|
|
2864
|
|
2865 tree
|
|
2866 vtable_pointer_value_to_binfo (const_tree t)
|
|
2867 {
|
|
2868 tree vtable;
|
|
2869 unsigned HOST_WIDE_INT offset;
|
|
2870
|
|
2871 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
|
|
2872 return NULL_TREE;
|
|
2873
|
|
2874 /* FIXME: for stores of construction vtables we return NULL,
|
|
2875 because we do not have BINFO for those. Eventually we should fix
|
|
2876 our representation to allow this case to be handled, too.
|
|
2877 In the case we see store of BINFO we however may assume
|
|
2878 that standard folding will be able to cope with it. */
|
|
2879 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
|
|
2880 offset, vtable);
|
|
2881 }
|
|
2882
|
|
2883 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
|
|
2884 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
|
|
2885 and insert them in NODES.
|
|
2886
|
|
2887 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
|
|
2888
|
|
2889 static void
|
|
2890 record_targets_from_bases (tree otr_type,
|
|
2891 HOST_WIDE_INT otr_token,
|
|
2892 tree outer_type,
|
|
2893 HOST_WIDE_INT offset,
|
|
2894 vec <cgraph_node *> &nodes,
|
|
2895 hash_set<tree> *inserted,
|
|
2896 hash_set<tree> *matched_vtables,
|
|
2897 bool *completep)
|
|
2898 {
|
|
2899 while (true)
|
|
2900 {
|
|
2901 HOST_WIDE_INT pos, size;
|
|
2902 tree base_binfo;
|
|
2903 tree fld;
|
|
2904
|
|
2905 if (types_same_for_odr (outer_type, otr_type))
|
|
2906 return;
|
|
2907
|
|
2908 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
|
|
2909 {
|
|
2910 if (TREE_CODE (fld) != FIELD_DECL)
|
|
2911 continue;
|
|
2912
|
|
2913 pos = int_bit_position (fld);
|
|
2914 size = tree_to_shwi (DECL_SIZE (fld));
|
|
2915 if (pos <= offset && (pos + size) > offset
|
|
2916 /* Do not get confused by zero sized bases. */
|
|
2917 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
|
|
2918 break;
|
|
2919 }
|
|
2920 /* Within a class type we should always find corresponding fields. */
|
|
2921 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
|
|
2922
|
|
2923 /* Nonbase types should have been stripped by outer_class_type. */
|
|
2924 gcc_assert (DECL_ARTIFICIAL (fld));
|
|
2925
|
|
2926 outer_type = TREE_TYPE (fld);
|
|
2927 offset -= pos;
|
|
2928
|
|
2929 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
|
|
2930 offset, otr_type);
|
|
2931 if (!base_binfo)
|
|
2932 {
|
|
2933 gcc_assert (odr_violation_reported);
|
|
2934 return;
|
|
2935 }
|
|
2936 gcc_assert (base_binfo);
|
|
2937 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
|
|
2938 {
|
|
2939 bool can_refer;
|
|
2940 tree target = gimple_get_virt_method_for_binfo (otr_token,
|
|
2941 base_binfo,
|
|
2942 &can_refer);
|
|
2943 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
|
|
2944 maybe_record_node (nodes, target, inserted, can_refer, completep);
|
|
2945 matched_vtables->add (BINFO_VTABLE (base_binfo));
|
|
2946 }
|
|
2947 }
|
|
2948 }
|
|
2949
|
|
2950 /* When virtual table is removed, we may need to flush the cache. */
|
|
2951
|
|
2952 static void
|
|
2953 devirt_variable_node_removal_hook (varpool_node *n,
|
|
2954 void *d ATTRIBUTE_UNUSED)
|
|
2955 {
|
|
2956 if (cached_polymorphic_call_targets
|
|
2957 && DECL_VIRTUAL_P (n->decl)
|
|
2958 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
|
|
2959 free_polymorphic_call_targets_hash ();
|
|
2960 }
|
|
2961
|
|
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* The polymorphic type in question.  */
  tree type;
  /* Number of polymorphic calls counted against this type.  */
  int count;
  /* Accumulated profile count of those calls.  */
  profile_count dyn_count;
};
|
|
2970
|
|
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* The method declaration in question.  */
  tree decl;
  /* Number of polymorphic calls counted against this method.  */
  int count;
  /* Accumulated profile count of those calls.  */
  profile_count dyn_count;
};
|
|
2979
|
|
/* Information about type and decl warnings.  */

class final_warning_record
{
public:
  /* If needed grow type_warnings vector and initialize new decl_warn_count
     to have dyn_count set to profile_count::zero ().  */
  void grow_type_warnings (unsigned newlen);

  /* Profile count of the call currently being analyzed; folded into
     the per-type and per-decl counters below.  */
  profile_count dyn_count;
  /* Indexed by odr_type id: calls that would benefit from the type
     being final (-Wsuggest-final-types).  */
  auto_vec<odr_type_warn_count> type_warnings;
  /* Keyed by method decl: calls that would benefit from the method
     being final (-Wsuggest-final-methods).  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
|
131
|
2993
|
|
2994 void
|
|
2995 final_warning_record::grow_type_warnings (unsigned newlen)
|
|
2996 {
|
|
2997 unsigned len = type_warnings.length ();
|
|
2998 if (newlen > len)
|
|
2999 {
|
|
3000 type_warnings.safe_grow_cleared (newlen);
|
|
3001 for (unsigned i = len; i < newlen; i++)
|
|
3002 type_warnings[i].dyn_count = profile_count::zero ();
|
|
3003 }
|
|
3004 }
|
|
3005
|
145
|
/* Global record of pending -Wsuggest-final-* statistics; NULL when not
   collecting them — users check it before dereferencing.  */
class final_warning_record *final_warning_records;
|
111
|
3007
|
|
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVED_TYPES make
   us to walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user do not asks for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      /* Removing a virtual function or an anonymous-namespace vtable
	 can invalidate cached answers; register flush hooks once.  */
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  N_ODR_TYPES is part of the key: adding a
     new ODR type may change the result of the search.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  key.n_odr_types = odr_types.length ();
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      /* Cache hit: update the final-warning statistics this entry
	 contributed (TYPE_WARNING is biased by +1; 0 means none).  */
      if (completep)
	*completep = (*slot)->complete;
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  if (!final_warning_records->type_warnings
		[(*slot)->type_warning - 1].dyn_count.initialized_p ())
	    final_warning_records->type_warnings
	       [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
	  if (final_warning_records->dyn_count > 0)
	    final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	      = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
		+ final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  if (final_warning_records->dyn_count > 0)
	    c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  /* INSERTED tracks decls already pushed to NODES; MATCHED_VTABLES
     tracks vtables already walked.  */
  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  if (!outer_type->all_derivations_known)
	    {
	      /* Collect statistics for -Wsuggest-final-types and
		 -Wsuggest-final-methods: a unique target that would be
		 final devirtualizes the call.  */
	      if (!speculative && final_warning_records
		  && nodes.length () == 1
		  && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
		{
		  if (complete
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      final_warning_records->grow_type_warnings
			(outer_type->id);
		      final_warning_records->type_warnings[outer_type->id].count++;
		      if (!final_warning_records->type_warnings
				[outer_type->id].dyn_count.initialized_p ())
			final_warning_records->type_warnings
			   [outer_type->id].dyn_count = profile_count::zero ();
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      /* Stored with +1 bias so that 0 means "no warning".  */
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Publish the computed answer into the cache entry.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  (*slot)->n_odr_types = odr_types.length ();
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
|
|
3354
|
|
/* Push a pointer to VALUE into VEC; always return true so iteration
   continues.  Signature matches a hash_map traverse callback over
   final_warning_record::decl_warnings — confirm at callers.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
|
|
3362
|
|
3363 /* Dump target list TARGETS into FILE. */
|
|
3364
|
|
3365 static void
|
145
|
3366 dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
|
111
|
3367 {
|
|
3368 unsigned int i;
|
|
3369
|
|
3370 for (i = 0; i < targets.length (); i++)
|
|
3371 {
|
|
3372 char *name = NULL;
|
|
3373 if (in_lto_p)
|
|
3374 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
|
145
|
3375 fprintf (f, " %s", name ? name : targets[i]->dump_name ());
|
111
|
3376 if (in_lto_p)
|
|
3377 free (name);
|
|
3378 if (!targets[i]->definition)
|
|
3379 fprintf (f, " (no definition%s)",
|
|
3380 DECL_DECLARED_INLINE_P (targets[i]->decl)
|
|
3381 ? " inline" : "");
|
145
|
3382 /* With many targets for every call polymorphic dumps are going to
|
|
3383 be quadratic in size. */
|
|
3384 if (i > 10 && !verbose)
|
|
3385 {
|
|
3386 fprintf (f, " ... and %i more targets\n", targets.length () - i);
|
|
3387 return;
|
|
3388 }
|
111
|
3389 }
|
|
3390 fprintf (f, "\n");
|
|
3391 }
|
|
3392
|
|
/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx,
					bool verbose)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  if (!type)
    return;
  /* First the non-speculative target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets, verbose);

  /* Then the speculative list; dump it only when it differs from the
     non-speculative one.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, "  Speculative targets:");
      dump_targets (f, targets, verbose);
    }
  /* Ugly: during callgraph construction the target cache may get populated
     before all targets are found.  While this is harmless (because all local
     types are discovered and only in those case we devirtualize fully and we
     don't do speculative devirtualization before IPA stage) it triggers
     assert here when dumping at that stage also populates the case with
     speculative targets.  Quietly ignore this.  */
  gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
  fprintf (f, "\n");
}
|
|
3444
|
|
3445
|
|
3446 /* Return true if N can be possibly target of a polymorphic call of
|
|
3447 OTR_TYPE/OTR_TOKEN. */
|
|
3448
|
|
3449 bool
|
|
3450 possible_polymorphic_call_target_p (tree otr_type,
|
|
3451 HOST_WIDE_INT otr_token,
|
|
3452 const ipa_polymorphic_call_context &ctx,
|
|
3453 struct cgraph_node *n)
|
|
3454 {
|
|
3455 vec <cgraph_node *> targets;
|
|
3456 unsigned int i;
|
|
3457 bool final;
|
|
3458
|
145
|
3459 if (fndecl_built_in_p (n->decl, BUILT_IN_UNREACHABLE)
|
|
3460 || fndecl_built_in_p (n->decl, BUILT_IN_TRAP))
|
111
|
3461 return true;
|
|
3462
|
|
3463 if (is_cxa_pure_virtual_p (n->decl))
|
|
3464 return true;
|
|
3465
|
|
3466 if (!odr_hash)
|
|
3467 return true;
|
|
3468 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
|
|
3469 for (i = 0; i < targets.length (); i++)
|
|
3470 if (n->semantically_equivalent_p (targets[i]))
|
|
3471 return true;
|
|
3472
|
|
3473 /* At a moment we allow middle end to dig out new external declarations
|
|
3474 as a targets of polymorphic calls. */
|
|
3475 if (!final && !n->definition)
|
|
3476 return true;
|
|
3477 return false;
|
|
3478 }
|
|
3479
|
|
3480
|
|
3481
|
|
3482 /* Return true if N can be possibly target of a polymorphic call of
|
|
3483 OBJ_TYPE_REF expression REF in STMT. */
|
|
3484
|
|
3485 bool
|
|
3486 possible_polymorphic_call_target_p (tree ref,
|
|
3487 gimple *stmt,
|
|
3488 struct cgraph_node *n)
|
|
3489 {
|
|
3490 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
|
|
3491 tree call_fn = gimple_call_fn (stmt);
|
|
3492
|
|
3493 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
|
|
3494 tree_to_uhwi
|
|
3495 (OBJ_TYPE_REF_TOKEN (call_fn)),
|
|
3496 context,
|
|
3497 n);
|
|
3498 }
|
|
3499
|
|
3500
|
|
3501 /* After callgraph construction new external nodes may appear.
|
|
3502 Add them into the graph. */
|
|
3503
|
|
3504 void
|
|
3505 update_type_inheritance_graph (void)
|
|
3506 {
|
|
3507 struct cgraph_node *n;
|
|
3508
|
|
3509 if (!odr_hash)
|
|
3510 return;
|
|
3511 free_polymorphic_call_targets_hash ();
|
|
3512 timevar_push (TV_IPA_INHERITANCE);
|
|
3513 /* We reconstruct the graph starting from types of all methods seen in the
|
|
3514 unit. */
|
|
3515 FOR_EACH_FUNCTION (n)
|
|
3516 if (DECL_VIRTUAL_P (n->decl)
|
|
3517 && !n->definition
|
|
3518 && n->real_symbol_p ())
|
|
3519 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
|
|
3520 timevar_pop (TV_IPA_INHERITANCE);
|
|
3521 }
|
|
3522
|
|
3523
|
|
3524 /* Return true if N looks like likely target of a polymorphic call.
|
|
3525 Rule out cxa_pure_virtual, noreturns, function declared cold and
|
|
3526 other obvious cases. */
|
|
3527
|
|
3528 bool
|
|
3529 likely_target_p (struct cgraph_node *n)
|
|
3530 {
|
|
3531 int flags;
|
|
3532 /* cxa_pure_virtual and similar things are not likely. */
|
|
3533 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
|
|
3534 return false;
|
|
3535 flags = flags_from_decl_or_type (n->decl);
|
|
3536 if (flags & ECF_NORETURN)
|
|
3537 return false;
|
|
3538 if (lookup_attribute ("cold",
|
|
3539 DECL_ATTRIBUTES (n->decl)))
|
|
3540 return false;
|
|
3541 if (n->frequency < NODE_FREQUENCY_NORMAL)
|
|
3542 return false;
|
|
3543 /* If there are no live virtual tables referring the target,
|
|
3544 the only way the target can be called is an instance coming from other
|
|
3545 compilation unit; speculative devirtualization is built around an
|
|
3546 assumption that won't happen. */
|
|
3547 if (!referenced_from_vtable_p (n))
|
|
3548 return false;
|
|
3549 return true;
|
|
3550 }
|
|
3551
|
|
3552 /* Compare type warning records P1 and P2 and choose one with larger count;
|
|
3553 helper for qsort. */
|
|
3554
|
145
|
3555 static int
|
111
|
3556 type_warning_cmp (const void *p1, const void *p2)
|
|
3557 {
|
|
3558 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
|
|
3559 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
|
|
3560
|
|
3561 if (t1->dyn_count < t2->dyn_count)
|
|
3562 return 1;
|
|
3563 if (t1->dyn_count > t2->dyn_count)
|
|
3564 return -1;
|
|
3565 return t2->count - t1->count;
|
|
3566 }
|
|
3567
|
|
3568 /* Compare decl warning records P1 and P2 and choose one with larger count;
|
|
3569 helper for qsort. */
|
|
3570
|
145
|
3571 static int
|
111
|
3572 decl_warning_cmp (const void *p1, const void *p2)
|
|
3573 {
|
|
3574 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
|
|
3575 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
|
|
3576
|
|
3577 if (t1->dyn_count < t2->dyn_count)
|
|
3578 return 1;
|
|
3579 if (t1->dyn_count > t2->dyn_count)
|
|
3580 return -1;
|
|
3581 return t2->count - t1->count;
|
|
3582 }
|
|
3583
|
|
3584
|
|
3585 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
|
|
3586 context CTX. */
|
|
3587
|
|
3588 struct cgraph_node *
|
|
3589 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
|
|
3590 ipa_polymorphic_call_context ctx)
|
|
3591 {
|
|
3592 vec <cgraph_node *>targets
|
|
3593 = possible_polymorphic_call_targets
|
|
3594 (otr_type, otr_token, ctx, NULL, NULL, true);
|
|
3595 unsigned int i;
|
|
3596 struct cgraph_node *likely_target = NULL;
|
|
3597
|
|
3598 for (i = 0; i < targets.length (); i++)
|
|
3599 if (likely_target_p (targets[i]))
|
|
3600 {
|
|
3601 if (likely_target)
|
|
3602 return NULL;
|
|
3603 likely_target = targets[i];
|
|
3604 }
|
|
3605 if (!likely_target
|
|
3606 ||!likely_target->definition
|
|
3607 || DECL_EXTERNAL (likely_target->decl))
|
|
3608 return NULL;
|
|
3609
|
|
3610 /* Don't use an implicitly-declared destructor (c++/58678). */
|
|
3611 struct cgraph_node *non_thunk_target
|
|
3612 = likely_target->function_symbol ();
|
|
3613 if (DECL_ARTIFICIAL (non_thunk_target->decl))
|
|
3614 return NULL;
|
|
3615 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
|
|
3616 && likely_target->can_be_discarded_p ())
|
|
3617 return NULL;
|
|
3618 return likely_target;
|
|
3619 }
|
|
3620
|
|
3621 /* The ipa-devirt pass.
|
|
3622 When polymorphic call has only one likely target in the unit,
|
|
3623 turn it into a speculative call. */
|
|
3624
|
|
3625 static unsigned int
|
|
3626 ipa_devirt (void)
|
|
3627 {
|
|
3628 struct cgraph_node *n;
|
|
3629 hash_set<void *> bad_call_targets;
|
|
3630 struct cgraph_edge *e;
|
|
3631
|
|
3632 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
|
|
3633 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
|
|
3634 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
|
|
3635 int ndropped = 0;
|
|
3636
|
|
3637 if (!odr_types_ptr)
|
|
3638 return 0;
|
|
3639
|
|
3640 if (dump_file)
|
|
3641 dump_type_inheritance_graph (dump_file);
|
|
3642
|
|
3643 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
|
|
3644 This is implemented by setting up final_warning_records that are updated
|
|
3645 by get_polymorphic_call_targets.
|
|
3646 We need to clear cache in this case to trigger recomputation of all
|
|
3647 entries. */
|
|
3648 if (warn_suggest_final_methods || warn_suggest_final_types)
|
|
3649 {
|
|
3650 final_warning_records = new (final_warning_record);
|
|
3651 final_warning_records->dyn_count = profile_count::zero ();
|
131
|
3652 final_warning_records->grow_type_warnings (odr_types.length ());
|
111
|
3653 free_polymorphic_call_targets_hash ();
|
|
3654 }
|
|
3655
|
|
3656 FOR_EACH_DEFINED_FUNCTION (n)
|
|
3657 {
|
|
3658 bool update = false;
|
|
3659 if (!opt_for_fn (n->decl, flag_devirtualize))
|
|
3660 continue;
|
|
3661 if (dump_file && n->indirect_calls)
|
|
3662 fprintf (dump_file, "\n\nProcesing function %s\n",
|
|
3663 n->dump_name ());
|
|
3664 for (e = n->indirect_calls; e; e = e->next_callee)
|
|
3665 if (e->indirect_info->polymorphic)
|
|
3666 {
|
|
3667 struct cgraph_node *likely_target = NULL;
|
|
3668 void *cache_token;
|
|
3669 bool final;
|
|
3670
|
|
3671 if (final_warning_records)
|
131
|
3672 final_warning_records->dyn_count = e->count.ipa ();
|
111
|
3673
|
|
3674 vec <cgraph_node *>targets
|
|
3675 = possible_polymorphic_call_targets
|
|
3676 (e, &final, &cache_token, true);
|
|
3677 unsigned int i;
|
|
3678
|
|
3679 /* Trigger warnings by calculating non-speculative targets. */
|
|
3680 if (warn_suggest_final_methods || warn_suggest_final_types)
|
|
3681 possible_polymorphic_call_targets (e);
|
|
3682
|
|
3683 if (dump_file)
|
|
3684 dump_possible_polymorphic_call_targets
|
145
|
3685 (dump_file, e, (dump_flags & TDF_DETAILS));
|
111
|
3686
|
|
3687 npolymorphic++;
|
|
3688
|
|
3689 /* See if the call can be devirtualized by means of ipa-prop's
|
|
3690 polymorphic call context propagation. If not, we can just
|
|
3691 forget about this call being polymorphic and avoid some heavy
|
|
3692 lifting in remove_unreachable_nodes that will otherwise try to
|
|
3693 keep all possible targets alive until inlining and in the inliner
|
|
3694 itself.
|
|
3695
|
|
3696 This may need to be revisited once we add further ways to use
|
145
|
3697 the may edges, but it is a reasonable thing to do right now. */
|
111
|
3698
|
|
3699 if ((e->indirect_info->param_index == -1
|
|
3700 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
|
|
3701 && e->indirect_info->vptr_changed))
|
|
3702 && !flag_ltrans_devirtualize)
|
|
3703 {
|
|
3704 e->indirect_info->polymorphic = false;
|
|
3705 ndropped++;
|
|
3706 if (dump_file)
|
|
3707 fprintf (dump_file, "Dropping polymorphic call info;"
|
145
|
3708 " it cannot be used by ipa-prop\n");
|
111
|
3709 }
|
|
3710
|
|
3711 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
|
|
3712 continue;
|
|
3713
|
|
3714 if (!e->maybe_hot_p ())
|
|
3715 {
|
|
3716 if (dump_file)
|
|
3717 fprintf (dump_file, "Call is cold\n\n");
|
|
3718 ncold++;
|
|
3719 continue;
|
|
3720 }
|
|
3721 if (e->speculative)
|
|
3722 {
|
|
3723 if (dump_file)
|
|
3724 fprintf (dump_file, "Call is already speculated\n\n");
|
|
3725 nspeculated++;
|
|
3726
|
|
3727 /* When dumping see if we agree with speculation. */
|
|
3728 if (!dump_file)
|
|
3729 continue;
|
|
3730 }
|
|
3731 if (bad_call_targets.contains (cache_token))
|
|
3732 {
|
|
3733 if (dump_file)
|
|
3734 fprintf (dump_file, "Target list is known to be useless\n\n");
|
|
3735 nmultiple++;
|
|
3736 continue;
|
|
3737 }
|
|
3738 for (i = 0; i < targets.length (); i++)
|
|
3739 if (likely_target_p (targets[i]))
|
|
3740 {
|
|
3741 if (likely_target)
|
|
3742 {
|
|
3743 likely_target = NULL;
|
|
3744 if (dump_file)
|
|
3745 fprintf (dump_file, "More than one likely target\n\n");
|
|
3746 nmultiple++;
|
|
3747 break;
|
|
3748 }
|
|
3749 likely_target = targets[i];
|
|
3750 }
|
|
3751 if (!likely_target)
|
|
3752 {
|
|
3753 bad_call_targets.add (cache_token);
|
|
3754 continue;
|
|
3755 }
|
|
3756 /* This is reached only when dumping; check if we agree or disagree
|
|
3757 with the speculation. */
|
|
3758 if (e->speculative)
|
|
3759 {
|
145
|
3760 bool found = e->speculative_call_for_target (likely_target);
|
|
3761 if (found)
|
111
|
3762 {
|
|
3763 fprintf (dump_file, "We agree with speculation\n\n");
|
|
3764 nok++;
|
|
3765 }
|
|
3766 else
|
|
3767 {
|
|
3768 fprintf (dump_file, "We disagree with speculation\n\n");
|
|
3769 nwrong++;
|
|
3770 }
|
|
3771 continue;
|
|
3772 }
|
|
3773 if (!likely_target->definition)
|
|
3774 {
|
|
3775 if (dump_file)
|
|
3776 fprintf (dump_file, "Target is not a definition\n\n");
|
|
3777 nnotdefined++;
|
|
3778 continue;
|
|
3779 }
|
|
3780 /* Do not introduce new references to external symbols. While we
|
|
3781 can handle these just well, it is common for programs to
|
|
3782 incorrectly with headers defining methods they are linked
|
|
3783 with. */
|
|
3784 if (DECL_EXTERNAL (likely_target->decl))
|
|
3785 {
|
|
3786 if (dump_file)
|
|
3787 fprintf (dump_file, "Target is external\n\n");
|
|
3788 nexternal++;
|
|
3789 continue;
|
|
3790 }
|
|
3791 /* Don't use an implicitly-declared destructor (c++/58678). */
|
|
3792 struct cgraph_node *non_thunk_target
|
|
3793 = likely_target->function_symbol ();
|
|
3794 if (DECL_ARTIFICIAL (non_thunk_target->decl))
|
|
3795 {
|
|
3796 if (dump_file)
|
|
3797 fprintf (dump_file, "Target is artificial\n\n");
|
|
3798 nartificial++;
|
|
3799 continue;
|
|
3800 }
|
|
3801 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
|
|
3802 && likely_target->can_be_discarded_p ())
|
|
3803 {
|
|
3804 if (dump_file)
|
|
3805 fprintf (dump_file, "Target is overwritable\n\n");
|
|
3806 noverwritable++;
|
|
3807 continue;
|
|
3808 }
|
|
3809 else if (dbg_cnt (devirt))
|
|
3810 {
|
|
3811 if (dump_enabled_p ())
|
|
3812 {
|
131
|
3813 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
|
111
|
3814 "speculatively devirtualizing call "
|
|
3815 "in %s to %s\n",
|
|
3816 n->dump_name (),
|
|
3817 likely_target->dump_name ());
|
|
3818 }
|
|
3819 if (!likely_target->can_be_discarded_p ())
|
|
3820 {
|
|
3821 cgraph_node *alias;
|
|
3822 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
|
|
3823 if (alias)
|
|
3824 likely_target = alias;
|
|
3825 }
|
|
3826 nconverted++;
|
|
3827 update = true;
|
|
3828 e->make_speculative
|
131
|
3829 (likely_target, e->count.apply_scale (8, 10));
|
111
|
3830 }
|
|
3831 }
|
|
3832 if (update)
|
|
3833 ipa_update_overall_fn_summary (n);
|
|
3834 }
|
|
3835 if (warn_suggest_final_methods || warn_suggest_final_types)
|
|
3836 {
|
|
3837 if (warn_suggest_final_types)
|
|
3838 {
|
|
3839 final_warning_records->type_warnings.qsort (type_warning_cmp);
|
|
3840 for (unsigned int i = 0;
|
|
3841 i < final_warning_records->type_warnings.length (); i++)
|
|
3842 if (final_warning_records->type_warnings[i].count)
|
|
3843 {
|
|
3844 tree type = final_warning_records->type_warnings[i].type;
|
|
3845 int count = final_warning_records->type_warnings[i].count;
|
|
3846 profile_count dyn_count
|
|
3847 = final_warning_records->type_warnings[i].dyn_count;
|
|
3848
|
|
3849 if (!(dyn_count > 0))
|
|
3850 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
|
|
3851 OPT_Wsuggest_final_types, count,
|
|
3852 "Declaring type %qD final "
|
|
3853 "would enable devirtualization of %i call",
|
|
3854 "Declaring type %qD final "
|
|
3855 "would enable devirtualization of %i calls",
|
|
3856 type,
|
|
3857 count);
|
|
3858 else
|
|
3859 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
|
|
3860 OPT_Wsuggest_final_types, count,
|
|
3861 "Declaring type %qD final "
|
|
3862 "would enable devirtualization of %i call "
|
|
3863 "executed %lli times",
|
|
3864 "Declaring type %qD final "
|
|
3865 "would enable devirtualization of %i calls "
|
|
3866 "executed %lli times",
|
|
3867 type,
|
|
3868 count,
|
|
3869 (long long) dyn_count.to_gcov_type ());
|
|
3870 }
|
|
3871 }
|
|
3872
|
|
3873 if (warn_suggest_final_methods)
|
|
3874 {
|
|
3875 auto_vec<const decl_warn_count*> decl_warnings_vec;
|
|
3876
|
|
3877 final_warning_records->decl_warnings.traverse
|
|
3878 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
|
|
3879 decl_warnings_vec.qsort (decl_warning_cmp);
|
|
3880 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
|
|
3881 {
|
|
3882 tree decl = decl_warnings_vec[i]->decl;
|
|
3883 int count = decl_warnings_vec[i]->count;
|
|
3884 profile_count dyn_count
|
|
3885 = decl_warnings_vec[i]->dyn_count;
|
|
3886
|
|
3887 if (!(dyn_count > 0))
|
|
3888 if (DECL_CXX_DESTRUCTOR_P (decl))
|
|
3889 warning_n (DECL_SOURCE_LOCATION (decl),
|
|
3890 OPT_Wsuggest_final_methods, count,
|
|
3891 "Declaring virtual destructor of %qD final "
|
|
3892 "would enable devirtualization of %i call",
|
|
3893 "Declaring virtual destructor of %qD final "
|
|
3894 "would enable devirtualization of %i calls",
|
|
3895 DECL_CONTEXT (decl), count);
|
|
3896 else
|
|
3897 warning_n (DECL_SOURCE_LOCATION (decl),
|
|
3898 OPT_Wsuggest_final_methods, count,
|
|
3899 "Declaring method %qD final "
|
|
3900 "would enable devirtualization of %i call",
|
|
3901 "Declaring method %qD final "
|
|
3902 "would enable devirtualization of %i calls",
|
|
3903 decl, count);
|
|
3904 else if (DECL_CXX_DESTRUCTOR_P (decl))
|
|
3905 warning_n (DECL_SOURCE_LOCATION (decl),
|
|
3906 OPT_Wsuggest_final_methods, count,
|
|
3907 "Declaring virtual destructor of %qD final "
|
|
3908 "would enable devirtualization of %i call "
|
|
3909 "executed %lli times",
|
|
3910 "Declaring virtual destructor of %qD final "
|
|
3911 "would enable devirtualization of %i calls "
|
|
3912 "executed %lli times",
|
|
3913 DECL_CONTEXT (decl), count,
|
|
3914 (long long)dyn_count.to_gcov_type ());
|
|
3915 else
|
|
3916 warning_n (DECL_SOURCE_LOCATION (decl),
|
|
3917 OPT_Wsuggest_final_methods, count,
|
|
3918 "Declaring method %qD final "
|
|
3919 "would enable devirtualization of %i call "
|
|
3920 "executed %lli times",
|
|
3921 "Declaring method %qD final "
|
|
3922 "would enable devirtualization of %i calls "
|
|
3923 "executed %lli times",
|
|
3924 decl, count,
|
|
3925 (long long)dyn_count.to_gcov_type ());
|
|
3926 }
|
|
3927 }
|
|
3928
|
|
3929 delete (final_warning_records);
|
|
3930 final_warning_records = 0;
|
|
3931 }
|
|
3932
|
|
3933 if (dump_file)
|
|
3934 fprintf (dump_file,
|
|
3935 "%i polymorphic calls, %i devirtualized,"
|
|
3936 " %i speculatively devirtualized, %i cold\n"
|
|
3937 "%i have multiple targets, %i overwritable,"
|
|
3938 " %i already speculated (%i agree, %i disagree),"
|
|
3939 " %i external, %i not defined, %i artificial, %i infos dropped\n",
|
|
3940 npolymorphic, ndevirtualized, nconverted, ncold,
|
|
3941 nmultiple, noverwritable, nspeculated, nok, nwrong,
|
|
3942 nexternal, nnotdefined, nartificial, ndropped);
|
|
3943 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
|
|
3944 }
|
|
3945
|
|
3946 namespace {
|
|
3947
|
|
/* Pass descriptor for the speculative devirtualization IPA pass; consumed
   by the pass manager through the pass_ipa_devirt object below.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
|
|
3960
|
|
3961 class pass_ipa_devirt : public ipa_opt_pass_d
|
|
3962 {
|
|
3963 public:
|
|
3964 pass_ipa_devirt (gcc::context *ctxt)
|
|
3965 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
|
|
3966 NULL, /* generate_summary */
|
|
3967 NULL, /* write_summary */
|
|
3968 NULL, /* read_summary */
|
|
3969 NULL, /* write_optimization_summary */
|
|
3970 NULL, /* read_optimization_summary */
|
|
3971 NULL, /* stmt_fixup */
|
|
3972 0, /* function_transform_todo_flags_start */
|
|
3973 NULL, /* function_transform */
|
|
3974 NULL) /* variable_transform */
|
|
3975 {}
|
|
3976
|
|
3977 /* opt_pass methods: */
|
|
3978 virtual bool gate (function *)
|
|
3979 {
|
|
3980 /* In LTO, always run the IPA passes and decide on function basis if the
|
|
3981 pass is enabled. */
|
|
3982 if (in_lto_p)
|
|
3983 return true;
|
|
3984 return (flag_devirtualize
|
|
3985 && (flag_devirtualize_speculatively
|
|
3986 || (warn_suggest_final_methods
|
|
3987 || warn_suggest_final_types))
|
|
3988 && optimize);
|
|
3989 }
|
|
3990
|
|
3991 virtual unsigned int execute (function *) { return ipa_devirt (); }
|
|
3992
|
|
3993 }; // class pass_ipa_devirt
|
|
3994
|
|
3995 } // anon namespace
|
|
3996
|
|
/* Factory used by the pass-manager tables to instantiate the devirt pass
   for context CTXT; the pass manager takes ownership of the result.  */
ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
|
|
4002
|
145
|
4003 /* Print ODR name of a TYPE if available.
|
|
4004 Use demangler when option DEMANGLE is used. */
|
|
4005
|
|
4006 DEBUG_FUNCTION void
|
|
4007 debug_tree_odr_name (tree type, bool demangle)
|
|
4008 {
|
|
4009 const char *odr = get_odr_name_for_type (type);
|
|
4010 if (demangle)
|
|
4011 {
|
|
4012 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
|
|
4013 odr = cplus_demangle (odr, opts);
|
|
4014 }
|
|
4015
|
|
4016 fprintf (stderr, "%s\n", odr);
|
|
4017 }
|
|
4018
|
111
|
4019 #include "gt-ipa-devirt.h"
|