comparison gcc/lto/lto-common.c @ 145:1830386684a0

gcc-9.2.0
author anatofuz
date Thu, 13 Feb 2020 11:34:05 +0900
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2020 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "function.h"
26 #include "bitmap.h"
27 #include "basic-block.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "tree-streamer.h"
34 #include "cgraph.h"
35 #include "opts.h"
36 #include "toplev.h"
37 #include "stor-layout.h"
38 #include "symbol-summary.h"
39 #include "tree-vrp.h"
40 #include "ipa-prop.h"
41 #include "common.h"
42 #include "debug.h"
43 #include "lto.h"
44 #include "lto-section-names.h"
45 #include "splay-tree.h"
46 #include "lto-partition.h"
47 #include "context.h"
48 #include "pass_manager.h"
49 #include "ipa-fnsummary.h"
50 #include "ipa-utils.h"
51 #include "gomp-constants.h"
52 #include "lto-symtab.h"
53 #include "stringpool.h"
54 #include "fold-const.h"
55 #include "attribs.h"
56 #include "builtins.h"
57 #include "lto-common.h"
58 #include "tree-pretty-print.h"
59
60 /* True when no new types are going to be streamed from the global stream. */
61
62 static bool type_streaming_finished = false;
63
64 GTY(()) tree first_personality_decl;
65
66 GTY(()) const unsigned char *lto_mode_identity_table;
67
68 /* Returns a hash code for P. */
69
70 static hashval_t
71 hash_name (const void *p)
72 {
73 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
74 return (hashval_t) htab_hash_string (ds->name);
75 }
76
77
78 /* Returns nonzero if P1 and P2 are equal. */
79
80 static int
81 eq_name (const void *p1, const void *p2)
82 {
83 const struct lto_section_slot *s1
84 = (const struct lto_section_slot *) p1;
85 const struct lto_section_slot *s2
86 = (const struct lto_section_slot *) p2;
87
88 return strcmp (s1->name, s2->name) == 0;
89 }
90
91 /* Free lto_section_slot. */
92
93 static void
94 free_with_string (void *arg)
95 {
96 struct lto_section_slot *s = (struct lto_section_slot *)arg;
97
98 free (CONST_CAST (char *, s->name));
99 free (arg);
100 }
101
102 /* Create section hash table. */
103
104 htab_t
105 lto_obj_create_section_hash_table (void)
106 {
107 return htab_create (37, hash_name, eq_name, free_with_string);
108 }
109
110 /* Delete an allocated integer KEY in the splay tree. */
111
112 static void
113 lto_splay_tree_delete_id (splay_tree_key key)
114 {
115 free ((void *) key);
116 }
117
118 /* Compare splay tree node ids A and B. */
119
120 static int
121 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
122 {
123 unsigned HOST_WIDE_INT ai;
124 unsigned HOST_WIDE_INT bi;
125
126 ai = *(unsigned HOST_WIDE_INT *) a;
127 bi = *(unsigned HOST_WIDE_INT *) b;
128
129 if (ai < bi)
130 return -1;
131 else if (ai > bi)
132 return 1;
133 return 0;
134 }
135
136 /* Look up splay tree node by ID in splay tree T. */
137
138 static splay_tree_node
139 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
140 {
141 return splay_tree_lookup (t, (splay_tree_key) &id);
142 }
143
144 /* Check if KEY has ID. */
145
146 static bool
147 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
148 {
149 return *(unsigned HOST_WIDE_INT *) key == id;
150 }
151
152 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
153 The ID is allocated separately because we need HOST_WIDE_INTs which may
154 be wider than a splay_tree_key. */
155
156 static void
157 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
158 struct lto_file_decl_data *file_data)
159 {
160 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
161 *idp = id;
162 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
163 }
164
165 /* Create a splay tree. */
166
167 static splay_tree
168 lto_splay_tree_new (void)
169 {
170 return splay_tree_new (lto_splay_tree_compare_ids,
171 lto_splay_tree_delete_id,
172 NULL);
173 }
174
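/* Illustrative sketch (not part of the original sources): the ID wrappers
   above fit together roughly like this, assuming FD is a pre-existing
   lto_file_decl_data object identified by some 64-bit ID:

     splay_tree t = lto_splay_tree_new ();
     lto_splay_tree_insert (t, id, fd);
     splay_tree_node n = lto_splay_tree_lookup (t, id);
     struct lto_file_decl_data *found
       = (struct lto_file_decl_data *) n->value;

   Each key points to a heap-allocated HOST_WIDE_INT, which
   lto_splay_tree_delete_id releases when the tree is deleted.  */
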
175 /* Decode the content of memory pointed to by DATA into the in-decl-state
176 object STATE. DATA_IN points to a data_in structure for
177 decoding. Return the address after the decoded object in the
178 input. */
179
180 static const uint32_t *
181 lto_read_in_decl_state (class data_in *data_in, const uint32_t *data,
182 struct lto_in_decl_state *state)
183 {
184 uint32_t ix;
185 tree decl;
186 uint32_t i, j;
187
188 ix = *data++;
189 state->compressed = ix & 1;
190 ix /= 2;
191 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
192 if (!VAR_OR_FUNCTION_DECL_P (decl))
193 {
194 gcc_assert (decl == void_type_node);
195 decl = NULL_TREE;
196 }
197 state->fn_decl = decl;
198
199 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
200 {
201 uint32_t size = *data++;
202 vec<tree, va_gc> *decls = NULL;
203 vec_alloc (decls, size);
204
205 for (j = 0; j < size; j++)
206 vec_safe_push (decls,
207 streamer_tree_cache_get_tree (data_in->reader_cache,
208 data[j]));
209
210 state->streams[i] = decls;
211 data += size;
212 }
213
214 return data;
215 }
216
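/* A minimal sketch (descriptive only, derived from the reader above) of the
   uint32_t layout that lto_read_in_decl_state decodes:

     data[0]              fn-decl cache index * 2 + "compressed" bit
     data[1]              number of trees in decl stream 0
     data[2 .. 1 + n]     reader-cache indices of those trees
     ...                  repeated for each of the LTO_N_DECL_STREAMS streams

   The returned pointer is the first word after the last stream.  */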
217
218 /* Global canonical type table. */
219 static htab_t gimple_canonical_types;
220 static hash_map<const_tree, hashval_t> *canonical_type_hash_cache;
221 static unsigned long num_canonical_type_hash_entries;
222 static unsigned long num_canonical_type_hash_queries;
223
224 /* Types postponed for registration to the canonical type table.
225 During streaming we postpone all TYPE_CXX_ODR_P types so we can later
226 decide whether there is a conflict with a non-ODR type or not. */
227 static GTY(()) vec<tree, va_gc> *types_to_register = NULL;
228
229 static void iterative_hash_canonical_type (tree type, inchash::hash &hstate);
230 static hashval_t gimple_canonical_type_hash (const void *p);
231 static hashval_t gimple_register_canonical_type_1 (tree t, hashval_t hash);
232
233 /* Return a hash value for gimple type TYPE.
234
235 The hash value returned is equal for types considered compatible
236 by gimple_canonical_types_compatible_p. */
237
238 static hashval_t
239 hash_canonical_type (tree type)
240 {
241 inchash::hash hstate;
242 enum tree_code code;
243
244 /* We compute alias sets only for types that need them.
245 Be sure we do not recurse to something else, as we cannot hash incomplete
246 types in a way that gives them the same hash value as compatible complete
247 types. */
248 gcc_checking_assert (type_with_alias_set_p (type));
249
250 /* Combine a few common features of types so that types are grouped into
251 smaller sets; when searching for existing matching types to merge,
252 only existing types having the same features as the new type will be
253 checked. */
254 code = tree_code_for_canonical_type_merging (TREE_CODE (type));
255 hstate.add_int (code);
256 hstate.add_int (TYPE_MODE (type));
257
258 /* Incorporate common features of numerical types. */
259 if (INTEGRAL_TYPE_P (type)
260 || SCALAR_FLOAT_TYPE_P (type)
261 || FIXED_POINT_TYPE_P (type)
262 || TREE_CODE (type) == OFFSET_TYPE
263 || POINTER_TYPE_P (type))
264 {
265 hstate.add_int (TYPE_PRECISION (type));
266 if (!type_with_interoperable_signedness (type))
267 hstate.add_int (TYPE_UNSIGNED (type));
268 }
269
270 if (VECTOR_TYPE_P (type))
271 {
272 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
273 hstate.add_int (TYPE_UNSIGNED (type));
274 }
275
276 if (TREE_CODE (type) == COMPLEX_TYPE)
277 hstate.add_int (TYPE_UNSIGNED (type));
278
279 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
280 interoperable with "signed char". Unless all frontends are revisited to
281 agree on these types, we must ignore the flag completely. */
282
283 /* The Fortran standard defines the C_PTR type, which is compatible with
284 every C pointer. For this reason we need to glob all pointers into one.
285 Still, pointers in different address spaces are not compatible. */
286 if (POINTER_TYPE_P (type))
287 hstate.add_int (TYPE_ADDR_SPACE (TREE_TYPE (type)));
288
289 /* For array types hash the domain bounds and the string flag. */
290 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
291 {
292 hstate.add_int (TYPE_STRING_FLAG (type));
293 /* OMP lowering can introduce error_mark_node in place of
294 random local decls in types. */
295 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
296 inchash::add_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), hstate);
297 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
298 inchash::add_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), hstate);
299 }
300
301 /* Recurse for aggregates with a single element type. */
302 if (TREE_CODE (type) == ARRAY_TYPE
303 || TREE_CODE (type) == COMPLEX_TYPE
304 || TREE_CODE (type) == VECTOR_TYPE)
305 iterative_hash_canonical_type (TREE_TYPE (type), hstate);
306
307 /* Incorporate function return and argument types. */
308 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
309 {
310 unsigned na;
311 tree p;
312
313 iterative_hash_canonical_type (TREE_TYPE (type), hstate);
314
315 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
316 {
317 iterative_hash_canonical_type (TREE_VALUE (p), hstate);
318 na++;
319 }
320
321 hstate.add_int (na);
322 }
323
324 if (RECORD_OR_UNION_TYPE_P (type))
325 {
326 unsigned nf;
327 tree f;
328
329 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
330 if (TREE_CODE (f) == FIELD_DECL
331 && (! DECL_SIZE (f)
332 || ! integer_zerop (DECL_SIZE (f))))
333 {
334 iterative_hash_canonical_type (TREE_TYPE (f), hstate);
335 nf++;
336 }
337
338 hstate.add_int (nf);
339 }
340
341 return hstate.end();
342 }
343
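/* For illustration (an assumption about typical inputs, not exhaustive):
   under the hashing above, two complete struct types coming from different
   translation units that have the same mode, the same number of fields with
   non-zero size and recursively compatible field types end up with equal
   hashes and thus land in the same gimple_canonical_types bucket, while
   pointers to types in different address spaces hash differently because
   TYPE_ADDR_SPACE of the pointed-to type is mixed in.  */
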
344 /* Compute a hash value for gimple type TYPE and combine it into HSTATE. */
345
346 static void
347 iterative_hash_canonical_type (tree type, inchash::hash &hstate)
348 {
349 hashval_t v;
350
351 /* All type variants have same TYPE_CANONICAL. */
352 type = TYPE_MAIN_VARIANT (type);
353
354 if (!canonical_type_used_p (type))
355 v = hash_canonical_type (type);
356 /* An already processed type. */
357 else if (TYPE_CANONICAL (type))
358 {
359 type = TYPE_CANONICAL (type);
360 v = gimple_canonical_type_hash (type);
361 }
362 else
363 {
364 /* Canonical types should not be able to form SCCs by design, this
365 recursion is just because we do not register canonical types in
366 optimal order. To avoid quadratic behavior also register the
367 type here. */
368 v = hash_canonical_type (type);
369 v = gimple_register_canonical_type_1 (type, v);
370 }
371 hstate.merge_hash (v);
372 }
373
374 /* Returns the hash for a canonical type P. */
375
376 static hashval_t
377 gimple_canonical_type_hash (const void *p)
378 {
379 num_canonical_type_hash_queries++;
380 hashval_t *slot = canonical_type_hash_cache->get ((const_tree) p);
381 gcc_assert (slot != NULL);
382 return *slot;
383 }
384
385
386
387 /* Returns nonzero if P1 and P2 are equal. */
388
389 static int
390 gimple_canonical_type_eq (const void *p1, const void *p2)
391 {
392 const_tree t1 = (const_tree) p1;
393 const_tree t2 = (const_tree) p2;
394 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
395 CONST_CAST_TREE (t2));
396 }
397
398 /* Main worker for gimple_register_canonical_type. */
399
400 static hashval_t
401 gimple_register_canonical_type_1 (tree t, hashval_t hash)
402 {
403 void **slot;
404
405 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t)
406 && type_with_alias_set_p (t)
407 && canonical_type_used_p (t));
408
409 /* ODR types for which there is no ODR violation and for which we did not
410 record a structurally equivalent non-ODR type can be treated as unique
411 by their name.
412
413 The HASH passed to gimple_register_canonical_type_1 is a structural hash
414 that we can use to look up a structurally equivalent non-ODR type.
415 In case we decide to treat the type as a unique ODR type we recompute the
416 hash based on its name and let the TBAA machinery know about our decision. */
417 if (RECORD_OR_UNION_TYPE_P (t)
418 && odr_type_p (t) && !odr_type_violation_reported_p (t))
419 {
420 /* Anonymous namespace types never conflict with non-C++ types. */
421 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
422 slot = NULL;
423 else
424 {
425 /* Here we rely on the fact that all non-ODR types were inserted into
426 the canonical type hash and thus we can safely detect conflicts between
427 ODR types and interoperable non-ODR types. */
428 gcc_checking_assert (type_streaming_finished
429 && TYPE_MAIN_VARIANT (t) == t);
430 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash,
431 NO_INSERT);
432 }
433 if (slot && !TYPE_CXX_ODR_P (*(tree *)slot))
434 {
435 tree nonodr = *(tree *)slot;
436 if (symtab->dump_file)
437 {
438 fprintf (symtab->dump_file,
439 "ODR and non-ODR type conflict: ");
440 print_generic_expr (symtab->dump_file, t);
441 fprintf (symtab->dump_file, " and ");
442 print_generic_expr (symtab->dump_file, nonodr);
443 fprintf (symtab->dump_file, " mangled:%s\n",
444 IDENTIFIER_POINTER
445 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))));
446 }
447 /* Set canonical for T and all other ODR equivalent duplicates
448 including incomplete structures. */
449 set_type_canonical_for_odr_type (t, nonodr);
450 }
451 else
452 {
453 tree prevail = prevailing_odr_type (t);
454
455 if (symtab->dump_file)
456 {
457 fprintf (symtab->dump_file,
458 "New canonical ODR type: ");
459 print_generic_expr (symtab->dump_file, t);
460 fprintf (symtab->dump_file, " mangled:%s\n",
461 IDENTIFIER_POINTER
462 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))));
463 }
464 /* Set canonical for T and all other ODR equivalent duplicates
465 including incomplete structures. */
466 set_type_canonical_for_odr_type (t, prevail);
467 enable_odr_based_tbaa (t);
468 if (!type_in_anonymous_namespace_p (t))
469 hash = htab_hash_string (IDENTIFIER_POINTER
470 (DECL_ASSEMBLER_NAME
471 (TYPE_NAME (t))));
472 else
473 hash = TYPE_UID (t);
474
475 /* All variants of t now have TYPE_CANONICAL set to prevail.
476 Update canonical type hash cache accordingly. */
477 num_canonical_type_hash_entries++;
478 bool existed_p = canonical_type_hash_cache->put (prevail, hash);
479 gcc_checking_assert (!existed_p);
480 }
481 return hash;
482 }
483
484 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
485 if (*slot)
486 {
487 tree new_type = (tree)(*slot);
488 gcc_checking_assert (new_type != t);
489 TYPE_CANONICAL (t) = new_type;
490 }
491 else
492 {
493 TYPE_CANONICAL (t) = t;
494 *slot = (void *) t;
495 /* Cache the just computed hash value. */
496 num_canonical_type_hash_entries++;
497 bool existed_p = canonical_type_hash_cache->put (t, hash);
498 gcc_assert (!existed_p);
499 }
500 return hash;
501 }
502
503 /* Register type T in the global type table gimple_types and set
504 TYPE_CANONICAL of T accordingly.
505 This is used by LTO to merge structurally equivalent types for
506 type-based aliasing purposes across different TUs and languages.
507
508 ??? This merging does not exactly match how the tree.c middle-end
509 functions will assign TYPE_CANONICAL when new types are created
510 during optimization (which at least happens for pointer and array
511 types). */
512
513 static void
514 gimple_register_canonical_type (tree t)
515 {
516 if (TYPE_CANONICAL (t) || !type_with_alias_set_p (t)
517 || !canonical_type_used_p (t))
518 return;
519
520 /* Canonical types are the same among all complete variants. */
521 if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)))
522 TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t));
523 else
524 {
525 hashval_t h = hash_canonical_type (TYPE_MAIN_VARIANT (t));
526 gimple_register_canonical_type_1 (TYPE_MAIN_VARIANT (t), h);
527 TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t));
528 }
529 }
530
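/* Illustrative sketch (assumption): the streamer further below uses this
   registration entry point for a freshly read-in type T roughly as

     if (!TYPE_CANONICAL (t))
       gimple_register_canonical_type (t);

   Afterwards TYPE_CANONICAL (t) either points to a structurally compatible
   type registered earlier (the two are then treated as one class for TBAA
   purposes) or to the main variant of T itself if T is the first member of
   its equivalence class.  */
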
531 /* Re-compute TYPE_CANONICAL for NODE and related types. */
532
533 static void
534 lto_register_canonical_types (tree node, bool first_p)
535 {
536 if (!node
537 || !TYPE_P (node))
538 return;
539
540 if (first_p)
541 TYPE_CANONICAL (node) = NULL_TREE;
542
543 if (POINTER_TYPE_P (node)
544 || TREE_CODE (node) == COMPLEX_TYPE
545 || TREE_CODE (node) == ARRAY_TYPE)
546 lto_register_canonical_types (TREE_TYPE (node), first_p);
547
548 if (!first_p)
549 gimple_register_canonical_type (node);
550 }
551
552 /* Finish canonical type calculation: after all units have been streamed in
553 we can check whether a given ODR type structurally conflicts with a non-ODR
554 type. In the first case we set the canonical type according to the
555 canonical type hash. In the second case we use type names. */
556
557 static void
558 lto_register_canonical_types_for_odr_types ()
559 {
560 tree t;
561 unsigned int i;
562
563 if (!types_to_register)
564 return;
565
566 type_streaming_finished = true;
567
568 /* Be sure that no types derived from ODR types have been
569 inserted into the hash table yet. */
570 if (flag_checking)
571 FOR_EACH_VEC_ELT (*types_to_register, i, t)
572 gcc_assert (!TYPE_CANONICAL (t));
573
574 /* Register all remaining types. */
575 FOR_EACH_VEC_ELT (*types_to_register, i, t)
576 {
577 /* For pre-streamed types like va-arg it is possible that the main variant
578 is !CXX_ODR_P while the variant (which is streamed) is.
579 Copy CXX_ODR_P to make the type verifier happy. This is safe because
580 in canonical type calculation we only consider main variants.
581 However we cannot change this flag before streaming is finished
582 without affecting tree merging. */
583 TYPE_CXX_ODR_P (t) = TYPE_CXX_ODR_P (TYPE_MAIN_VARIANT (t));
584 if (!TYPE_CANONICAL (t))
585 gimple_register_canonical_type (t);
586 }
587 }
588
589
590 /* Remember trees that contain references to declarations. */
591 vec <tree, va_gc> *tree_with_vars;
592
593 #define CHECK_VAR(tt) \
594 do \
595 { \
596 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
597 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
598 return true; \
599 } while (0)
600
601 #define CHECK_NO_VAR(tt) \
602 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
603
604 /* Check presence of pointers to decls in fields of a tree_typed T. */
605
606 static inline bool
607 mentions_vars_p_typed (tree t)
608 {
609 CHECK_NO_VAR (TREE_TYPE (t));
610 return false;
611 }
612
613 /* Check presence of pointers to decls in fields of a tree_common T. */
614
615 static inline bool
616 mentions_vars_p_common (tree t)
617 {
618 if (mentions_vars_p_typed (t))
619 return true;
620 CHECK_NO_VAR (TREE_CHAIN (t));
621 return false;
622 }
623
624 /* Check presence of pointers to decls in fields of a decl_minimal T. */
625
626 static inline bool
627 mentions_vars_p_decl_minimal (tree t)
628 {
629 if (mentions_vars_p_common (t))
630 return true;
631 CHECK_NO_VAR (DECL_NAME (t));
632 CHECK_VAR (DECL_CONTEXT (t));
633 return false;
634 }
635
636 /* Check presence of pointers to decls in fields of a decl_common T. */
637
638 static inline bool
639 mentions_vars_p_decl_common (tree t)
640 {
641 if (mentions_vars_p_decl_minimal (t))
642 return true;
643 CHECK_VAR (DECL_SIZE (t));
644 CHECK_VAR (DECL_SIZE_UNIT (t));
645 CHECK_VAR (DECL_INITIAL (t));
646 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
647 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
648 return false;
649 }
650
651 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
652
653 static inline bool
654 mentions_vars_p_decl_with_vis (tree t)
655 {
656 if (mentions_vars_p_decl_common (t))
657 return true;
658
659 /* Accessor macro has side-effects, use field-name here. */
660 CHECK_NO_VAR (DECL_ASSEMBLER_NAME_RAW (t));
661 return false;
662 }
663
664 /* Check presence of pointers to decls in fields of a decl_non_common T. */
665
666 static inline bool
667 mentions_vars_p_decl_non_common (tree t)
668 {
669 if (mentions_vars_p_decl_with_vis (t))
670 return true;
671 CHECK_NO_VAR (DECL_RESULT_FLD (t));
672 return false;
673 }
674
675 /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */
676
677 static bool
678 mentions_vars_p_function (tree t)
679 {
680 if (mentions_vars_p_decl_non_common (t))
681 return true;
682 CHECK_NO_VAR (DECL_ARGUMENTS (t));
683 CHECK_NO_VAR (DECL_VINDEX (t));
684 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
685 return false;
686 }
687
688 /* Check presence of pointers to decls in fields of a field_decl T. */
689
690 static bool
691 mentions_vars_p_field_decl (tree t)
692 {
693 if (mentions_vars_p_decl_common (t))
694 return true;
695 CHECK_VAR (DECL_FIELD_OFFSET (t));
696 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
697 CHECK_NO_VAR (DECL_QUALIFIER (t));
698 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
699 CHECK_NO_VAR (DECL_FCONTEXT (t));
700 return false;
701 }
702
703 /* Check presence of pointers to decls in fields of a type T. */
704
705 static bool
706 mentions_vars_p_type (tree t)
707 {
708 if (mentions_vars_p_common (t))
709 return true;
710 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
711 CHECK_VAR (TYPE_SIZE (t));
712 CHECK_VAR (TYPE_SIZE_UNIT (t));
713 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
714 CHECK_NO_VAR (TYPE_NAME (t));
715
716 CHECK_VAR (TYPE_MIN_VALUE_RAW (t));
717 CHECK_VAR (TYPE_MAX_VALUE_RAW (t));
718
719 /* Accessor is for derived node types only. */
720 CHECK_NO_VAR (TYPE_LANG_SLOT_1 (t));
721
722 CHECK_VAR (TYPE_CONTEXT (t));
723 CHECK_NO_VAR (TYPE_CANONICAL (t));
724 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
725 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
726 return false;
727 }
728
729 /* Check presence of pointers to decls in fields of a BINFO T. */
730
731 static bool
732 mentions_vars_p_binfo (tree t)
733 {
734 unsigned HOST_WIDE_INT i, n;
735
736 if (mentions_vars_p_common (t))
737 return true;
738 CHECK_VAR (BINFO_VTABLE (t));
739 CHECK_NO_VAR (BINFO_OFFSET (t));
740 CHECK_NO_VAR (BINFO_VIRTUALS (t));
741 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
742 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
743 for (i = 0; i < n; i++)
744 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
745 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
746 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
747 n = BINFO_N_BASE_BINFOS (t);
748 for (i = 0; i < n; i++)
749 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
750 return false;
751 }
752
753 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
754
755 static bool
756 mentions_vars_p_constructor (tree t)
757 {
758 unsigned HOST_WIDE_INT idx;
759 constructor_elt *ce;
760
761 if (mentions_vars_p_typed (t))
762 return true;
763
764 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
765 {
766 CHECK_NO_VAR (ce->index);
767 CHECK_VAR (ce->value);
768 }
769 return false;
770 }
771
772 /* Check presence of pointers to decls in fields of an expression tree T. */
773
774 static bool
775 mentions_vars_p_expr (tree t)
776 {
777 int i;
778 if (mentions_vars_p_typed (t))
779 return true;
780 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
781 CHECK_VAR (TREE_OPERAND (t, i));
782 return false;
783 }
784
785 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
786
787 static bool
788 mentions_vars_p_omp_clause (tree t)
789 {
790 int i;
791 if (mentions_vars_p_common (t))
792 return true;
793 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
794 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
795 return false;
796 }
797
798 /* Check presence of pointers to decls that need later fixup in T. */
799
800 static bool
801 mentions_vars_p (tree t)
802 {
803 switch (TREE_CODE (t))
804 {
805 case IDENTIFIER_NODE:
806 break;
807
808 case TREE_LIST:
809 CHECK_VAR (TREE_VALUE (t));
810 CHECK_VAR (TREE_PURPOSE (t));
811 CHECK_NO_VAR (TREE_CHAIN (t));
812 break;
813
814 case FIELD_DECL:
815 return mentions_vars_p_field_decl (t);
816
817 case LABEL_DECL:
818 case CONST_DECL:
819 case PARM_DECL:
820 case RESULT_DECL:
821 case IMPORTED_DECL:
822 case NAMESPACE_DECL:
823 case NAMELIST_DECL:
824 return mentions_vars_p_decl_common (t);
825
826 case VAR_DECL:
827 return mentions_vars_p_decl_with_vis (t);
828
829 case TYPE_DECL:
830 return mentions_vars_p_decl_non_common (t);
831
832 case FUNCTION_DECL:
833 return mentions_vars_p_function (t);
834
835 case TREE_BINFO:
836 return mentions_vars_p_binfo (t);
837
838 case PLACEHOLDER_EXPR:
839 return mentions_vars_p_common (t);
840
841 case BLOCK:
842 case TRANSLATION_UNIT_DECL:
843 case OPTIMIZATION_NODE:
844 case TARGET_OPTION_NODE:
845 break;
846
847 case CONSTRUCTOR:
848 return mentions_vars_p_constructor (t);
849
850 case OMP_CLAUSE:
851 return mentions_vars_p_omp_clause (t);
852
853 default:
854 if (TYPE_P (t))
855 {
856 if (mentions_vars_p_type (t))
857 return true;
858 }
859 else if (EXPR_P (t))
860 {
861 if (mentions_vars_p_expr (t))
862 return true;
863 }
864 else if (CONSTANT_CLASS_P (t))
865 CHECK_NO_VAR (TREE_TYPE (t));
866 else
867 gcc_unreachable ();
868 }
869 return false;
870 }
871
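/* For example (illustrative): an initializer such as "&g" for a global
   "int g;" is an ADDR_EXPR whose operand is a TREE_PUBLIC VAR_DECL, so
   CHECK_VAR fires in mentions_vars_p_expr and mentions_vars_p returns true;
   a reference to a purely function-local variable fails CHECK_VAR's
   TREE_PUBLIC/DECL_EXTERNAL test and yields false.  */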
872
873 /* Return the resolution for the decl with index INDEX from DATA_IN. */
874
875 static enum ld_plugin_symbol_resolution
876 get_resolution (class data_in *data_in, unsigned index)
877 {
878 if (data_in->globals_resolution.exists ())
879 {
880 ld_plugin_symbol_resolution_t ret;
881 /* We can have references to functions that are not emitted, in
882 DECL_FUNCTION_PERSONALITY at least. So we can and indeed have
883 to return LDPR_UNKNOWN in some cases. */
884 if (data_in->globals_resolution.length () <= index)
885 return LDPR_UNKNOWN;
886 ret = data_in->globals_resolution[index];
887 return ret;
888 }
889 else
890 /* Delay resolution finding until decl merging. */
891 return LDPR_UNKNOWN;
892 }
893
894 /* We need to record resolutions until the symbol table is read. */
895 static void
896 register_resolution (struct lto_file_decl_data *file_data, tree decl,
897 enum ld_plugin_symbol_resolution resolution)
898 {
899 bool existed;
900 if (resolution == LDPR_UNKNOWN)
901 return;
902 if (!file_data->resolution_map)
903 file_data->resolution_map
904 = new hash_map<tree, ld_plugin_symbol_resolution>;
905 ld_plugin_symbol_resolution_t &res
906 = file_data->resolution_map->get_or_insert (decl, &existed);
907 if (!existed
908 || resolution == LDPR_PREVAILING_DEF_IRONLY
909 || resolution == LDPR_PREVAILING_DEF
910 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
911 res = resolution;
912 }
913
914 /* Register DECL with the global symbol table and change its
915 name if necessary to avoid name clashes for static globals across
916 different files. */
917
918 static void
919 lto_register_var_decl_in_symtab (class data_in *data_in, tree decl,
920 unsigned ix)
921 {
922 tree context;
923
924 /* Variable has file scope, not local. */
925 if (!TREE_PUBLIC (decl)
926 && !((context = decl_function_context (decl))
927 && auto_var_in_fn_p (decl, context)))
928 rest_of_decl_compilation (decl, 1, 0);
929
930 /* If this variable has already been declared, queue the
931 declaration for merging. */
932 if (TREE_PUBLIC (decl))
933 register_resolution (data_in->file_data,
934 decl, get_resolution (data_in, ix));
935 }
936
937
938 /* Register DECL with the global symbol table and change its
939 name if necessary to avoid name clashes for static globals across
940 different files. DATA_IN contains descriptors and tables for the
941 file being read. */
942
943 static void
944 lto_register_function_decl_in_symtab (class data_in *data_in, tree decl,
945 unsigned ix)
946 {
947 /* If this variable has already been declared, queue the
948 declaration for merging. */
949 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT_P (decl))
950 register_resolution (data_in->file_data,
951 decl, get_resolution (data_in, ix));
952 }
953
954 /* Check if T is a decl that needs its resolution info registered. */
955
956 static void
957 lto_maybe_register_decl (class data_in *data_in, tree t, unsigned ix)
958 {
959 if (TREE_CODE (t) == VAR_DECL)
960 lto_register_var_decl_in_symtab (data_in, t, ix);
961 else if (TREE_CODE (t) == FUNCTION_DECL
962 && !fndecl_built_in_p (t))
963 lto_register_function_decl_in_symtab (data_in, t, ix);
964 }
965
966
967 /* Re-materialize the type T in its type variant list and
968 the pointer/reference-to chains. */
969
970 static void
971 lto_fixup_prevailing_type (tree t)
972 {
973 /* The following re-creates proper variant lists while fixing up
974 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
975 variant list state before fixup is broken. */
976
977 /* If we are not our own variant leader, link us into our new leader's
978 variant list. */
979 if (TYPE_MAIN_VARIANT (t) != t)
980 {
981 tree mv = TYPE_MAIN_VARIANT (t);
982 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
983 TYPE_NEXT_VARIANT (mv) = t;
984 }
985
986 /* The following reconstructs the pointer chains
987 of the new pointed-to type if we are a main variant. We do
988 not stream those so they are broken before fixup. */
989 if (TREE_CODE (t) == POINTER_TYPE
990 && TYPE_MAIN_VARIANT (t) == t)
991 {
992 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
993 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
994 }
995 else if (TREE_CODE (t) == REFERENCE_TYPE
996 && TYPE_MAIN_VARIANT (t) == t)
997 {
998 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
999 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1000 }
1001 }
1002
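/* For example (illustrative): after the fixup above, a prevailing pointer
   type P with TYPE_MAIN_VARIANT (P) == P and pointed-to type T satisfies

     TYPE_POINTER_TO (T) == P
     TYPE_NEXT_PTR_TO (P) == <previous head of T's pointer chain>

   i.e. P is simply pushed onto the front of T's pointer-to chain; the
   reference-to chain is rebuilt the same way for REFERENCE_TYPEs.  */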
1003
1004 /* We keep prevailing tree SCCs in a hashtable with manual collision
1005 handling (in case all hashes compare the same) and keep the colliding
1006 entries in the tree_scc->next chain. */
1007
1008 struct tree_scc
1009 {
1010 tree_scc *next;
1011 /* Hash of the whole SCC. */
1012 hashval_t hash;
1013 /* Number of trees in the SCC. */
1014 unsigned len;
1015 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1016 which share the same individual tree hash). */
1017 unsigned entry_len;
1018 /* The members of the SCC.
1019 We only need to remember the first entry node candidate for prevailing
1020 SCCs (but of course have access to all entries for SCCs we are
1021 processing).
1022 ??? For prevailing SCCs we really only need hash and the first
1023 entry candidate, but that's too awkward to implement. */
1024 tree entries[1];
1025 };
1026
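/* The trailing one-element array makes tree_scc a variable-sized object;
   an instance with room for LEN entries is obtained as in unify_scc below
   (sketch):

     tree_scc *scc
       = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));

   so entries[0 .. len-1] directly follow the fixed header fields.  */
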
1027 struct tree_scc_hasher : nofree_ptr_hash <tree_scc>
1028 {
1029 static inline hashval_t hash (const tree_scc *);
1030 static inline bool equal (const tree_scc *, const tree_scc *);
1031 };
1032
1033 hashval_t
1034 tree_scc_hasher::hash (const tree_scc *scc)
1035 {
1036 return scc->hash;
1037 }
1038
1039 bool
1040 tree_scc_hasher::equal (const tree_scc *scc1, const tree_scc *scc2)
1041 {
1042 if (scc1->hash != scc2->hash
1043 || scc1->len != scc2->len
1044 || scc1->entry_len != scc2->entry_len)
1045 return false;
1046 return true;
1047 }
1048
1049 static hash_table<tree_scc_hasher> *tree_scc_hash;
1050 static struct obstack tree_scc_hash_obstack;
1051
1052 static unsigned long num_merged_types;
1053 static unsigned long num_prevailing_types;
1054 static unsigned long num_type_scc_trees;
1055 static unsigned long total_scc_size;
1056 static unsigned long num_sccs_read;
1057 static unsigned long total_scc_size_merged;
1058 static unsigned long num_sccs_merged;
1059 static unsigned long num_scc_compares;
1060 static unsigned long num_scc_compare_collisions;
1061
1062
1063 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1064 recursing through in-SCC tree edges. Returns true if the SCCs entered
1065 through T1 and T2 are equal and fills in *MAP with the pairs of
1066 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1067
1068 static bool
1069 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1070 {
1071 enum tree_code code;
1072
1073 /* Mark already visited nodes. */
1074 TREE_ASM_WRITTEN (t2) = 1;
1075
1076 /* Push the pair onto map. */
1077 (*map)[0] = t1;
1078 (*map)[1] = t2;
1079 *map = *map + 2;
1080
1081 /* Compare value-fields. */
1082 #define compare_values(X) \
1083 do { \
1084 if (X(t1) != X(t2)) \
1085 return false; \
1086 } while (0)
1087
1088 compare_values (TREE_CODE);
1089 code = TREE_CODE (t1);
1090
1091 if (!TYPE_P (t1))
1092 {
1093 compare_values (TREE_SIDE_EFFECTS);
1094 compare_values (TREE_CONSTANT);
1095 compare_values (TREE_READONLY);
1096 compare_values (TREE_PUBLIC);
1097 }
1098 compare_values (TREE_ADDRESSABLE);
1099 compare_values (TREE_THIS_VOLATILE);
1100 if (DECL_P (t1))
1101 compare_values (DECL_UNSIGNED);
1102 else if (TYPE_P (t1))
1103 compare_values (TYPE_UNSIGNED);
1104 if (TYPE_P (t1))
1105 compare_values (TYPE_ARTIFICIAL);
1106 else
1107 compare_values (TREE_NO_WARNING);
1108 compare_values (TREE_NOTHROW);
1109 compare_values (TREE_STATIC);
1110 if (code != TREE_BINFO)
1111 compare_values (TREE_PRIVATE);
1112 compare_values (TREE_PROTECTED);
1113 compare_values (TREE_DEPRECATED);
1114 if (TYPE_P (t1))
1115 {
1116 if (AGGREGATE_TYPE_P (t1))
1117 compare_values (TYPE_REVERSE_STORAGE_ORDER);
1118 else
1119 compare_values (TYPE_SATURATING);
1120 compare_values (TYPE_ADDR_SPACE);
1121 }
1122 else if (code == SSA_NAME)
1123 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1124
1125 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1126 {
1127 if (wi::to_wide (t1) != wi::to_wide (t2))
1128 return false;
1129 }
1130
1131 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1132 {
1133 /* ??? No suitable compare routine available. */
1134 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1135 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1136 if (r1.cl != r2.cl
1137 || r1.decimal != r2.decimal
1138 || r1.sign != r2.sign
1139 || r1.signalling != r2.signalling
1140 || r1.canonical != r2.canonical
1141 || r1.uexp != r2.uexp)
1142 return false;
1143 for (unsigned i = 0; i < SIGSZ; ++i)
1144 if (r1.sig[i] != r2.sig[i])
1145 return false;
1146 }
1147
1148 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1149 if (!fixed_compare (EQ_EXPR,
1150 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1151 return false;
1152
1153 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1154 {
1155 compare_values (VECTOR_CST_LOG2_NPATTERNS);
1156 compare_values (VECTOR_CST_NELTS_PER_PATTERN);
1157 }
1158
1159 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1160 {
1161 compare_values (DECL_MODE);
1162 compare_values (DECL_NONLOCAL);
1163 compare_values (DECL_VIRTUAL_P);
1164 compare_values (DECL_IGNORED_P);
1165 compare_values (DECL_ABSTRACT_P);
1166 compare_values (DECL_ARTIFICIAL);
1167 compare_values (DECL_USER_ALIGN);
1168 compare_values (DECL_PRESERVE_P);
1169 compare_values (DECL_EXTERNAL);
1170 compare_values (DECL_GIMPLE_REG_P);
1171 compare_values (DECL_ALIGN);
1172 if (code == LABEL_DECL)
1173 {
1174 compare_values (EH_LANDING_PAD_NR);
1175 compare_values (LABEL_DECL_UID);
1176 }
1177 else if (code == FIELD_DECL)
1178 {
1179 compare_values (DECL_PACKED);
1180 compare_values (DECL_NONADDRESSABLE_P);
1181 compare_values (DECL_PADDING_P);
1182 compare_values (DECL_OFFSET_ALIGN);
1183 }
1184 else if (code == VAR_DECL)
1185 {
1186 compare_values (DECL_HAS_DEBUG_EXPR_P);
1187 compare_values (DECL_NONLOCAL_FRAME);
1188 }
1189 if (code == RESULT_DECL
1190 || code == PARM_DECL
1191 || code == VAR_DECL)
1192 {
1193 compare_values (DECL_BY_REFERENCE);
1194 if (code == VAR_DECL
1195 || code == PARM_DECL)
1196 compare_values (DECL_HAS_VALUE_EXPR_P);
1197 }
1198 }
1199
1200 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1201 compare_values (DECL_REGISTER);
1202
1203 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1204 {
1205 compare_values (DECL_COMMON);
1206 compare_values (DECL_DLLIMPORT_P);
1207 compare_values (DECL_WEAK);
1208 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1209 compare_values (DECL_COMDAT);
1210 compare_values (DECL_VISIBILITY);
1211 compare_values (DECL_VISIBILITY_SPECIFIED);
1212 if (code == VAR_DECL)
1213 {
1214 compare_values (DECL_HARD_REGISTER);
1215 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1216 compare_values (DECL_IN_CONSTANT_POOL);
1217 }
1218 }
1219
1220 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1221 {
1222 compare_values (DECL_BUILT_IN_CLASS);
1223 compare_values (DECL_STATIC_CONSTRUCTOR);
1224 compare_values (DECL_STATIC_DESTRUCTOR);
1225 compare_values (DECL_UNINLINABLE);
1226 compare_values (DECL_POSSIBLY_INLINED);
1227 compare_values (DECL_IS_NOVOPS);
1228 compare_values (DECL_IS_RETURNS_TWICE);
1229 compare_values (DECL_IS_MALLOC);
1230 compare_values (DECL_IS_OPERATOR_NEW_P);
1231 compare_values (DECL_DECLARED_INLINE_P);
1232 compare_values (DECL_STATIC_CHAIN);
1233 compare_values (DECL_NO_INLINE_WARNING_P);
1234 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1235 compare_values (DECL_NO_LIMIT_STACK);
1236 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1237 compare_values (DECL_PURE_P);
1238 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1239 compare_values (DECL_FINAL_P);
1240 compare_values (DECL_CXX_CONSTRUCTOR_P);
1241 compare_values (DECL_CXX_DESTRUCTOR_P);
1242 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1243 compare_values (DECL_UNCHECKED_FUNCTION_CODE);
1244 }
1245
1246 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1247 {
1248 compare_values (TYPE_MODE);
1249 compare_values (TYPE_NEEDS_CONSTRUCTING);
1250 if (RECORD_OR_UNION_TYPE_P (t1))
1251 {
1252 compare_values (TYPE_TRANSPARENT_AGGR);
1253 compare_values (TYPE_FINAL_P);
1254 compare_values (TYPE_CXX_ODR_P);
1255 }
1256 else if (code == ARRAY_TYPE)
1257 compare_values (TYPE_NONALIASED_COMPONENT);
1258 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1259 compare_values (TYPE_STRING_FLAG);
1260 if (AGGREGATE_TYPE_P (t1))
1261 compare_values (TYPE_TYPELESS_STORAGE);
1262 compare_values (TYPE_EMPTY_P);
1263 compare_values (TYPE_PACKED);
1264 compare_values (TYPE_RESTRICT);
1265 compare_values (TYPE_USER_ALIGN);
1266 compare_values (TYPE_READONLY);
1267 compare_values (TYPE_PRECISION);
1268 compare_values (TYPE_ALIGN);
1269 /* Do not compare TYPE_ALIAS_SET. Doing so introduce ordering issues
1270 with calls to get_alias_set which may initialize it for streamed
1271 in types. */
1272 }
1273
1274 /* We don't want to compare locations, so there is nothing to compare
1275 for TS_EXP. */
1276
1277 /* BLOCKs are function local and we don't merge anything there, so
1278 simply refuse to merge. */
1279 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1280 return false;
1281
1282 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1283 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1284 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1285 return false;
1286
1287 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1288 if (!cl_target_option_eq (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2)))
1289 return false;
1290
1291 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1292 if (!cl_optimization_option_eq (TREE_OPTIMIZATION (t1),
1293 TREE_OPTIMIZATION (t2)))
1294 return false;
1295
1296 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1297 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1298 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1299 return false;
1300
1301 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1302 compare_values (CONSTRUCTOR_NELTS);
1303
1304 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1305 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1306 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1307 IDENTIFIER_LENGTH (t1)) != 0)
1308 return false;
1309
1310 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1311 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1312 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1313 TREE_STRING_LENGTH (t1)) != 0)
1314 return false;
1315
1316 if (code == OMP_CLAUSE)
1317 {
1318 compare_values (OMP_CLAUSE_CODE);
1319 switch (OMP_CLAUSE_CODE (t1))
1320 {
1321 case OMP_CLAUSE_DEFAULT:
1322 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1323 break;
1324 case OMP_CLAUSE_SCHEDULE:
1325 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1326 break;
1327 case OMP_CLAUSE_DEPEND:
1328 compare_values (OMP_CLAUSE_DEPEND_KIND);
1329 break;
1330 case OMP_CLAUSE_MAP:
1331 compare_values (OMP_CLAUSE_MAP_KIND);
1332 break;
1333 case OMP_CLAUSE_PROC_BIND:
1334 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1335 break;
1336 case OMP_CLAUSE_REDUCTION:
1337 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1338 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1339 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1340 break;
1341 default:
1342 break;
1343 }
1344 }
1345
1346 #undef compare_values
1347
1348
1349 /* Compare pointer fields. */
1350
1351 /* Recurse. Adapted (search & replace) from DFS_write_tree_body.
1352 Folding the early checks into the compare_tree_edges recursion
1353 macro makes debugging way quicker as you are able to break on
1354 compare_tree_sccs_1 and simply finish until a call returns false
1355 to spot the SCC members with the difference. */
1356 #define compare_tree_edges(E1, E2) \
1357 do { \
1358 tree t1_ = (E1), t2_ = (E2); \
1359 if (t1_ != t2_ \
1360 && (!t1_ || !t2_ \
1361 || !TREE_VISITED (t2_) \
1362 || (!TREE_ASM_WRITTEN (t2_) \
1363 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1364 return false; \
1365 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1366 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1367 } while (0)
1368
1369 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1370 {
1371 if (code != IDENTIFIER_NODE)
1372 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1373 }
1374
1375 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1376 {
1377 /* Note that the number of elements for EXPR has already been emitted
1378 in EXPR's header (see streamer_write_tree_header). */
1379 unsigned int count = vector_cst_encoded_nelts (t1);
1380 for (unsigned int i = 0; i < count; ++i)
1381 compare_tree_edges (VECTOR_CST_ENCODED_ELT (t1, i),
1382 VECTOR_CST_ENCODED_ELT (t2, i));
1383 }
1384
1385 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1386 {
1387 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1388 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1389 }
1390
1391 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1392 {
1393 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1394 /* ??? Global decls from different TUs have non-matching
1395 TRANSLATION_UNIT_DECLs. Only consider a small set of
1396 decls equivalent, we should not end up merging others. */
1397 if ((code == TYPE_DECL
1398 || code == NAMESPACE_DECL
1399 || code == IMPORTED_DECL
1400 || code == CONST_DECL
1401 || (VAR_OR_FUNCTION_DECL_P (t1)
1402 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1403 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1404 ;
1405 else
1406 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1407 }
1408
1409 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1410 {
1411 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1412 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1413 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1414 compare_tree_edges (DECL_ABSTRACT_ORIGIN (t1), DECL_ABSTRACT_ORIGIN (t2));
1415 if ((code == VAR_DECL
1416 || code == PARM_DECL)
1417 && DECL_HAS_VALUE_EXPR_P (t1))
1418 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1419 if (code == VAR_DECL
1420 && DECL_HAS_DEBUG_EXPR_P (t1))
1421 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1422 /* LTO specific edges. */
1423 if (code != FUNCTION_DECL
1424 && code != TRANSLATION_UNIT_DECL)
1425 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1426 }
1427
1428 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1429 {
1430 if (code == FUNCTION_DECL)
1431 {
1432 tree a1, a2;
1433 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1434 a1 || a2;
1435 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1436 compare_tree_edges (a1, a2);
1437 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1438 }
1439 else if (code == TYPE_DECL)
1440 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1441 }
1442
1443 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1444 {
1445 /* Make sure we don't inadvertently set the assembler name. */
1446 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1447 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1448 DECL_ASSEMBLER_NAME (t2));
1449 }
1450
1451 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1452 {
1453 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1454 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1455 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1456 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1457 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1458 DECL_FIELD_BIT_OFFSET (t2));
1459 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1460 }
1461
1462 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1463 {
1464 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1465 DECL_FUNCTION_PERSONALITY (t2));
1466 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1467 compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
1468 DECL_FUNCTION_SPECIFIC_TARGET (t2));
1469 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1470 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1471 }
1472
1473 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1474 {
1475 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1476 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1477 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1478 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1479 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1480 reconstructed during fixup. */
1481 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1482 during fixup. */
1483 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1484 /* ??? Global types from different TUs have non-matching
1485 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1486 equal. */
1487 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1488 ;
1489 else
1490 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1491 /* TYPE_CANONICAL is re-computed during type merging, so do not
1492 compare it here. */
1493 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1494 }
1495
1496 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1497 {
1498 if (code == ENUMERAL_TYPE)
1499 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1500 else if (code == ARRAY_TYPE)
1501 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1502 else if (RECORD_OR_UNION_TYPE_P (t1))
1503 {
1504 tree f1, f2;
1505 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1506 f1 || f2;
1507 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1508 compare_tree_edges (f1, f2);
1509 }
1510 else if (code == FUNCTION_TYPE
1511 || code == METHOD_TYPE)
1512 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1513
1514 if (!POINTER_TYPE_P (t1))
1515 compare_tree_edges (TYPE_MIN_VALUE_RAW (t1), TYPE_MIN_VALUE_RAW (t2));
1516 compare_tree_edges (TYPE_MAX_VALUE_RAW (t1), TYPE_MAX_VALUE_RAW (t2));
1517 }
1518
1519 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1520 {
1521 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1522 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1523 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1524 }
1525
1526 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1527 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1528 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1529
1530 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1531 {
1532 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1533 compare_tree_edges (TREE_OPERAND (t1, i),
1534 TREE_OPERAND (t2, i));
1535
1536 /* BLOCKs are function local and we don't merge anything there. */
1537 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1538 return false;
1539 }
1540
1541 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1542 {
1543 unsigned i;
1544 tree t;
1545 /* Lengths have already been compared above. */
1546 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1547 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1548 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1549 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1550 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1551 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1552 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1553 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1554 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1555 }
1556
1557 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1558 {
1559 unsigned i;
1560 tree index, value;
1561 /* Lengths have already been compared above. */
1562 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1563 {
1564 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1565 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1566 }
1567 }
1568
1569 if (code == OMP_CLAUSE)
1570 {
1571 int i;
1572
1573 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1574 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1575 OMP_CLAUSE_OPERAND (t2, i));
1576 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1577 }
1578
1579 #undef compare_tree_edges
1580
1581 return true;
1582 }
1583
1584 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1585 out MAP if they are equal. */
1586
1587 static bool
1588 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1589 tree *map)
1590 {
1591 /* Assume SCC entry hashes are sorted by their cardinality, which
1592 means we can simply take the first n-tuple of equal hashes
1593 (which is recorded as entry_len) and do n SCC entry candidate
1594 comparisons. */
1595 for (unsigned i = 0; i < pscc->entry_len; ++i)
1596 {
1597 tree *mapp = map;
1598 num_scc_compare_collisions++;
1599 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1600 {
1601 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1602 on the scc as all trees will be freed. */
1603 return true;
1604 }
1605 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1606 the SCC prevails. */
1607 for (unsigned j = 0; j < scc->len; ++j)
1608 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1609 }
1610
1611 return false;
1612 }
1613
1614 /* qsort comparison function to sort a map of pointer pairs by the 2nd
1615 pointer. */
1616
1617 static int
1618 cmp_tree (const void *p1_, const void *p2_)
1619 {
1620 tree *p1 = (tree *)(const_cast<void *>(p1_));
1621 tree *p2 = (tree *)(const_cast<void *>(p2_));
1622 if (p1[1] == p2[1])
1623 return 0;
1624 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1625 }
1626
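/* Sketch of why the two qsort (..., cmp_tree) calls in unify_scc below line
   up (descriptive only): MAP holds pairs (prevailing tree, read tree)
   produced by compare_tree_sccs, and MAP2 holds pairs (cache index, read
   tree).  Sorting both arrays by their second element pairs every
   prevailing tree with the streamer-cache slot of the read tree it
   replaces.  */
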
1627 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1628 hash value SCC_HASH with an already recorded SCC. Return true if
1629 that was successful, otherwise return false. */
1630
1631 static bool
1632 unify_scc (class data_in *data_in, unsigned from,
1633 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1634 {
1635 bool unified_p = false;
1636 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1637 tree_scc *scc
1638 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1639 scc->next = NULL;
1640 scc->hash = scc_hash;
1641 scc->len = len;
1642 scc->entry_len = scc_entry_len;
1643 for (unsigned i = 0; i < len; ++i)
1644 {
1645 tree t = streamer_tree_cache_get_tree (cache, from + i);
1646 scc->entries[i] = t;
1647 /* Do not merge SCCs with local entities inside them. Also do
1648 not merge TRANSLATION_UNIT_DECLs and anonymous namespaces
1649 and the types therein. */
1650 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1651 || (VAR_OR_FUNCTION_DECL_P (t)
1652 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1653 || TREE_CODE (t) == LABEL_DECL
1654 || (TREE_CODE (t) == NAMESPACE_DECL && !DECL_NAME (t))
1655 || (TYPE_P (t)
1656 && type_with_linkage_p (TYPE_MAIN_VARIANT (t))
1657 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t))))
1658 {
1659 /* Avoid doing any work for these cases and do not bother to
1660 record the SCCs for further merging. */
1661 return false;
1662 }
1663 }
1664
1665 /* Look for the list of candidate SCCs to compare against. */
1666 tree_scc **slot;
1667 slot = tree_scc_hash->find_slot_with_hash (scc, scc_hash, INSERT);
1668 if (*slot)
1669 {
1670 /* Try unifying against each candidate. */
1671 num_scc_compares++;
1672
1673 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1674 outside of the scc when following tree edges. Make sure
1675 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1676 to track whether we visited the SCC member during the compare.
1677 We cannot use TREE_VISITED on the pscc members as the extended
1678 scc and pscc can overlap. */
1679 for (unsigned i = 0; i < scc->len; ++i)
1680 {
1681 TREE_VISITED (scc->entries[i]) = 1;
1682 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1683 }
1684
1685 tree *map = XALLOCAVEC (tree, 2 * len);
1686 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1687 {
1688 if (!compare_tree_sccs (pscc, scc, map))
1689 continue;
1690
1691 /* Found an equal SCC. */
1692 unified_p = true;
1693 num_scc_compare_collisions--;
1694 num_sccs_merged++;
1695 total_scc_size_merged += len;
1696
1697 if (flag_checking)
1698 for (unsigned i = 0; i < len; ++i)
1699 {
1700 tree t = map[2*i+1];
1701 enum tree_code code = TREE_CODE (t);
1702 /* IDENTIFIER_NODEs should be singletons and are merged by the
1703 streamer. The others should be singletons, too, and we
1704 should not merge them in any way. */
1705 gcc_assert (code != TRANSLATION_UNIT_DECL
1706 && code != IDENTIFIER_NODE);
1707 }
1708
1709 /* Fixup the streamer cache with the prevailing nodes according
1710 to the tree node mapping computed by compare_tree_sccs. */
1711 if (len == 1)
1712 {
1713 /* If we got a debug reference queued, see if the prevailing
1714 tree has a debug reference and if not, register the one
1715 for the tree we are about to throw away. */
1716 if (dref_queue.length () == 1)
1717 {
1718 dref_entry e = dref_queue.pop ();
1719 gcc_assert (e.decl
1720 == streamer_tree_cache_get_tree (cache, from));
1721 const char *sym;
1722 unsigned HOST_WIDE_INT off;
1723 if (!debug_hooks->die_ref_for_decl (pscc->entries[0], &sym,
1724 &off))
1725 debug_hooks->register_external_die (pscc->entries[0],
1726 e.sym, e.off);
1727 }
1728 lto_maybe_register_decl (data_in, pscc->entries[0], from);
1729 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1730 }
1731 else
1732 {
1733 tree *map2 = XALLOCAVEC (tree, 2 * len);
1734 for (unsigned i = 0; i < len; ++i)
1735 {
1736 map2[i*2] = (tree)(uintptr_t)(from + i);
1737 map2[i*2+1] = scc->entries[i];
1738 }
1739 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1740 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1741 for (unsigned i = 0; i < len; ++i)
1742 {
1743 lto_maybe_register_decl (data_in, map[2*i],
1744 (uintptr_t)map2[2*i]);
1745 streamer_tree_cache_replace_tree (cache, map[2*i],
1746 (uintptr_t)map2[2*i]);
1747 }
1748 }
1749
1750 /* Free the tree nodes from the read SCC. */
1751 data_in->location_cache.revert_location_cache ();
1752 for (unsigned i = 0; i < len; ++i)
1753 {
1754 if (TYPE_P (scc->entries[i]))
1755 num_merged_types++;
1756 free_node (scc->entries[i]);
1757 }
1758
1759 /* Drop DIE references.
1760 ??? Do as in the size-one SCC case which involves sorting
1761 the queue. */
1762 dref_queue.truncate (0);
1763
1764 break;
1765 }
1766
1767 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1768 if (!unified_p)
1769 for (unsigned i = 0; i < scc->len; ++i)
1770 TREE_VISITED (scc->entries[i]) = 0;
1771 }
1772
1773 /* If we didn't unify it with any candidate, duplicate the relevant
1774 pieces to permanent storage and link it into the chain. */
1775 if (!unified_p)
1776 {
1777 tree_scc *pscc
1778 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1779 memcpy (pscc, scc, sizeof (tree_scc));
1780 pscc->next = (*slot);
1781 *slot = pscc;
1782 }
1783 return unified_p;
1784 }
1785
1786
1787
1788 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1789 RESOLUTIONS is the set of symbols picked by the linker (read from the
1790 resolution file when the linker plugin is being used). */
1791
1792 static void
1793 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1794 vec<ld_plugin_symbol_resolution_t> resolutions)
1795 {
1796 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1797 const int decl_offset = sizeof (struct lto_decl_header);
1798 const int main_offset = decl_offset + header->decl_state_size;
1799 const int string_offset = main_offset + header->main_size;
1800 class data_in *data_in;
1801 unsigned int i;
1802 const uint32_t *data_ptr, *data_end;
1803 uint32_t num_decl_states;
1804
1805 lto_input_block ib_main ((const char *) data + main_offset,
1806 header->main_size, decl_data->mode_table);
1807
1808 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1809 header->string_size, resolutions);
1810
1811 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1812 internal types that should not be merged. */
1813
1814 typedef int_hash<unsigned, 0, UINT_MAX> code_id_hash;
1815 hash_map <code_id_hash, unsigned> hm;
1816 unsigned total = 0;
1817
1818 /* Read the global declarations and types. */
1819 while (ib_main.p < ib_main.len)
1820 {
1821 tree t;
1822 unsigned from = data_in->reader_cache->nodes.length ();
1823 /* Read and uniquify SCCs as in the input stream. */
1824 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1825 if (tag == LTO_tree_scc)
1826 {
1827 unsigned len_;
1828 unsigned scc_entry_len;
1829 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1830 &scc_entry_len);
1831 unsigned len = data_in->reader_cache->nodes.length () - from;
1832 gcc_assert (len == len_);
1833
1834 total_scc_size += len;
1835 num_sccs_read++;
1836
1837 /* We have the special case of size-1 SCCs that are pre-merged
1838 by means of identifier and string sharing for example.
1839 ??? Maybe we should avoid streaming those as SCCs. */
1840 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1841 from);
1842 if (len == 1
1843 && (TREE_CODE (first) == IDENTIFIER_NODE
1844 || (TREE_CODE (first) == INTEGER_CST
1845 && !TREE_OVERFLOW (first))))
1846 continue;
1847
1848 /* Try to unify the SCC with already existing ones. */
1849 if (!flag_ltrans
1850 && unify_scc (data_in, from,
1851 len, scc_entry_len, scc_hash))
1852 continue;
1853
1854 /* Tree merging failed; mark entries in the location cache as
1855 permanent. */
1856 data_in->location_cache.accept_location_cache ();
1857
1858 bool seen_type = false;
1859 for (unsigned i = 0; i < len; ++i)
1860 {
1861 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1862 from + i);
1863 /* Reconstruct the type variant and pointer-to/reference-to
1864 chains. */
1865 if (TYPE_P (t))
1866 {
1867 /* Map the tree types to their frequencies. */
1868 if (flag_lto_dump_type_stats)
1869 {
1870 unsigned key = (unsigned) TREE_CODE (t);
1871 unsigned *countp = hm.get (key);
1872 hm.put (key, countp ? (*countp) + 1 : 1);
1873 total++;
1874 }
1875
1876 seen_type = true;
1877 num_prevailing_types++;
1878 lto_fixup_prevailing_type (t);
1879
1880 /* Compute the canonical type of all non-ODR types.
1881 Delay ODR types to the end of the merging process - the canonical
1882 type for those can be computed using the (unique) name; however
1883 we want to do this only if units in other languages do not
1884 contain a structurally equivalent type.
1885
1886 Because SCC components are streamed in random (hash) order
1887 we may have encountered the type before while registering
1888 the type canonical of a derived type in the same SCC. */
1889 if (!TYPE_CANONICAL (t))
1890 {
1891 if (!RECORD_OR_UNION_TYPE_P (t)
1892 || !TYPE_CXX_ODR_P (t))
1893 gimple_register_canonical_type (t);
1894 else if (COMPLETE_TYPE_P (t))
1895 vec_safe_push (types_to_register, t);
1896 }
1897 if (TYPE_MAIN_VARIANT (t) == t && odr_type_p (t))
1898 register_odr_type (t);
1899 }
1900 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUEs of their
1901 type, which is also a member of this SCC. */
1902 if (TREE_CODE (t) == INTEGER_CST
1903 && !TREE_OVERFLOW (t))
1904 cache_integer_cst (t);
1905 if (!flag_ltrans)
1906 {
1907 lto_maybe_register_decl (data_in, t, from + i);
1908 /* Scan the tree for references to global functions or
1909 variables and record those for later fixup. */
1910 if (mentions_vars_p (t))
1911 vec_safe_push (tree_with_vars, t);
1912 }
1913 }
1914
1915 /* Register DECLs with the debuginfo machinery. */
1916 while (!dref_queue.is_empty ())
1917 {
1918 dref_entry e = dref_queue.pop ();
1919 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1920 }
1921
1922 if (seen_type)
1923 num_type_scc_trees += len;
1924 }
1925 else
1926 {
1927 /* Pickle stray references. */
1928 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1929 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1930 }
1931 }
1932
1933 /* Dump type statistics. */
1934 if (flag_lto_dump_type_stats)
1935 {
1936 fprintf (stdout, " Type Frequency Percentage\n\n");
1937 for (hash_map<code_id_hash, unsigned>::iterator itr = hm.begin ();
1938 itr != hm.end ();
1939 ++itr)
1940 {
1941 std::pair<unsigned, unsigned> p = *itr;
1942 enum tree_code code = (enum tree_code) p.first;
1943 fprintf (stdout, "%14s %6d %12.2f\n", get_tree_code_name (code),
1944 p.second, float (p.second)/total*100);
1945 }
1946 }
1947
1948 data_in->location_cache.apply_location_cache ();
1949
1950 /* Read in lto_in_decl_state objects. */
1951 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1952 data_end
1953 = (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1954 num_decl_states = *data_ptr++;
1955
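/* Decl state zero is the global state; any remaining states are
   per-function and are entered into a hash table below.  */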
1956 gcc_assert (num_decl_states > 0);
1957 decl_data->global_decl_state = lto_new_in_decl_state ();
1958 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1959 decl_data->global_decl_state);
1960
1961 /* Read in per-function decl states and enter them in hash table. */
1962 decl_data->function_decl_states
1963 = hash_table<decl_state_hasher>::create_ggc (37);
1964
1965 for (i = 1; i < num_decl_states; i++)
1966 {
1967 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1968
1969 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1970 lto_in_decl_state **slot
1971 = decl_data->function_decl_states->find_slot (state, INSERT);
1972 gcc_assert (*slot == NULL);
1973 *slot = state;
1974 }
1975
1976 if (data_ptr != data_end)
1977 internal_error ("bytecode stream: garbage at the end of symbols section");
1978
1979 /* Set the current decl state to be the global state. */
1980 decl_data->current_decl_state = decl_data->global_decl_state;
1981
1982 lto_data_in_delete (data_in);
1983 }
1984
1985 /* Custom version of strtoll (which is not portable), used to parse hex numbers. */
1986
1987 static int64_t
1988 lto_parse_hex (const char *p)
1989 {
1990 int64_t ret = 0;
1991
1992 for (; *p != '\0'; ++p)
1993 {
1994 char c = *p;
1995 unsigned char part;
1996 ret <<= 4;
1997 if (c >= '0' && c <= '9')
1998 part = c - '0';
1999 else if (c >= 'a' && c <= 'f')
2000 part = c - 'a' + 10;
2001 else if (c >= 'A' && c <= 'F')
2002 part = c - 'A' + 10;
2003 else
2004 internal_error ("could not parse hex number");
2005 ret |= part;
2006 }
2007
2008 return ret;
2009 }
2010
2011 /* Read the resolution for FILE. The resolution is read from
2012 RESOLUTION. */
2013
2014 static void
2015 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2016 {
2017 /* We require that objects in the resolution file are in the same
2018 order as the lto1 command line. */
2019 unsigned int name_len;
2020 char *obj_name;
2021 unsigned int num_symbols;
2022 unsigned int i;
2023 struct lto_file_decl_data *file_data;
2024 splay_tree_node nd = NULL;
2025
2026 if (!resolution)
2027 return;
2028
2029 name_len = strlen (file->filename);
2030 obj_name = XNEWVEC (char, name_len + 1);
2031 fscanf (resolution, " "); /* Read white space. */
2032
2033 fread (obj_name, sizeof (char), name_len, resolution);
2034 obj_name[name_len] = '\0';
2035 if (filename_cmp (obj_name, file->filename) != 0)
2036 internal_error ("unexpected file name %s in linker resolution file. "
2037 "Expected %s", obj_name, file->filename);
2038 if (file->offset != 0)
2039 {
2040 int t;
2041 char offset_p[17];
2042 int64_t offset;
2043 t = fscanf (resolution, "@0x%16s", offset_p);
2044 if (t != 1)
2045 internal_error ("could not parse file offset");
2046 offset = lto_parse_hex (offset_p);
2047 if (offset != file->offset)
2048 internal_error ("unexpected offset");
2049 }
2050
2051 free (obj_name);
2052
2053 fscanf (resolution, "%u", &num_symbols);
2054
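/* Each of the NUM_SYMBOLS lines that follow has the form
   "<symbol index> <sub-file id in hex> <resolution>"; anything after the
   resolution string is ignored by the fscanf below.  */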
2055 for (i = 0; i < num_symbols; i++)
2056 {
2057 int t;
2058 unsigned index;
2059 unsigned HOST_WIDE_INT id;
2060 char r_str[27];
2061 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2062 unsigned int j;
2063 unsigned int lto_resolution_str_len
2064 = sizeof (lto_resolution_str) / sizeof (char *);
2065 res_pair rp;
2066
2067 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE
2068 " %26s %*[^\n]\n", &index, &id, r_str);
2069 if (t != 3)
2070 internal_error ("invalid line in the resolution file");
2071
2072 for (j = 0; j < lto_resolution_str_len; j++)
2073 {
2074 if (strcmp (lto_resolution_str[j], r_str) == 0)
2075 {
2076 r = (enum ld_plugin_symbol_resolution) j;
2077 break;
2078 }
2079 }
2080 if (j == lto_resolution_str_len)
2081 internal_error ("invalid resolution in the resolution file");
2082
2083 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2084 {
2085 nd = lto_splay_tree_lookup (file_ids, id);
2086 if (nd == NULL)
2087 internal_error ("resolution sub id %wx not in object file", id);
2088 }
2089
2090 file_data = (struct lto_file_decl_data *)nd->value;
2091 /* The indexes are very sparse. To save memory, save them in a compact
2092 format that is only unpacked later when the subfile is processed. */
2093 rp.res = r;
2094 rp.index = index;
2095 file_data->respairs.safe_push (rp);
2096 if (file_data->max_index < index)
2097 file_data->max_index = index;
2098 }
2099 }
2100
2101 /* List of file_decl_datas. */
2102 struct file_data_list
2103 {
2104 struct lto_file_decl_data *first, *last;
2105 };
2106
2107 /* Is the name for an id'ed LTO section? */
2108
2109 static int
2110 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2111 {
2112 const char *s;
2113
2114 if (strncmp (name, section_name_prefix, strlen (section_name_prefix)))
2115 return 0;
2116 s = strrchr (name, '.');
2117 if (!s)
2118 return 0;
2119 /* If the section is not suffixed with an ID, return. */
2120 if ((size_t)(s - name) == strlen (section_name_prefix))
2121 return 0;
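/* The id is the hexadecimal number following the final dot.  */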
2122 return sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2123 }
2124
2125 /* Create the file_data for each sub-file id. */
2126
2127 static int
2128 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2129 struct file_data_list *list)
2130 {
2131 struct lto_section_slot s_slot, *new_slot;
2132 unsigned HOST_WIDE_INT id;
2133 splay_tree_node nd;
2134 void **hash_slot;
2135 char *new_name;
2136 struct lto_file_decl_data *file_data;
2137
2138 if (!lto_section_with_id (ls->name, &id))
2139 return 1;
2140
2141 /* Find hash table of sub module id. */
2142 nd = lto_splay_tree_lookup (file_ids, id);
2143 if (nd != NULL)
2144 {
2145 file_data = (struct lto_file_decl_data *)nd->value;
2146 }
2147 else
2148 {
2149 file_data = ggc_alloc<lto_file_decl_data> ();
2150 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2151 file_data->id = id;
2152 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2153 lto_splay_tree_insert (file_ids, id, file_data);
2154
2155 /* Maintain list in linker order. */
2156 if (!list->first)
2157 list->first = file_data;
2158 if (list->last)
2159 list->last->next = file_data;
2160
2161 list->last = file_data;
2162 }
2163
2164 /* Copy section into sub module hash table. */
2165 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2166 s_slot.name = new_name;
2167 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2168 gcc_assert (*hash_slot == NULL);
2169
2170 new_slot = XDUP (struct lto_section_slot, ls);
2171 new_slot->name = new_name;
2172 *hash_slot = new_slot;
2173 return 1;
2174 }
2175
2176 /* Read declarations and other initializations for a FILE_DATA. */
2177
2178 static void
2179 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file,
2180 int order)
2181 {
2182 const char *data;
2183 size_t len;
2184 vec<ld_plugin_symbol_resolution_t>
2185 resolutions = vNULL;
2186 int i;
2187 res_pair *rp;
2188
2189 /* Create a vector for fast access to the resolutions. We do this lazily
2190 to save memory. */
2191 resolutions.safe_grow_cleared (file_data->max_index + 1);
2192 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2193 resolutions[rp->index] = rp->res;
2194 file_data->respairs.release ();
2195
2196 file_data->renaming_hash_table = lto_create_renaming_table ();
2197 file_data->file_name = file->filename;
2198 file_data->order = order;
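/* The offload (accelerator) compiler streams the machine mode table from
   the object file; the host compiler uses the identity mapping set up in
   lto_fe_init.  */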
2199 #ifdef ACCEL_COMPILER
2200 lto_input_mode_table (file_data);
2201 #else
2202 file_data->mode_table = lto_mode_identity_table;
2203 #endif
2204
2205 /* Read and verify LTO section. */
2206 data = lto_get_summary_section_data (file_data, LTO_section_lto, &len);
2207 if (data == NULL)
2208 {
2209 fatal_error (input_location, "bytecode stream in file %qs generated "
2210 "with GCC compiler older than 10.0", file_data->file_name);
2211 return;
2212 }
2213
2214 memcpy (&file_data->lto_section_header, data, sizeof (lto_section));
2215 lto_check_version (file_data->lto_section_header.major_version,
2216 file_data->lto_section_header.minor_version,
2217 file_data->file_name);
2218
2219 data = lto_get_summary_section_data (file_data, LTO_section_decls, &len);
2220 if (data == NULL)
2221 {
2222 internal_error ("cannot read %<LTO_section_decls%> from %s",
2223 file_data->file_name);
2224 return;
2225 }
2226 /* Frees resolutions. */
2227 lto_read_decls (file_data, data, resolutions);
2228 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2229 }
2230
2231 /* Finalize FILE_DATA in FILE and increase COUNT. */
2232
2233 static int
2234 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2235 int *count, int order)
2236 {
2237 lto_file_finalize (file_data, file, order);
2238 if (symtab->dump_file)
2239 fprintf (symtab->dump_file,
2240 "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2241 file_data->file_name, file_data->id);
2242 (*count)++;
2243 return 0;
2244 }
2245
2246 /* Generate a TREE representation for all types and external decl
2247 entities in FILE.
2248
2249 Read all of the globals out of the file. Then read the cgraph
2250 and process the .o index into the cgraph nodes so that it can open
2251 the .o file to load the functions and ipa information. */
2252
2253 static struct lto_file_decl_data *
2254 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2255 {
2256 struct lto_file_decl_data *file_data = NULL;
2257 splay_tree file_ids;
2258 htab_t section_hash_table;
2259 struct lto_section_slot *section;
2260 struct file_data_list file_list;
2261 struct lto_section_list section_list;
2262
2263 memset (&section_list, 0, sizeof (struct lto_section_list));
2264 section_hash_table = lto_obj_build_section_table (file, &section_list);
2265
2266 /* Dump the details of LTO objects. */
2267 if (flag_lto_dump_objects)
2268 {
2269 int i=0;
2270 fprintf (stdout, "\n LTO Object Name: %s\n", file->filename);
2271 fprintf (stdout, "\nNo. Offset Size Section Name\n\n");
2272 for (section = section_list.first; section != NULL; section = section->next)
2273 fprintf (stdout, "%2d %8" PRId64 " %8" PRIu64 " %s\n",
2274 ++i, (int64_t) section->start, (uint64_t) section->len,
2275 section->name);
2276 }
2277
2278 /* Find all sub modules in the object and put their sections into new hash
2279 tables in a splay tree. */
2280 file_ids = lto_splay_tree_new ();
2281 memset (&file_list, 0, sizeof (struct file_data_list));
2282 for (section = section_list.first; section != NULL; section = section->next)
2283 create_subid_section_table (section, file_ids, &file_list);
2284
2285 /* Add resolutions to file ids. */
2286 lto_resolution_read (file_ids, resolution_file, file);
2287
2288 /* Finalize each lto file for each submodule in the merged object. */
2289 int order = 0;
2290 for (file_data = file_list.first; file_data != NULL;
2291 file_data = file_data->next)
2292 lto_create_files_from_ids (file, file_data, count, order++);
2293
2294 splay_tree_delete (file_ids);
2295 htab_delete (section_hash_table);
2296
2297 return file_list.first;
2298 }
2299
2300 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2301 #define LTO_MMAP_IO 1
2302 #endif
2303
2304 #if LTO_MMAP_IO
2305 /* Page size of machine is used for mmap and munmap calls. */
2306 static size_t page_mask;
2307 #endif
2308
2309 /* Get the section data of length LEN from FILENAME starting at
2310 OFFSET. The data segment must be freed by the caller when the
2311 caller is finished. Returns NULL if all was not well. */
2312
2313 static char *
2314 lto_read_section_data (struct lto_file_decl_data *file_data,
2315 intptr_t offset, size_t len)
2316 {
2317 char *result;
2318 static int fd = -1;
2319 static char *fd_name;
2320 #if LTO_MMAP_IO
2321 intptr_t computed_len;
2322 intptr_t computed_offset;
2323 intptr_t diff;
2324 #endif
2325
2326 /* Keep a single-entry file-descriptor cache. The last file we
2327 touched will get closed at exit.
2328 ??? Eventually we want to add a more sophisticated larger cache
2329 or rather fix function body streaming to not stream them in
2330 practically random order. */
2331 if (fd != -1
2332 && filename_cmp (fd_name, file_data->file_name) != 0)
2333 {
2334 free (fd_name);
2335 close (fd);
2336 fd = -1;
2337 }
2338 if (fd == -1)
2339 {
2340 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2341 if (fd == -1)
2342 {
2343 fatal_error (input_location, "Cannot open %s", file_data->file_name);
2344 return NULL;
2345 }
2346 fd_name = xstrdup (file_data->file_name);
2347 }
2348
2349 #if LTO_MMAP_IO
2350 if (!page_mask)
2351 {
2352 size_t page_size = sysconf (_SC_PAGE_SIZE);
2353 page_mask = ~(page_size - 1);
2354 }
2355
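/* mmap requires a page-aligned offset, so map from the start of the
   containing page and return the caller's data at the computed
   difference.  */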
2356 computed_offset = offset & page_mask;
2357 diff = offset - computed_offset;
2358 computed_len = len + diff;
2359
2360 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2361 fd, computed_offset);
2362 if (result == MAP_FAILED)
2363 {
2364 fatal_error (input_location, "Cannot map %s", file_data->file_name);
2365 return NULL;
2366 }
2367
2368 return result + diff;
2369 #else
2370 result = (char *) xmalloc (len);
2371 if (lseek (fd, offset, SEEK_SET) != offset
2372 || read (fd, result, len) != (ssize_t) len)
2373 {
2374 free (result);
2375 fatal_error (input_location, "Cannot read %s", file_data->file_name);
2376 result = NULL;
2377 }
2378 #ifdef __MINGW32__
2379 /* Native Windows doesn't support delayed unlink of an open file. So
2380 we close the file here again. This produces a higher I/O load, but at
2381 least it prevents dangling file handles from blocking the unlink. */
2382 free (fd_name);
2383 fd_name = NULL;
2384 close (fd);
2385 fd = -1;
2386 #endif
2387 return result;
2388 #endif
2389 }
2390
2391
2392 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2393 NAME will be NULL unless the section type is for a function
2394 body. */
2395
2396 static const char *
2397 get_section_data (struct lto_file_decl_data *file_data,
2398 enum lto_section_type section_type,
2399 const char *name, int order,
2400 size_t *len)
2401 {
2402 htab_t section_hash_table = file_data->section_hash_table;
2403 struct lto_section_slot *f_slot;
2404 struct lto_section_slot s_slot;
2405 const char *section_name = lto_get_section_name (section_type, name,
2406 order, file_data);
2407 char *data = NULL;
2408
2409 *len = 0;
2410 s_slot.name = section_name;
2411 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2412 if (f_slot)
2413 {
2414 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2415 *len = f_slot->len;
2416 }
2417
2418 free (CONST_CAST (char *, section_name));
2419 return data;
2420 }
2421
2422
2423 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2424 starts at OFFSET and has LEN bytes. */
2425
2426 static void
2427 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2428 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2429 const char *name ATTRIBUTE_UNUSED,
2430 const char *offset, size_t len ATTRIBUTE_UNUSED)
2431 {
2432 #if LTO_MMAP_IO
2433 intptr_t computed_len;
2434 intptr_t computed_offset;
2435 intptr_t diff;
2436 #endif
2437
2438 #if LTO_MMAP_IO
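/* Recompute the page-aligned mapping created by lto_read_section_data and
   unmap the whole region.  */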
2439 computed_offset = ((intptr_t) offset) & page_mask;
2440 diff = (intptr_t) offset - computed_offset;
2441 computed_len = len + diff;
2442
2443 munmap ((caddr_t) computed_offset, computed_len);
2444 #else
2445 free (CONST_CAST(char *, offset));
2446 #endif
2447 }
2448
2449 static lto_file *current_lto_file;
2450
2451 /* If TT is a variable or function decl, replace it with its
2452 prevailing variant. */
2453 #define LTO_SET_PREVAIL(tt) \
2454 do {\
2455 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2456 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2457 { \
2458 tt = lto_symtab_prevailing_decl (tt); \
2459 fixed = true; \
2460 } \
2461 } while (0)
2462
2463 /* Ensure that TT isn't a replaceable var or function decl. */
2464 #define LTO_NO_PREVAIL(tt) \
2465 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2466
2467 /* Given a tree T, replace all fields referring to variables or functions
2468 with their prevailing variant. */
2469 static void
2470 lto_fixup_prevailing_decls (tree t)
2471 {
2472 enum tree_code code = TREE_CODE (t);
2473 bool fixed = false;
2474
2475 gcc_checking_assert (code != TREE_BINFO);
2476 LTO_NO_PREVAIL (TREE_TYPE (t));
2477 if (CODE_CONTAINS_STRUCT (code, TS_COMMON)
2478 /* lto_symtab_prevail_decl uses TREE_CHAIN to link to the prevailing decl;
2479 in the case T is a prevailed declaration we would ICE here. */
2480 && !VAR_OR_FUNCTION_DECL_P (t))
2481 LTO_NO_PREVAIL (TREE_CHAIN (t));
2482 if (DECL_P (t))
2483 {
2484 LTO_NO_PREVAIL (DECL_NAME (t));
2485 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2486 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2487 {
2488 LTO_SET_PREVAIL (DECL_SIZE (t));
2489 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2490 LTO_SET_PREVAIL (DECL_INITIAL (t));
2491 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2492 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2493 }
2494 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2495 {
2496 LTO_NO_PREVAIL (DECL_ASSEMBLER_NAME_RAW (t));
2497 }
2498 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2499 {
2500 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2501 }
2502 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2503 {
2504 LTO_NO_PREVAIL (DECL_ARGUMENTS (t));
2505 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2506 LTO_NO_PREVAIL (DECL_VINDEX (t));
2507 }
2508 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2509 {
2510 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2511 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2512 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2513 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2514 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2515 }
2516 }
2517 else if (TYPE_P (t))
2518 {
2519 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2520 LTO_SET_PREVAIL (TYPE_SIZE (t));
2521 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2522 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2523 LTO_NO_PREVAIL (TYPE_NAME (t));
2524
2525 LTO_SET_PREVAIL (TYPE_MIN_VALUE_RAW (t));
2526 LTO_SET_PREVAIL (TYPE_MAX_VALUE_RAW (t));
2527 LTO_NO_PREVAIL (TYPE_LANG_SLOT_1 (t));
2528
2529 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2530
2531 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2532 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2533 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2534 }
2535 else if (EXPR_P (t))
2536 {
2537 int i;
2538 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2539 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2540 }
2541 else if (TREE_CODE (t) == CONSTRUCTOR)
2542 {
2543 unsigned i;
2544 tree val;
2545 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2546 LTO_SET_PREVAIL (val);
2547 }
2548 else
2549 {
2550 switch (code)
2551 {
2552 case TREE_LIST:
2553 LTO_SET_PREVAIL (TREE_VALUE (t));
2554 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2555 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2556 break;
2557 default:
2558 gcc_unreachable ();
2559 }
2560 }
2561 /* If we fixed nothing, then we missed something seen by
2562 mentions_vars_p. */
2563 gcc_checking_assert (fixed);
2564 }
2565 #undef LTO_SET_PREVAIL
2566 #undef LTO_NO_PREVAIL
2567
2568 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2569 replaces var and function decls with the corresponding prevailing def. */
2570
2571 static void
2572 lto_fixup_state (struct lto_in_decl_state *state)
2573 {
2574 unsigned i, si;
2575
2576 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2577 we still need to walk from all DECLs to find the reachable
2578 FUNCTION_DECLs and VAR_DECLs. */
2579 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2580 {
2581 vec<tree, va_gc> *trees = state->streams[si];
2582 for (i = 0; i < vec_safe_length (trees); i++)
2583 {
2584 tree t = (*trees)[i];
2585 if (flag_checking && TYPE_P (t))
2586 verify_type (t);
2587 if (VAR_OR_FUNCTION_DECL_P (t)
2588 && (TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
2589 (*trees)[i] = lto_symtab_prevailing_decl (t);
2590 }
2591 }
2592 }
2593
2594 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2595 prevailing one. */
2596
2597 static void
2598 lto_fixup_decls (struct lto_file_decl_data **files)
2599 {
2600 unsigned int i;
2601 tree t;
2602
2603 if (tree_with_vars)
2604 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2605 lto_fixup_prevailing_decls (t);
2606
2607 for (i = 0; files[i]; i++)
2608 {
2609 struct lto_file_decl_data *file = files[i];
2610 struct lto_in_decl_state *state = file->global_decl_state;
2611 lto_fixup_state (state);
2612
2613 hash_table<decl_state_hasher>::iterator iter;
2614 lto_in_decl_state *elt;
2615 FOR_EACH_HASH_TABLE_ELEMENT (*file->function_decl_states, elt,
2616 lto_in_decl_state *, iter)
2617 lto_fixup_state (elt);
2618 }
2619 }
2620
2621 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2622
2623 /* Turn the file data for sub-files into a single array, so that they look
2624 like separate files for further passes. */
2625
2626 static void
2627 lto_flatten_files (struct lto_file_decl_data **orig, int count,
2628 int last_file_ix)
2629 {
2630 struct lto_file_decl_data *n, *next;
2631 int i, k;
2632
2633 lto_stats.num_input_files = count;
2634 all_file_decl_data
2635 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2636 /* Set the hooks so that all of the ipa passes can read in their data. */
2637 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2638 for (i = 0, k = 0; i < last_file_ix; i++)
2639 {
2640 for (n = orig[i]; n != NULL; n = next)
2641 {
2642 all_file_decl_data[k++] = n;
2643 next = n->next;
2644 n->next = NULL;
2645 }
2646 }
2647 all_file_decl_data[k] = NULL;
2648 gcc_assert (k == count);
2649 }
2650
2651 /* Input file data before flattening (i.e. splitting them into subfiles to
2652 support incremental linking). */
2653 static int real_file_count;
2654 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2655
2656 /* Read all the symbols from the input files FNAMES. NFILES is the
2657 number of files requested in the command line. Instantiate a
2658 global call graph by aggregating all the sub-graphs found in each
2659 file. */
2660
2661 void
2662 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2663 {
2664 unsigned int i, last_file_ix;
2665 FILE *resolution;
2666 int count = 0;
2667 struct lto_file_decl_data **decl_data;
2668 symtab_node *snode;
2669
2670 symtab->initialize ();
2671
2672 timevar_push (TV_IPA_LTO_DECL_IN);
2673
2674 #ifdef ACCEL_COMPILER
2675 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2676 lto_stream_offload_p = true;
2677 #endif
2678
2679 real_file_decl_data
2680 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2681 real_file_count = nfiles;
2682
2683 /* Read the resolution file. */
2684 resolution = NULL;
2685 if (resolution_file_name)
2686 {
2687 int t;
2688 unsigned num_objects;
2689
2690 resolution = fopen (resolution_file_name, "r");
2691 if (resolution == NULL)
2692 fatal_error (input_location,
2693 "could not open symbol resolution file: %m");
2694
2695 t = fscanf (resolution, "%u", &num_objects);
2696 gcc_assert (t == 1);
2697
2698 /* True, since the plugin splits the archives. */
2699 gcc_assert (num_objects == nfiles);
2700 }
2701 symtab->state = LTO_STREAMING;
2702
2703 canonical_type_hash_cache = new hash_map<const_tree, hashval_t> (251);
2704 gimple_canonical_types = htab_create (16381, gimple_canonical_type_hash,
2705 gimple_canonical_type_eq, NULL);
2706 gcc_obstack_init (&tree_scc_hash_obstack);
2707 tree_scc_hash = new hash_table<tree_scc_hasher> (4096);
2708
2709 /* Register the common node types with the canonical type machinery so
2710 we properly share alias-sets across languages and TUs. Do not
2711 expose the common nodes as type merge targets - those that should be
2712 are already exposed by pre-loading the LTO streamer caches.
2713 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2714 for (i = 0; i < itk_none; ++i)
2715 lto_register_canonical_types (integer_types[i], true);
2716 for (i = 0; i < stk_type_kind_last; ++i)
2717 lto_register_canonical_types (sizetype_tab[i], true);
2718 for (i = 0; i < TI_MAX; ++i)
2719 lto_register_canonical_types (global_trees[i], true);
2720 for (i = 0; i < itk_none; ++i)
2721 lto_register_canonical_types (integer_types[i], false);
2722 for (i = 0; i < stk_type_kind_last; ++i)
2723 lto_register_canonical_types (sizetype_tab[i], false);
2724 for (i = 0; i < TI_MAX; ++i)
2725 lto_register_canonical_types (global_trees[i], false);
2726
2727 if (!quiet_flag)
2728 fprintf (stderr, "Reading object files:");
2729
2730 /* Read all of the object files specified on the command line. */
2731 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2732 {
2733 struct lto_file_decl_data *file_data = NULL;
2734 if (!quiet_flag)
2735 {
2736 fprintf (stderr, " %s", fnames[i]);
2737 fflush (stderr);
2738 }
2739
2740 current_lto_file = lto_obj_file_open (fnames[i], false);
2741 if (!current_lto_file)
2742 break;
2743
2744 file_data = lto_file_read (current_lto_file, resolution, &count);
2745 if (!file_data)
2746 {
2747 lto_obj_file_close (current_lto_file);
2748 free (current_lto_file);
2749 current_lto_file = NULL;
2750 break;
2751 }
2752
2753 decl_data[last_file_ix++] = file_data;
2754
2755 lto_obj_file_close (current_lto_file);
2756 free (current_lto_file);
2757 current_lto_file = NULL;
2758 }
2759
2760 lto_flatten_files (decl_data, count, last_file_ix);
2761 lto_stats.num_input_files = count;
2762 ggc_free(decl_data);
2763 real_file_decl_data = NULL;
2764
2765 lto_register_canonical_types_for_odr_types ();
2766
2767 if (resolution_file_name)
2768 fclose (resolution);
2769
2770 /* Show the LTO report before launching LTRANS. */
2771 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2772 print_lto_report_1 ();
2773
2774 /* Free gimple type merging datastructures. */
2775 delete tree_scc_hash;
2776 tree_scc_hash = NULL;
2777 obstack_free (&tree_scc_hash_obstack, NULL);
2778 htab_delete (gimple_canonical_types);
2779 gimple_canonical_types = NULL;
2780 delete canonical_type_hash_cache;
2781 canonical_type_hash_cache = NULL;
2782
2783 /* At this stage we know that the majority of GGC memory is reachable.
2784 Growing the limits prevents unnecessary invocations of GGC. */
2785 ggc_grow ();
2786 report_heap_memory_use ();
2787
2788 /* Set the hooks so that all of the ipa passes can read in their data. */
2789 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2790
2791 timevar_pop (TV_IPA_LTO_DECL_IN);
2792
2793 if (!quiet_flag)
2794 fprintf (stderr, "\nReading the symbol table:");
2795
2796 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2797 /* Read the symtab. */
2798 input_symtab ();
2799
2800 input_offload_tables (!flag_ltrans);
2801
2802 /* Store resolutions into the symbol table. */
2803
2804 FOR_EACH_SYMBOL (snode)
2805 if (snode->externally_visible && snode->real_symbol_p ()
2806 && snode->lto_file_data && snode->lto_file_data->resolution_map
2807 && !(TREE_CODE (snode->decl) == FUNCTION_DECL
2808 && fndecl_built_in_p (snode->decl))
2809 && !(VAR_P (snode->decl) && DECL_HARD_REGISTER (snode->decl)))
2810 {
2811 ld_plugin_symbol_resolution_t *res;
2812
2813 res = snode->lto_file_data->resolution_map->get (snode->decl);
2814 if (!res || *res == LDPR_UNKNOWN)
2815 {
2816 if (snode->output_to_lto_symbol_table_p ())
2817 fatal_error (input_location, "missing resolution data for %s",
2818 IDENTIFIER_POINTER
2819 (DECL_ASSEMBLER_NAME (snode->decl)));
2820 }
2821 /* Symbol versions are always used externally, but the linker does not
2822 report that correctly.
2823 This is binutils PR25924. */
2824 else if (snode->symver && *res == LDPR_PREVAILING_DEF_IRONLY)
2825 snode->resolution = LDPR_PREVAILING_DEF_IRONLY_EXP;
2826 else
2827 snode->resolution = *res;
2828 }
2829 for (i = 0; all_file_decl_data[i]; i++)
2830 if (all_file_decl_data[i]->resolution_map)
2831 {
2832 delete all_file_decl_data[i]->resolution_map;
2833 all_file_decl_data[i]->resolution_map = NULL;
2834 }
2835
2836 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2837
2838 if (!quiet_flag)
2839 fprintf (stderr, "\nMerging declarations:");
2840
2841 timevar_push (TV_IPA_LTO_DECL_MERGE);
2842 /* Merge global decls. In ltrans mode we read the merged cgraph, so we do
2843 not need to care about resolving symbols again; we only need to replace
2844 duplicated declarations read from the callgraph and from function
2845 sections. */
2846 if (!flag_ltrans)
2847 {
2848 lto_symtab_merge_decls ();
2849
2850 /* If there were errors during symbol merging bail out, we have no
2851 good way to recover here. */
2852 if (seen_error ())
2853 fatal_error (input_location,
2854 "errors during merging of translation units");
2855
2856 /* Fixup all decls. */
2857 lto_fixup_decls (all_file_decl_data);
2858 }
2859 if (tree_with_vars)
2860 ggc_free (tree_with_vars);
2861 tree_with_vars = NULL;
2862 /* During WPA we want to prevent ggc collecting by default. Grow limits
2863 until after the IPA summaries are streamed in. Basically all IPA memory
2864 is explicitly managed by ggc_free and ggc collect is not useful.
2865 The exception is the merged declarations. */
2866 ggc_grow ();
2867 report_heap_memory_use ();
2868
2869 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2870 /* Each pass will set the appropriate timer. */
2871
2872 if (!quiet_flag)
2873 fprintf (stderr, "\nReading summaries:");
2874
2875 /* Read the IPA summary data. */
2876 if (flag_ltrans)
2877 ipa_read_optimization_summaries ();
2878 else
2879 ipa_read_summaries ();
2880
2881 ggc_grow ();
2882
2883 for (i = 0; all_file_decl_data[i]; i++)
2884 {
2885 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
2886 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
2887 all_file_decl_data[i]->symtab_node_encoder = NULL;
2888 lto_in_decl_state *global_decl_state
2889 = all_file_decl_data[i]->global_decl_state;
2890 lto_free_function_in_decl_state (global_decl_state);
2891 all_file_decl_data[i]->global_decl_state = NULL;
2892 all_file_decl_data[i]->current_decl_state = NULL;
2893 }
2894
2895 if (!flag_ltrans)
2896 {
2897 /* Finally merge the cgraph according to the decl merging decisions. */
2898 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2899
2900 if (!quiet_flag)
2901 fprintf (stderr, "\nMerging symbols:");
2902
2903 gcc_assert (!dump_file);
2904 dump_file = dump_begin (lto_link_dump_id, NULL);
2905
2906 if (dump_file)
2907 {
2908 fprintf (dump_file, "Before merging:\n");
2909 symtab->dump (dump_file);
2910 }
2911 lto_symtab_merge_symbols ();
2912 /* Removal of unreachable symbols is needed to make verify_symtab pass;
2913 we still have duplicated comdat groups containing local statics.
2914 We could also just remove them while merging. */
2915 symtab->remove_unreachable_nodes (dump_file);
2916 ggc_collect ();
2917 report_heap_memory_use ();
2918
2919 if (dump_file)
2920 dump_end (lto_link_dump_id, dump_file);
2921 dump_file = NULL;
2922 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
2923 }
2924 symtab->state = IPA_SSA;
2925 /* All node removals happening here are useless, because
2926 WPA should not stream them. Still always perform remove_unreachable_nodes
2927 because we may reshape the clone tree, get rid of dead masters of inline
2928 clones, and remove symbol entries for read-only variables we keep around
2929 only to be able to constant fold them. */
2930 if (flag_ltrans)
2931 {
2932 if (symtab->dump_file)
2933 symtab->dump (symtab->dump_file);
2934 symtab->remove_unreachable_nodes (symtab->dump_file);
2935 }
2936
2937 /* Indicate that the cgraph is built and ready. */
2938 symtab->function_flags_ready = true;
2939
2940 ggc_free (all_file_decl_data);
2941 all_file_decl_data = NULL;
2942 }
2943
2944
2945
2946 /* Show various memory usage statistics related to LTO. */
2947 void
2948 print_lto_report_1 (void)
2949 {
2950 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
2951 fprintf (stderr, "%s statistics\n", pfx);
2952
2953 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
2954 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
2955 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
2956 if (flag_wpa && tree_scc_hash)
2957 {
2958 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
2959 "collision ratio: %f\n", pfx,
2960 (long) tree_scc_hash->size (),
2961 (long) tree_scc_hash->elements (),
2962 tree_scc_hash->collisions ());
2963 hash_table<tree_scc_hasher>::iterator hiter;
2964 tree_scc *scc, *max_scc = NULL;
2965 unsigned max_length = 0;
2966 FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, x, hiter)
2967 {
2968 unsigned length = 0;
2969 tree_scc *s = scc;
2970 for (; s; s = s->next)
2971 length++;
2972 if (length > max_length)
2973 {
2974 max_length = length;
2975 max_scc = scc;
2976 }
2977 }
2978 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
2979 pfx, max_length, max_scc->len);
2980 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
2981 num_scc_compares, num_scc_compare_collisions,
2982 num_scc_compare_collisions / (double) num_scc_compares);
2983 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
2984 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
2985 total_scc_size_merged);
2986 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
2987 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
2988 pfx, num_prevailing_types, num_type_scc_trees);
2989 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
2990 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
2991 (long) htab_size (gimple_canonical_types),
2992 (long) htab_elements (gimple_canonical_types),
2993 (long) gimple_canonical_types->searches,
2994 (long) gimple_canonical_types->collisions,
2995 htab_collisions (gimple_canonical_types));
2996 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
2997 "%lu elements, %ld searches\n", pfx,
2998 num_canonical_type_hash_entries,
2999 num_canonical_type_hash_queries);
3000 }
3001
3002 print_lto_report (pfx);
3003 }
3004
3005 GTY(()) tree lto_eh_personality_decl;
3006
3007 /* Return the LTO personality function decl. */
3008
3009 tree
3010 lto_eh_personality (void)
3011 {
3012 if (!lto_eh_personality_decl)
3013 {
3014 /* Use the first personality DECL for our personality if we don't
3015 support multiple ones. This ensures that we don't artificially
3016 create the need for them in a single-language program. */
3017 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3018 lto_eh_personality_decl = first_personality_decl;
3019 else
3020 lto_eh_personality_decl = lhd_gcc_personality ();
3021 }
3022
3023 return lto_eh_personality_decl;
3024 }
3025
3026 /* Set the process name based on the LTO mode. */
3027
3028 static void
3029 lto_process_name (void)
3030 {
3031 if (flag_lto)
3032 setproctitle (flag_incremental_link == INCREMENTAL_LINK_LTO
3033 ? "lto1-inclink" : "lto1-lto");
3034 if (flag_wpa)
3035 setproctitle ("lto1-wpa");
3036 if (flag_ltrans)
3037 setproctitle ("lto1-ltrans");
3038 }
3039
3040
3041 /* Initialize the LTO front end. */
3042
3043 void
3044 lto_fe_init (void)
3045 {
3046 lto_process_name ();
3047 lto_streamer_hooks_init ();
3048 lto_reader_init ();
3049 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3050 memset (&lto_stats, 0, sizeof (lto_stats));
3051 bitmap_obstack_initialize (NULL);
3052 gimple_register_cfg_hooks ();
3053 #ifndef ACCEL_COMPILER
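/* Build an identity mode table; on the host every machine mode maps to
   itself.  */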
3054 unsigned char *table
3055 = ggc_vec_alloc<unsigned char> (MAX_MACHINE_MODE);
3056 for (int m = 0; m < MAX_MACHINE_MODE; m++)
3057 table[m] = m;
3058 lto_mode_identity_table = table;
3059 #endif
3060 }
3061
3062 #include "gt-lto-lto-common.h"