comparison gcc/cgraphclones.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents
children 84e7813d76e9
1 /* Callgraph clones
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module provides facilities for cloning functions, i.e. creating
22 new functions based on existing functions with simple modifications,
23 such as replacement of parameters.
24
25 To allow whole-program optimization without the actual presence of function
26 bodies, an additional infrastructure is provided for so-called virtual
27 clones.
28
29 A virtual clone in the callgraph is a function that has no
30 associated body, just a description of how to create its body based
31 on a different function (which itself may be a virtual clone).
32
33 The description of function modifications includes adjustments to
34 the function's signature (which allows, for example, removing or
35 adding function arguments), substitutions to perform on the
36 function body, and, for inlined functions, a pointer to the
37 function that it will be inlined into.
38
39 It is also possible to redirect any edge of the callgraph from a
40 function to its virtual clone. This implies updating of the call
41 site to adjust for the new function signature.
42
43 Most of the transformations performed by inter-procedural
44 optimizations can be represented via virtual clones. For
45 instance, a constant propagation pass can produce a virtual clone
46 of the function which replaces one of its arguments by a
47 constant. The inliner can represent its decisions by producing a
48 clone of a function whose body will be later integrated into
49 a given function.
50
51 Using virtual clones, the program can be easily updated
52 during the Execute stage, solving most of the pass interaction
53 problems that would otherwise occur during Transform.
54
55 Virtual clones are later materialized in the LTRANS stage and
56 turned into real functions. Passes executed after the virtual
57 clone was introduced also perform their Transform stage
58 on new functions, so for a pass there is no significant
59 difference between operating on a real function or a virtual
60 clone introduced before its Execute stage.
61
62 Optimization passes then work on virtual clones introduced before
63 their Execute stage as if they were real functions. The
64 only difference is that clones are not visible during the
65 Generate Summary stage. */
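/* As a minimal illustration (hypothetical pass code, not taken from any
   actual pass; "node" and "redirect_callers" are assumed to be provided by
   the caller), an IPA pass could request a virtual clone that drops
   parameter number 1 and redirects the given caller edges to it:

     bitmap skip = BITMAP_GGC_ALLOC ();
     bitmap_set_bit (skip, 1);
     cgraph_node *clone
       = node->create_virtual_clone (redirect_callers, NULL, skip,
                                     "constprop");

   The clone receives a fresh decl and assembler name but no body; the body
   is produced later by cgraph_materialize_clone when clones are
   materialized.  */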
66
67 #include "config.h"
68 #include "system.h"
69 #include "coretypes.h"
70 #include "backend.h"
71 #include "target.h"
72 #include "rtl.h"
73 #include "tree.h"
74 #include "gimple.h"
75 #include "stringpool.h"
76 #include "cgraph.h"
77 #include "lto-streamer.h"
78 #include "tree-eh.h"
79 #include "tree-cfg.h"
80 #include "tree-inline.h"
81 #include "dumpfile.h"
82 #include "gimple-pretty-print.h"
83
84 /* Create a clone of this edge in the callgraph node N, with the call
85 represented by CALL_STMT. */
86
87 cgraph_edge *
88 cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
89 profile_count num, profile_count den,
90 int freq_scale, bool update_original)
91 {
92 cgraph_edge *new_edge;
93 profile_count gcov_count
94 = (num == profile_count::zero () || den > 0)
95 ? count.apply_scale (num, den) : count;
96 gcov_type freq;
97
98 /* We do not want to ignore loop nest after frequency drops to 0. */
99 if (!freq_scale)
100 freq_scale = 1;
101 freq = frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
102 if (freq > CGRAPH_FREQ_MAX)
103 freq = CGRAPH_FREQ_MAX;
104
105 if (indirect_unknown_callee)
106 {
107 tree decl;
108
109 if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
110 /* When the call is speculative, we need to resolve it
111 via cgraph_resolve_speculation and not here. */
112 && !speculative)
113 {
114 cgraph_node *callee = cgraph_node::get (decl);
115 gcc_checking_assert (callee);
116 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
117 }
118 else
119 {
120 new_edge = n->create_indirect_edge (call_stmt,
121 indirect_info->ecf_flags,
122 gcov_count, freq, false);
123 *new_edge->indirect_info = *indirect_info;
124 }
125 }
126 else
127 {
128 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
129 if (indirect_info)
130 {
131 new_edge->indirect_info
132 = ggc_cleared_alloc<cgraph_indirect_call_info> ();
133 *new_edge->indirect_info = *indirect_info;
134 }
135 }
136
137 new_edge->inline_failed = inline_failed;
138 new_edge->indirect_inlining_edge = indirect_inlining_edge;
139 new_edge->lto_stmt_uid = stmt_uid;
140 /* Manually clone the flags that depend on call_stmt availability. */
141 new_edge->can_throw_external = can_throw_external;
142 new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
143 new_edge->speculative = speculative;
144 new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
145 if (update_original)
146 {
147 count -= new_edge->count;
148 }
149 symtab->call_edge_duplication_hooks (this, new_edge);
150 return new_edge;
151 }
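/* For intuition about the scaling above (illustrative numbers only): if the
   original edge has count 1000 and the clone is given NUM = 250, DEN = 1000,
   the new edge gets 1000 * 250 / 1000 = 250, and with UPDATE_ORIGINAL the
   remaining 750 stays on the original edge.  Frequencies are scaled by
   FREQ_SCALE relative to CGRAPH_FREQ_BASE and capped at CGRAPH_FREQ_MAX.  */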
152
153 /* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
154 return value if SKIP_RETURN is true. */
155
156 tree
157 cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
158 bool skip_return)
159 {
160 tree new_type = NULL;
161 tree args, new_args = NULL;
162 tree new_reversed;
163 int i = 0;
164
165 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
166 args = TREE_CHAIN (args), i++)
167 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
168 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
169
170 new_reversed = nreverse (new_args);
171 if (args)
172 {
173 if (new_reversed)
174 TREE_CHAIN (new_args) = void_list_node;
175 else
176 new_reversed = void_list_node;
177 }
178
179 /* Use copy_node to preserve as much as possible from the original type
180 (debug info, attribute lists etc.).
181 The exception is that METHOD_TYPEs must have a THIS argument;
182 when we are asked to remove it, we need to build a new FUNCTION_TYPE
183 instead. */
184 if (TREE_CODE (orig_type) != METHOD_TYPE
185 || !args_to_skip
186 || !bitmap_bit_p (args_to_skip, 0))
187 {
188 new_type = build_distinct_type_copy (orig_type);
189 TYPE_ARG_TYPES (new_type) = new_reversed;
190 }
191 else
192 {
193 new_type
194 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
195 new_reversed));
196 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
197 }
198
199 if (skip_return)
200 TREE_TYPE (new_type) = void_type_node;
201
202 return new_type;
203 }
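/* For example (illustrative only): given an ORIG_TYPE describing
   int f (int, long, char), ARGS_TO_SKIP with only bit 1 set and SKIP_RETURN
   false, the result describes int (int, char); with SKIP_RETURN true it
   describes void (int, char).  Bit 0 refers to the first argument, which for
   a METHOD_TYPE is the THIS pointer and forces a plain FUNCTION_TYPE to be
   built.  */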
204
205 /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
206 return value if SKIP_RETURN is true.
207
208 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
209 linked by TREE_CHAIN directly. The caller is responsible for eliminating
210 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
211
212 static tree
213 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
214 bool skip_return)
215 {
216 tree new_decl = copy_node (orig_decl);
217 tree new_type;
218
219 new_type = TREE_TYPE (orig_decl);
220 if (prototype_p (new_type)
221 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
222 new_type
223 = cgraph_build_function_type_skip_args (new_type, args_to_skip,
224 skip_return);
225 TREE_TYPE (new_decl) = new_type;
226
227 /* For declarations setting DECL_VINDEX (i.e. methods)
228 we expect the first argument to be the THIS pointer. */
229 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
230 DECL_VINDEX (new_decl) = NULL_TREE;
231
232 /* When signature changes, we need to clear builtin info. */
233 if (DECL_BUILT_IN (new_decl)
234 && args_to_skip
235 && !bitmap_empty_p (args_to_skip))
236 {
237 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
238 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
239 }
240 /* The front end might have information and assumptions about the other
241 arguments. */
242 DECL_LANG_SPECIFIC (new_decl) = NULL;
243 return new_decl;
244 }
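/* For example: skipping any argument of a decl that is a normal builtin
   clears its builtin status above, and skipping argument 0 of a virtual
   method clears DECL_VINDEX because the THIS pointer is gone.  */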
245
246 /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private
247 clone or its thunk. */
248
249 static void
250 set_new_clone_decl_and_node_flags (cgraph_node *new_node)
251 {
252 DECL_EXTERNAL (new_node->decl) = 0;
253 TREE_PUBLIC (new_node->decl) = 0;
254 DECL_COMDAT (new_node->decl) = 0;
255 DECL_WEAK (new_node->decl) = 0;
256 DECL_VIRTUAL_P (new_node->decl) = 0;
257 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
258 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
259
260 new_node->externally_visible = 0;
261 new_node->local.local = 1;
262 new_node->lowered = true;
263 }
264
265 /* Duplicate thunk THUNK if necessary, but make it refer to NODE.
266 ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
267 The function can return NODE if no thunk is necessary, which can happen
268 when the thunk is this_adjusting but we are removing the this parameter. */
269
270 static cgraph_node *
271 duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
272 {
273 cgraph_node *new_thunk, *thunk_of;
274 thunk_of = thunk->callees->callee->ultimate_alias_target ();
275
276 if (thunk_of->thunk.thunk_p)
277 node = duplicate_thunk_for_node (thunk_of, node);
278
279 if (!DECL_ARGUMENTS (thunk->decl))
280 thunk->get_untransformed_body ();
281
282 cgraph_edge *cs;
283 for (cs = node->callers; cs; cs = cs->next_caller)
284 if (cs->caller->thunk.thunk_p
285 && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
286 && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
287 && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
288 && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
289 return cs->caller;
290
291 tree new_decl;
292 if (!node->clone.args_to_skip)
293 new_decl = copy_node (thunk->decl);
294 else
295 {
296 /* We do not need to duplicate this_adjusting thunks if we have removed
297 the this parameter. */
298 if (thunk->thunk.this_adjusting
299 && bitmap_bit_p (node->clone.args_to_skip, 0))
300 return node;
301
302 new_decl = build_function_decl_skip_args (thunk->decl,
303 node->clone.args_to_skip,
304 false);
305 }
306
307 tree *link = &DECL_ARGUMENTS (new_decl);
308 int i = 0;
309 for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
310 {
311 if (!node->clone.args_to_skip
312 || !bitmap_bit_p (node->clone.args_to_skip, i))
313 {
314 tree nd = copy_node (pd);
315 DECL_CONTEXT (nd) = new_decl;
316 *link = nd;
317 link = &DECL_CHAIN (nd);
318 }
319 }
320 *link = NULL_TREE;
321
322 gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
323 gcc_checking_assert (!DECL_INITIAL (new_decl));
324 gcc_checking_assert (!DECL_RESULT (new_decl));
325 gcc_checking_assert (!DECL_RTL_SET_P (new_decl));
326
327 DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
328 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
329
330 new_thunk = cgraph_node::create (new_decl);
331 set_new_clone_decl_and_node_flags (new_thunk);
332 new_thunk->definition = true;
333 new_thunk->local.can_change_signature = node->local.can_change_signature;
334 new_thunk->thunk = thunk->thunk;
335 new_thunk->unique_name = in_lto_p;
336 new_thunk->former_clone_of = thunk->decl;
337 new_thunk->clone.args_to_skip = node->clone.args_to_skip;
338 new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
339
340 cgraph_edge *e = new_thunk->create_edge (node, NULL, new_thunk->count,
341 CGRAPH_FREQ_BASE);
342 symtab->call_edge_duplication_hooks (thunk->callees, e);
343 symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
344 return new_thunk;
345 }
346
347 /* If E does not lead to a thunk, simply redirect it to N. Otherwise create
348 one or more equivalent thunks for N and redirect E to the first in the
349 chain. Note that it is then necessary to call
350 n->expand_all_artificial_thunks once all callers are redirected. */
351
352 void
353 cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
354 {
355 cgraph_node *orig_to = callee->ultimate_alias_target ();
356 if (orig_to->thunk.thunk_p)
357 n = duplicate_thunk_for_node (orig_to, n);
358
359 redirect_callee (n);
360 }
361
362 /* Call expand_thunk on all callers that are thunks, and analyze those nodes
363 that were expanded. */
364
365 void
366 cgraph_node::expand_all_artificial_thunks ()
367 {
368 cgraph_edge *e;
369 for (e = callers; e;)
370 if (e->caller->thunk.thunk_p)
371 {
372 cgraph_node *thunk = e->caller;
373
374 e = e->next_caller;
375 if (thunk->expand_thunk (false, false))
376 {
377 thunk->thunk.thunk_p = false;
378 thunk->analyze ();
379 }
380 thunk->expand_all_artificial_thunks ();
381 }
382 else
383 e = e->next_caller;
384 }
385
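/* If the ipa clones dump file is enabled, record that CLONE was created from
   ORIGINAL with the given SUFFIX, one semicolon-separated record per clone,
   and remember both nodes in the set of cloned nodes.  */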
386 void
387 dump_callgraph_transformation (const cgraph_node *original,
388 const cgraph_node *clone,
389 const char *suffix)
390 {
391 if (symtab->ipa_clones_dump_file)
392 {
393 fprintf (symtab->ipa_clones_dump_file,
394 "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
395 original->asm_name (), original->order,
396 DECL_SOURCE_FILE (original->decl),
397 DECL_SOURCE_LINE (original->decl),
398 DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
399 clone->order, DECL_SOURCE_FILE (clone->decl),
400 DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
401 suffix);
402
403 symtab->cloned_nodes.add (original);
404 symtab->cloned_nodes.add (clone);
405 }
406 }
407
408 /* Create a node representing a clone of N executed COUNT times. Decrease
409 the execution counts of the original node accordingly.
410 The new clone will have its decl set to DECL, which may or may not be the
411 same as the decl of N.
412
413 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
414 function's profile to reflect the fact that part of the execution is now
415 handled by the clone.
416 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
417 the new clone. Otherwise the caller is responsible for doing so later.
418
419 If the new node is being inlined into another one, NEW_INLINED_TO should be
420 the outermost function the new one is (even indirectly) inlined into. All
421 hooks will see this in the node's global.inlined_to when invoked. It can be
422 NULL if the node is not inlined. */
423
424 cgraph_node *
425 cgraph_node::create_clone (tree new_decl, profile_count prof_count, int freq,
426 bool update_original,
427 vec<cgraph_edge *> redirect_callers,
428 bool call_duplication_hook,
429 cgraph_node *new_inlined_to,
430 bitmap args_to_skip, const char *suffix)
431 {
432 cgraph_node *new_node = symtab->create_empty ();
433 cgraph_edge *e;
434 unsigned i;
435
436 if (new_inlined_to)
437 dump_callgraph_transformation (this, new_inlined_to, "inlining to");
438
439 new_node->count = prof_count;
440 new_node->decl = new_decl;
441 new_node->register_symbol ();
442 new_node->origin = origin;
443 new_node->lto_file_data = lto_file_data;
444 if (new_node->origin)
445 {
446 new_node->next_nested = new_node->origin->nested;
447 new_node->origin->nested = new_node;
448 }
449 new_node->analyzed = analyzed;
450 new_node->definition = definition;
451 new_node->local = local;
452 new_node->externally_visible = false;
453 new_node->no_reorder = no_reorder;
454 new_node->local.local = true;
455 new_node->global = global;
456 new_node->global.inlined_to = new_inlined_to;
457 new_node->rtl = rtl;
458 new_node->frequency = frequency;
459 new_node->tp_first_run = tp_first_run;
460 new_node->tm_clone = tm_clone;
461 new_node->icf_merged = icf_merged;
462 new_node->merged_comdat = merged_comdat;
463 new_node->thunk = thunk;
464
465 new_node->clone.tree_map = NULL;
466 new_node->clone.args_to_skip = args_to_skip;
467 new_node->split_part = split_part;
468 if (!args_to_skip)
469 new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
470 else if (clone.combined_args_to_skip)
471 {
472 new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
473 bitmap_ior (new_node->clone.combined_args_to_skip,
474 clone.combined_args_to_skip, args_to_skip);
475 }
476 else
477 new_node->clone.combined_args_to_skip = args_to_skip;
478
479 FOR_EACH_VEC_ELT (redirect_callers, i, e)
480 {
481 /* Redirect calls to the old version node to point to its new
482 version. The only exception is when the edge was proved to
483 be unreachable during the cloning procedure. */
484 if (!e->callee
485 || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
486 || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
487 e->redirect_callee_duplicating_thunks (new_node);
488 }
489 new_node->expand_all_artificial_thunks ();
490
491 for (e = callees;e; e=e->next_callee)
492 e->clone (new_node, e->call_stmt, e->lto_stmt_uid, new_node->count, count,
493 freq, update_original);
494
495 for (e = indirect_calls; e; e = e->next_callee)
496 e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
497 new_node->count, count, freq, update_original);
498 new_node->clone_references (this);
499
500 new_node->next_sibling_clone = clones;
501 if (clones)
502 clones->prev_sibling_clone = new_node;
503 clones = new_node;
504 new_node->clone_of = this;
505
506 if (update_original)
507 count -= prof_count;
508
509 if (call_duplication_hook)
510 symtab->call_cgraph_duplication_hooks (this, new_node);
511
512 if (!new_inlined_to)
513 dump_callgraph_transformation (this, new_node, suffix);
514
515 return new_node;
516 }
517
518 static GTY(()) unsigned int clone_fn_id_num;
519
520 /* Return a new assembler name for a clone with SUFFIX of a decl named
521 NAME. */
522
523 tree
524 clone_function_name_1 (const char *name, const char *suffix)
525 {
526 size_t len = strlen (name);
527 char *tmp_name, *prefix;
528
529 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
530 memcpy (prefix, name, len);
531 strcpy (prefix + len + 1, suffix);
532 prefix[len] = symbol_table::symbol_suffix_separator ();
533 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
534 return get_identifier (tmp_name);
535 }
536
537 /* Return a new assembler name for a clone of DECL with SUFFIX. */
538
539 tree
540 clone_function_name (tree decl, const char *suffix)
541 {
542 tree name = DECL_ASSEMBLER_NAME (decl);
543 return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
544 }
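/* For example (assuming a typical ELF target where the suffix separator is
   '.' and ASM_FORMAT_PRIVATE_NAME appends a numeric discriminator), cloning
   a function whose assembler name is "foo" with suffix "constprop" produces
   names of the form "foo.constprop.0", "foo.constprop.1", and so on.  */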
545
546
547 /* Create a callgraph node clone with a new declaration. The actual body will
548 be copied later, at compilation stage.
549
550 TODO: after merging in ipa-sra, use function call notes instead of the
551 args_to_skip bitmap interface.
552 */
553 cgraph_node *
554 cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
555 vec<ipa_replace_map *, va_gc> *tree_map,
556 bitmap args_to_skip, const char * suffix)
557 {
558 tree old_decl = decl;
559 cgraph_node *new_node = NULL;
560 tree new_decl;
561 size_t len, i;
562 ipa_replace_map *map;
563 char *name;
564
565 gcc_checking_assert (local.versionable);
566 gcc_assert (local.can_change_signature || !args_to_skip);
567
568 /* Make a new FUNCTION_DECL tree node */
569 if (!args_to_skip)
570 new_decl = copy_node (old_decl);
571 else
572 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
573
574 /* These pointers represent function body and will be populated only when clone
575 is materialized. */
576 gcc_assert (new_decl != old_decl);
577 DECL_STRUCT_FUNCTION (new_decl) = NULL;
578 DECL_ARGUMENTS (new_decl) = NULL;
579 DECL_INITIAL (new_decl) = NULL;
580 DECL_RESULT (new_decl) = NULL;
581 /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
582 sometimes storing only clone decl instead of original. */
583
584 /* Generate a new name for the new version. */
585 len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
586 name = XALLOCAVEC (char, len + strlen (suffix) + 2);
587 memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
588 strcpy (name + len + 1, suffix);
589 name[len] = '.';
590 DECL_NAME (new_decl) = get_identifier (name);
591 SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
592 SET_DECL_RTL (new_decl, NULL);
593
594 new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
595 redirect_callers, false, NULL, args_to_skip, suffix);
596
597 /* Update the properties.
598 Make the clone visible only within this translation unit. Make sure
599 it is not weak either.
600 ??? We cannot use COMDAT linkage because there is no
601 ABI support for this. */
602 set_new_clone_decl_and_node_flags (new_node);
603 new_node->clone.tree_map = tree_map;
604 if (!implicit_section)
605 new_node->set_section (get_section ());
606
607 /* Clones of global symbols or symbols with unique names are unique. */
608 if ((TREE_PUBLIC (old_decl)
609 && !DECL_EXTERNAL (old_decl)
610 && !DECL_WEAK (old_decl)
611 && !DECL_COMDAT (old_decl))
612 || in_lto_p)
613 new_node->unique_name = true;
614 FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
615 new_node->maybe_create_reference (map->new_tree, NULL);
616
617 if (ipa_transforms_to_apply.exists ())
618 new_node->ipa_transforms_to_apply
619 = ipa_transforms_to_apply.copy ();
620
621 symtab->call_cgraph_duplication_hooks (this, new_node);
622
623 return new_node;
624 }
625
626 /* A callgraph node is being removed from the symbol table; see if its entry
627 can be replaced by another inline clone. */
628 cgraph_node *
629 cgraph_node::find_replacement (void)
630 {
631 cgraph_node *next_inline_clone, *replacement;
632
633 for (next_inline_clone = clones;
634 next_inline_clone
635 && next_inline_clone->decl != decl;
636 next_inline_clone = next_inline_clone->next_sibling_clone)
637 ;
638
639 /* If there is an inline clone of the node being removed, we need
640 to put it into the position of the removed node and reorganize all
641 other clones to be based on it. */
642 if (next_inline_clone)
643 {
644 cgraph_node *n;
645 cgraph_node *new_clones;
646
647 replacement = next_inline_clone;
648
649 /* Unlink inline clone from the list of clones of removed node. */
650 if (next_inline_clone->next_sibling_clone)
651 next_inline_clone->next_sibling_clone->prev_sibling_clone
652 = next_inline_clone->prev_sibling_clone;
653 if (next_inline_clone->prev_sibling_clone)
654 {
655 gcc_assert (clones != next_inline_clone);
656 next_inline_clone->prev_sibling_clone->next_sibling_clone
657 = next_inline_clone->next_sibling_clone;
658 }
659 else
660 {
661 gcc_assert (clones == next_inline_clone);
662 clones = next_inline_clone->next_sibling_clone;
663 }
664
665 new_clones = clones;
666 clones = NULL;
667
668 /* Copy clone info. */
669 next_inline_clone->clone = clone;
670
671 /* Now place it into the clone tree at the same level as NODE. */
672 next_inline_clone->clone_of = clone_of;
673 next_inline_clone->prev_sibling_clone = NULL;
674 next_inline_clone->next_sibling_clone = NULL;
675 if (clone_of)
676 {
677 if (clone_of->clones)
678 clone_of->clones->prev_sibling_clone = next_inline_clone;
679 next_inline_clone->next_sibling_clone = clone_of->clones;
680 clone_of->clones = next_inline_clone;
681 }
682
683 /* Merge the clone list. */
684 if (new_clones)
685 {
686 if (!next_inline_clone->clones)
687 next_inline_clone->clones = new_clones;
688 else
689 {
690 n = next_inline_clone->clones;
691 while (n->next_sibling_clone)
692 n = n->next_sibling_clone;
693 n->next_sibling_clone = new_clones;
694 new_clones->prev_sibling_clone = n;
695 }
696 }
697
698 /* Update clone_of pointers. */
699 n = new_clones;
700 while (n)
701 {
702 n->clone_of = next_inline_clone;
703 n = n->next_sibling_clone;
704 }
705 return replacement;
706 }
707 else
708 return NULL;
709 }
710
711 /* Like cgraph_set_call_stmt but walk the clone tree and update all
712 clones sharing the same function body.
713 When UPDATE_SPECULATIVE is true, all three components of the
714 speculative edge get updated. Otherwise we update only the direct
715 call. */
716
717 void
718 cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
719 gcall *new_stmt,
720 bool update_speculative)
721 {
722 cgraph_node *node;
723 cgraph_edge *edge = get_edge (old_stmt);
724
725 if (edge)
726 edge->set_call_stmt (new_stmt, update_speculative);
727
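/* Walk the clone tree in preorder: first descend into a node's clones,
   then move to its next sibling clone, backing up through clone_of links
   until we return to this node.  The same traversal pattern is used in
   create_edge_including_clones below.  */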
728 node = clones;
729 if (node)
730 while (node != this)
731 {
732 cgraph_edge *edge = node->get_edge (old_stmt);
733 if (edge)
734 {
735 edge->set_call_stmt (new_stmt, update_speculative);
736 /* If UPDATE_SPECULATIVE is false, it means that we are turning
737 speculative call into a real code sequence. Update the
738 callgraph edges. */
739 if (edge->speculative && !update_speculative)
740 {
741 cgraph_edge *direct, *indirect;
742 ipa_ref *ref;
743
744 gcc_assert (!edge->indirect_unknown_callee);
745 edge->speculative_call_info (direct, indirect, ref);
746 direct->speculative = false;
747 indirect->speculative = false;
748 ref->speculative = false;
749 }
750 }
751 if (node->clones)
752 node = node->clones;
753 else if (node->next_sibling_clone)
754 node = node->next_sibling_clone;
755 else
756 {
757 while (node != this && !node->next_sibling_clone)
758 node = node->clone_of;
759 if (node != this)
760 node = node->next_sibling_clone;
761 }
762 }
763 }
764
765 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
766 the same function body. If clones already have an edge for OLD_STMT, only
767 update the edge in the same way cgraph_set_call_stmt_including_clones does.
768
769 TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
770 relative frequencies of the clones. */
771
772 void
773 cgraph_node::create_edge_including_clones (cgraph_node *callee,
774 gimple *old_stmt, gcall *stmt,
775 profile_count count,
776 int freq,
777 cgraph_inline_failed_t reason)
778 {
779 cgraph_node *node;
780 cgraph_edge *edge;
781
782 if (!get_edge (stmt))
783 {
784 edge = create_edge (callee, stmt, count, freq);
785 edge->inline_failed = reason;
786 }
787
788 node = clones;
789 if (node)
790 while (node != this)
791 /* Thunk clones do not get updated while copying inline function body. */
792 if (!node->thunk.thunk_p)
793 {
794 cgraph_edge *edge = node->get_edge (old_stmt);
795
796 /* It is possible that clones already contain the edge while
797 the master didn't. Either we promoted an indirect call into a direct
798 call in the clone, or we are processing clones of an unreachable
799 master whose edges have been removed. */
800 if (edge)
801 edge->set_call_stmt (stmt);
802 else if (! node->get_edge (stmt))
803 {
804 edge = node->create_edge (callee, stmt, count, freq);
805 edge->inline_failed = reason;
806 }
807
808 if (node->clones)
809 node = node->clones;
810 else if (node->next_sibling_clone)
811 node = node->next_sibling_clone;
812 else
813 {
814 while (node != this && !node->next_sibling_clone)
815 node = node->clone_of;
816 if (node != this)
817 node = node->next_sibling_clone;
818 }
819 }
820 }
821
822 /* Remove the node from the cgraph and all inline clones inlined into it.
823 However, skip removal of FORBIDDEN_NODE and return true if it needs to be
824 removed. This allows the function to be called from an outer loop walking
825 the clone tree. */
826
827 bool
828 cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
829 {
830 cgraph_edge *e, *next;
831 bool found = false;
832
833 if (this == forbidden_node)
834 {
835 callers->remove ();
836 return true;
837 }
838 for (e = callees; e; e = next)
839 {
840 next = e->next_callee;
841 if (!e->inline_failed)
842 found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
843 }
844 remove ();
845 return found;
846 }
847
848 /* The edges representing the callers of the NEW_VERSION node were
849 fixed by cgraph_function_versioning (); now the call_expr in their
850 respective tree code should be updated to call NEW_VERSION. */
851
852 static void
853 update_call_expr (cgraph_node *new_version)
854 {
855 cgraph_edge *e;
856
857 gcc_assert (new_version);
858
859 /* Update the call expr on the edges to call the new version. */
860 for (e = new_version->callers; e; e = e->next_caller)
861 {
862 function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
863 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
864 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
865 }
866 }
867
868
869 /* Create a new cgraph node which is the new version of the
870 callgraph node. REDIRECT_CALLERS holds the caller
871 edges which should be redirected to point to
872 NEW_VERSION. All the callee edges of the node
873 are cloned to the new version node. Return the new
874 version node.
875
876 If non-NULL, BBS_TO_COPY determines which basic blocks
877 are copied, to prevent duplication of calls that are dead
878 in the clone. */
879
880 cgraph_node *
881 cgraph_node::create_version_clone (tree new_decl,
882 vec<cgraph_edge *> redirect_callers,
883 bitmap bbs_to_copy,
884 const char *suffix)
885 {
886 cgraph_node *new_version;
887 cgraph_edge *e;
888 unsigned i;
889
890 new_version = cgraph_node::create (new_decl);
891
892 new_version->analyzed = analyzed;
893 new_version->definition = definition;
894 new_version->local = local;
895 new_version->externally_visible = false;
896 new_version->no_reorder = no_reorder;
897 new_version->local.local = new_version->definition;
898 new_version->global = global;
899 new_version->rtl = rtl;
900 new_version->count = count;
901
902 for (e = callees; e; e=e->next_callee)
903 if (!bbs_to_copy
904 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
905 e->clone (new_version, e->call_stmt,
906 e->lto_stmt_uid, count, count,
907 CGRAPH_FREQ_BASE,
908 true);
909 for (e = indirect_calls; e; e=e->next_callee)
910 if (!bbs_to_copy
911 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
912 e->clone (new_version, e->call_stmt,
913 e->lto_stmt_uid, count, count,
914 CGRAPH_FREQ_BASE,
915 true);
916 FOR_EACH_VEC_ELT (redirect_callers, i, e)
917 {
918 /* Redirect calls to the old version node to point to its new
919 version. */
920 e->redirect_callee (new_version);
921 }
922
923 symtab->call_cgraph_duplication_hooks (this, new_version);
924
925 dump_callgraph_transformation (this, new_version, suffix);
926
927 return new_version;
928 }
929
930 /* Perform function versioning.
931 Function versioning includes copying of the tree and
932 a callgraph update (creating a new cgraph node and updating
933 its callees and callers).
934
935 The REDIRECT_CALLERS vector includes the edges to be redirected
936 to the new version.
937
938 TREE_MAP is a mapping of tree nodes we want to replace with
939 new ones (according to results of prior analysis).
940
941 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
942 from the new version.
943 If SKIP_RETURN is true, the new version will return void.
944 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
945 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
946
947 Return the new version's cgraph node. */
948
949 cgraph_node *
950 cgraph_node::create_version_clone_with_body
951 (vec<cgraph_edge *> redirect_callers,
952 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
953 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
954 const char *suffix)
955 {
956 tree old_decl = decl;
957 cgraph_node *new_version_node = NULL;
958 tree new_decl;
959
960 if (!tree_versionable_function_p (old_decl))
961 return NULL;
962
963 gcc_assert (local.can_change_signature || !args_to_skip);
964
965 /* Make a new FUNCTION_DECL tree node for the new version. */
966 if (!args_to_skip && !skip_return)
967 new_decl = copy_node (old_decl);
968 else
969 new_decl
970 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
971
972 /* Generate a new name for the new version. */
973 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
974 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
975 SET_DECL_RTL (new_decl, NULL);
976
977 /* When the old decl was a con-/destructor make sure the clone isn't. */
978 DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
979 DECL_STATIC_DESTRUCTOR (new_decl) = 0;
980
981 /* Create the new version's call-graph node
982 and update the edges of the new node. */
983 new_version_node = create_version_clone (new_decl, redirect_callers,
984 bbs_to_copy, suffix);
985
986 if (ipa_transforms_to_apply.exists ())
987 new_version_node->ipa_transforms_to_apply
988 = ipa_transforms_to_apply.copy ();
989 /* Copy the OLD_VERSION_NODE function tree to the new version. */
990 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
991 skip_return, bbs_to_copy, new_entry_block);
992
993 /* Update the new version's properties.
994 Make the new version visible only within this translation unit. Make sure
995 it is not weak either.
996 ??? We cannot use COMDAT linkage because there is no
997 ABI support for this. */
998 new_version_node->make_decl_local ();
999 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1000 new_version_node->externally_visible = 0;
1001 new_version_node->local.local = 1;
1002 new_version_node->lowered = true;
1003 if (!implicit_section)
1004 new_version_node->set_section (get_section ());
1005 /* Clones of global symbols or symbols with unique names are unique. */
1006 if ((TREE_PUBLIC (old_decl)
1007 && !DECL_EXTERNAL (old_decl)
1008 && !DECL_WEAK (old_decl)
1009 && !DECL_COMDAT (old_decl))
1010 || in_lto_p)
1011 new_version_node->unique_name = true;
1012
1013 /* Update the call_expr on the edges to call the new version node. */
1014 update_call_expr (new_version_node);
1015
1016 symtab->call_cgraph_insertion_hooks (this);
1017 return new_version_node;
1018 }
1019
1020 /* Given virtual clone, turn it into actual clone. */
1021
1022 static void
1023 cgraph_materialize_clone (cgraph_node *node)
1024 {
1025 bitmap_obstack_initialize (NULL);
1026 node->former_clone_of = node->clone_of->decl;
1027 if (node->clone_of->former_clone_of)
1028 node->former_clone_of = node->clone_of->former_clone_of;
1029 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1030 tree_function_versioning (node->clone_of->decl, node->decl,
1031 node->clone.tree_map, true,
1032 node->clone.args_to_skip, false,
1033 NULL, NULL);
1034 if (symtab->dump_file)
1035 {
1036 dump_function_to_file (node->clone_of->decl, symtab->dump_file,
1037 dump_flags);
1038 dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
1039 }
1040
1041 /* Function is no longer clone. */
1042 if (node->next_sibling_clone)
1043 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1044 if (node->prev_sibling_clone)
1045 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1046 else
1047 node->clone_of->clones = node->next_sibling_clone;
1048 node->next_sibling_clone = NULL;
1049 node->prev_sibling_clone = NULL;
1050 if (!node->clone_of->analyzed && !node->clone_of->clones)
1051 {
1052 node->clone_of->release_body ();
1053 node->clone_of->remove_callees ();
1054 node->clone_of->remove_all_references ();
1055 }
1056 node->clone_of = NULL;
1057 bitmap_obstack_release (NULL);
1058 }
1059
1060 /* Once all functions from the compilation unit are in memory, produce all
1061 clones and update all calls. We might also do this on demand if we don't
1062 want to bring all functions into memory prior to compilation, but the
1063 current WHOPR implementation does, and it is a bit easier to keep everything
1064 right in this order. */
1065
1066 void
1067 symbol_table::materialize_all_clones (void)
1068 {
1069 cgraph_node *node;
1070 bool stabilized = false;
1071
1072
1073 if (symtab->dump_file)
1074 fprintf (symtab->dump_file, "Materializing clones\n");
1075
1076 cgraph_node::checking_verify_cgraph_nodes ();
1077
1078 /* We could also use topological order, but the number of iterations should
1079 be bounded by the number of IPA passes, since a single IPA pass is probably
1080 not going to create clones of clones it created itself. */
1081 while (!stabilized)
1082 {
1083 stabilized = true;
1084 FOR_EACH_FUNCTION (node)
1085 {
1086 if (node->clone_of && node->decl != node->clone_of->decl
1087 && !gimple_has_body_p (node->decl))
1088 {
1089 if (!node->clone_of->clone_of)
1090 node->clone_of->get_untransformed_body ();
1091 if (gimple_has_body_p (node->clone_of->decl))
1092 {
1093 if (symtab->dump_file)
1094 {
1095 fprintf (symtab->dump_file, "cloning %s to %s\n",
1096 xstrdup_for_dump (node->clone_of->name ()),
1097 xstrdup_for_dump (node->name ()));
1098 if (node->clone.tree_map)
1099 {
1100 unsigned int i;
1101 fprintf (symtab->dump_file, " replace map: ");
1102 for (i = 0;
1103 i < vec_safe_length (node->clone.tree_map);
1104 i++)
1105 {
1106 ipa_replace_map *replace_info;
1107 replace_info = (*node->clone.tree_map)[i];
1108 print_generic_expr (symtab->dump_file,
1109 replace_info->old_tree);
1110 fprintf (symtab->dump_file, " -> ");
1111 print_generic_expr (symtab->dump_file,
1112 replace_info->new_tree);
1113 fprintf (symtab->dump_file, "%s%s;",
1114 replace_info->replace_p ? "(replace)":"",
1115 replace_info->ref_p ? "(ref)":"");
1116 }
1117 fprintf (symtab->dump_file, "\n");
1118 }
1119 if (node->clone.args_to_skip)
1120 {
1121 fprintf (symtab->dump_file, " args_to_skip: ");
1122 dump_bitmap (symtab->dump_file,
1123 node->clone.args_to_skip);
1124 }
1125 if (node->clone.combined_args_to_skip)
1126 {
1127 fprintf (symtab->dump_file, " combined_args_to_skip:");
1128 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip);
1129 }
1130 }
1131 cgraph_materialize_clone (node);
1132 stabilized = false;
1133 }
1134 }
1135 }
1136 }
1137 FOR_EACH_FUNCTION (node)
1138 if (!node->analyzed && node->callees)
1139 {
1140 node->remove_callees ();
1141 node->remove_all_references ();
1142 }
1143 else
1144 node->clear_stmts_in_references ();
1145 if (symtab->dump_file)
1146 fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
1147
1148 cgraph_node::checking_verify_cgraph_nodes ();
1149
1150 symtab->remove_unreachable_nodes (symtab->dump_file);
1151 }
1152
1153 #include "gt-cgraphclones.h"