comparison gcc/cgraphunit.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 855418dad1a3
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements the main driver of the compilation process as well
23 as a few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface between the
26 tree based frontends and the backend (and middle end).
27
28 The front-end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front-end has parsed the whole body of the
33 function and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 functions.)
37
38 - varpool_finalize_variable
39
40 This function has the same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once the (source level) compilation unit is
46 finalized and will no longer change.
47
48 The call-graph construction and local function
49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C frontend).
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. The backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function; all references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones, or alternatively marking the
75 callgraph and varpool nodes they reference as needed.
76
77 ??? On tree-ssa, genericizing should take place here and we would avoid
78 the need for these hooks (replacing them by a genericizing hook).
79
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and their expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too,
87 and variables found to be needed are output to the assembly file. Via
88 the mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable, and thus
94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined and are not external are output.
98
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize the code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
107
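/* Illustrative sketch (not part of the original sources): based on the
   overview above, a front-end is expected to drive this module roughly as
   follows for each translation unit (names as used in the text above):

       cgraph_finalize_function (fndecl, false);   - once per parsed function
       varpool_finalize_variable (vardecl);        - once per static variable
       ...
       cgraph_finalize_compilation_unit ();        - after the unit is parsed
       cgraph_optimize ();                         - IPA analysis and output

   The exact entry points a particular front-end uses may differ.  */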
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "c-common.h"
130 #include "intl.h"
131 #include "function.h"
132 #include "ipa-prop.h"
133 #include "gimple.h"
134 #include "tree-iterator.h"
135 #include "tree-pass.h"
136 #include "output.h"
137
138 static void cgraph_expand_all_functions (void);
139 static void cgraph_mark_functions_to_output (void);
140 static void cgraph_expand_function (struct cgraph_node *);
141 static void cgraph_output_pending_asms (void);
142
143 static FILE *cgraph_dump_file;
144
145 /* A vector of FUNCTION_DECLs declared as static constructors. */
146 static GTY (()) VEC(tree, gc) *static_ctors;
147 /* A vector of FUNCTION_DECLs declared as static destructors. */
148 static GTY (()) VEC(tree, gc) *static_dtors;
149
150 /* When the target does not have ctors and dtors, we call all constructors
151 and destructors from a special initialization/destruction function
152 recognized by collect2.
153
154 When we are going to build this function, collect all constructors and
155 destructors and turn them into normal functions. */
156
157 static void
158 record_cdtor_fn (tree fndecl)
159 {
160 struct cgraph_node *node;
161 if (targetm.have_ctors_dtors
162 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
163 && !DECL_STATIC_DESTRUCTOR (fndecl)))
164 return;
165
166 if (DECL_STATIC_CONSTRUCTOR (fndecl))
167 {
168 VEC_safe_push (tree, gc, static_ctors, fndecl);
169 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
170 }
171 if (DECL_STATIC_DESTRUCTOR (fndecl))
172 {
173 VEC_safe_push (tree, gc, static_dtors, fndecl);
174 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
175 }
176 node = cgraph_node (fndecl);
177 node->local.disregard_inline_limits = 1;
178 cgraph_mark_reachable_node (node);
179 }
180
181 /* Define global constructor/destructor functions for the CDTORS, of
182 which there are LEN. The CDTORS are sorted by initialization
183 priority. If CTOR_P is true, these are constructors; otherwise,
184 they are destructors. */
185
186 static void
187 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
188 {
189 size_t i;
190
191 i = 0;
192 while (i < len)
193 {
194 tree body;
195 tree fn;
196 priority_type priority;
197
198 priority = 0;
199 body = NULL_TREE;
200 /* Find the next batch of constructors/destructors with the same
201 initialization priority. */
202 do
203 {
204 priority_type p;
205 fn = cdtors[i];
206 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
207 if (!body)
208 priority = p;
209 else if (p != priority)
210 break;
211 append_to_statement_list (build_function_call_expr (fn, 0),
212 &body);
213 ++i;
214 }
215 while (i < len);
216 gcc_assert (body != NULL_TREE);
217 /* Generate a function to call all the functions of like
218 priority. */
219 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
220 }
221 }
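/* Illustrative example (not part of the original sources): if CDTORS holds
   three constructors with priorities {100, 100, 65535}, the loop above emits
   two helper functions via cgraph_build_static_cdtor: one of priority 100
   calling the first two constructors, and one of priority 65535 calling the
   third. */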
222
223 /* Comparison function for qsort. P1 and P2 are actually of type
224 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
225 used to determine the sort order. */
226
227 static int
228 compare_ctor (const void *p1, const void *p2)
229 {
230 tree f1;
231 tree f2;
232 int priority1;
233 int priority2;
234
235 f1 = *(const tree *)p1;
236 f2 = *(const tree *)p2;
237 priority1 = DECL_INIT_PRIORITY (f1);
238 priority2 = DECL_INIT_PRIORITY (f2);
239
240 if (priority1 < priority2)
241 return -1;
242 else if (priority1 > priority2)
243 return 1;
244 else
245 /* Ensure a stable sort. */
246 return (const tree *)p1 - (const tree *)p2;
247 }
248
249 /* Comparison function for qsort. P1 and P2 are actually of type
250 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
251 used to determine the sort order. */
252
253 static int
254 compare_dtor (const void *p1, const void *p2)
255 {
256 tree f1;
257 tree f2;
258 int priority1;
259 int priority2;
260
261 f1 = *(const tree *)p1;
262 f2 = *(const tree *)p2;
263 priority1 = DECL_FINI_PRIORITY (f1);
264 priority2 = DECL_FINI_PRIORITY (f2);
265
266 if (priority1 < priority2)
267 return -1;
268 else if (priority1 > priority2)
269 return 1;
270 else
271 /* Ensure a stable sort. */
272 return (const tree *)p1 - (const tree *)p2;
273 }
274
275 /* Generate functions to call static constructors and destructors
276 for targets that do not support .ctors/.dtors sections. These
277 functions have magic names which are detected by collect2. */
278
279 static void
280 cgraph_build_cdtor_fns (void)
281 {
282 if (!VEC_empty (tree, static_ctors))
283 {
284 gcc_assert (!targetm.have_ctors_dtors);
285 qsort (VEC_address (tree, static_ctors),
286 VEC_length (tree, static_ctors),
287 sizeof (tree),
288 compare_ctor);
289 build_cdtor (/*ctor_p=*/true,
290 VEC_address (tree, static_ctors),
291 VEC_length (tree, static_ctors));
292 VEC_truncate (tree, static_ctors, 0);
293 }
294
295 if (!VEC_empty (tree, static_dtors))
296 {
297 gcc_assert (!targetm.have_ctors_dtors);
298 qsort (VEC_address (tree, static_dtors),
299 VEC_length (tree, static_dtors),
300 sizeof (tree),
301 compare_dtor);
302 build_cdtor (/*ctor_p=*/false,
303 VEC_address (tree, static_dtors),
304 VEC_length (tree, static_dtors));
305 VEC_truncate (tree, static_dtors, 0);
306 }
307 }
308
309 /* Determine if function DECL is needed. That is, visible either to
310 something outside this translation unit or to something magic in the
311 system configury. */
312
313 static bool
314 decide_is_function_needed (struct cgraph_node *node, tree decl)
315 {
316 if (MAIN_NAME_P (DECL_NAME (decl))
317 && TREE_PUBLIC (decl))
318 {
319 node->local.externally_visible = true;
320 return true;
321 }
322
323 /* If the user told us it is used, then it must be so. */
324 if (node->local.externally_visible)
325 return true;
326
327 /* ??? If the assembler name is set by hand, it is possible to assemble
328 the name later after finalizing the function and the fact is noticed
329 in assemble_name then. This is arguably a bug. */
330 if (DECL_ASSEMBLER_NAME_SET_P (decl)
331 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
332 return true;
333
334 /* With -fkeep-inline-functions we are keeping all inline functions except
335 for extern inline ones. */
336 if (flag_keep_inline_functions
337 && DECL_DECLARED_INLINE_P (decl)
338 && !DECL_EXTERNAL (decl)
339 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
340 return true;
341
342 /* If we decided it was needed before, but at the time we didn't have
343 the body of the function available, then it's still needed. We have
344 to go back and re-check its dependencies now. */
345 if (node->needed)
346 return true;
347
348 /* Externally visible functions must be output. The exception is
349 COMDAT functions that must be output only when they are needed.
350
351 When not optimizing, also output the static functions (see
352 PR24561), but don't do so for always_inline functions, functions
353 declared inline and nested functions. These were optimized out
354 in the original implementation and it is unclear whether we want
355 to change the behavior here. */
356 if (((TREE_PUBLIC (decl)
357 || (!optimize && !node->local.disregard_inline_limits
358 && !DECL_DECLARED_INLINE_P (decl)
359 && !node->origin))
360 && !flag_whole_program)
361 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
362 return true;
363
364 /* Constructors and destructors are reachable from the runtime by
365 some mechanism. */
366 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
367 return true;
368
369 return false;
370 }
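/* Illustrative note (not part of the original sources): under the rules
   above, main () is always needed because it is public and named main, while
   a plain static helper is not "needed" when optimizing; it is kept only as
   long as it stays reachable through call-graph edges and is reclaimed
   otherwise. */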
371
372 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
373 functions to the callgraph so that they look like ordinary reachable
374 functions inserted into the callgraph already at construction time. */
375
376 bool
377 cgraph_process_new_functions (void)
378 {
379 bool output = false;
380 tree fndecl;
381 struct cgraph_node *node;
382
383 /* Note that this queue may grow as it is being processed, as the new
384 functions may generate new ones. */
385 while (cgraph_new_nodes)
386 {
387 node = cgraph_new_nodes;
388 fndecl = node->decl;
389 cgraph_new_nodes = cgraph_new_nodes->next_needed;
390 switch (cgraph_state)
391 {
392 case CGRAPH_STATE_CONSTRUCTION:
393 /* At construction time we just need to finalize the function and move
394 it into the reachable functions list. */
395
396 node->next_needed = NULL;
397 cgraph_finalize_function (fndecl, false);
398 cgraph_mark_reachable_node (node);
399 output = true;
400 break;
401
402 case CGRAPH_STATE_IPA:
403 case CGRAPH_STATE_IPA_SSA:
404 /* When IPA optimization has already started, do all essential
405 transformations that have already been performed on the whole
406 cgraph but not on this function. */
407
408 gimple_register_cfg_hooks ();
409 if (!node->analyzed)
410 cgraph_analyze_function (node);
411 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
412 current_function_decl = fndecl;
413 compute_inline_parameters (node);
414 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
415 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
416 /* When not optimizing, be sure we run early local passes anyway
417 to expand OMP. */
418 || !optimize)
419 execute_pass_list (pass_early_local_passes.pass.sub);
420 free_dominance_info (CDI_POST_DOMINATORS);
421 free_dominance_info (CDI_DOMINATORS);
422 pop_cfun ();
423 current_function_decl = NULL;
424 break;
425
426 case CGRAPH_STATE_EXPANSION:
427 /* Functions created during expansion shall be compiled
428 directly. */
429 node->output = 0;
430 cgraph_expand_function (node);
431 break;
432
433 default:
434 gcc_unreachable ();
435 break;
436 }
437 cgraph_call_function_insertion_hooks (node);
438 }
439 return output;
440 }
441
442 /* As a GCC extension we allow redefinition of the function. The
443 semantics when the two bodies differ are not well defined.
444 We replace the old body with the new body, so in unit-at-a-time mode
445 we always use the new body, while in normal mode we may end up with
446 the old body inlined into some functions and the new body expanded and
447 inlined in others.
448
449 ??? It may make more sense to use one body for inlining and the other
450 body for expanding the function, but this is difficult to do. */
451
452 static void
453 cgraph_reset_node (struct cgraph_node *node)
454 {
455 /* If node->output is set, then we have already begun whole-unit analysis.
456 This is *not* testing for whether we've already emitted the function.
457 That case can be sort-of legitimately seen with real function redefinition
458 errors. I would argue that the front end should never present us with
459 such a case, but don't enforce that for now. */
460 gcc_assert (!node->output);
461
462 /* Reset our data structures so we can analyze the function again. */
463 memset (&node->local, 0, sizeof (node->local));
464 memset (&node->global, 0, sizeof (node->global));
465 memset (&node->rtl, 0, sizeof (node->rtl));
466 node->analyzed = false;
467 node->local.redefined_extern_inline = true;
468 node->local.finalized = false;
469
470 cgraph_node_remove_callees (node);
471
472 /* We may need to re-queue the node for assembling in case
473 we already processed it, ignored it as not needed, or got
474 a re-declaration in IMA mode. */
475 if (node->reachable)
476 {
477 struct cgraph_node *n;
478
479 for (n = cgraph_nodes_queue; n; n = n->next_needed)
480 if (n == node)
481 break;
482 if (!n)
483 node->reachable = 0;
484 }
485 }
486
487 static void
488 cgraph_lower_function (struct cgraph_node *node)
489 {
490 if (node->lowered)
491 return;
492 tree_lowering_passes (node->decl);
493 node->lowered = true;
494 }
495
496 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
497 logic in effect. If NESTED is true, then our caller cannot stand to have
498 the garbage collector run at the moment. We would need to either create
499 a new GC context, or just not compile right now. */
500
501 void
502 cgraph_finalize_function (tree decl, bool nested)
503 {
504 struct cgraph_node *node = cgraph_node (decl);
505
506 if (node->local.finalized)
507 cgraph_reset_node (node);
508
509 node->pid = cgraph_max_pid ++;
510 notice_global_symbol (decl);
511 node->local.finalized = true;
512 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
513 record_cdtor_fn (node->decl);
514 if (node->nested)
515 lower_nested_functions (decl);
516 gcc_assert (!node->nested);
517
518 if (decide_is_function_needed (node, decl))
519 cgraph_mark_needed_node (node);
520
521 /* Since we reclaim unreachable nodes at the end of every language
522 level unit, we need to be conservative about possible entry points
523 there. */
524 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
525 cgraph_mark_reachable_node (node);
526
527 /* If we've not yet emitted decl, tell the debug info about it. */
528 if (!TREE_ASM_WRITTEN (decl))
529 (*debug_hooks->deferred_inline_function) (decl);
530
531 /* Possibly warn about unused parameters. */
532 if (warn_unused_parameter)
533 do_warn_unused_parameter (decl);
534
535 if (!nested)
536 ggc_collect ();
537 }
538
539 /* The C99 extern inline keyword allows changing a declaration after the function
540 has been finalized. We need to re-decide whether we want to mark the function as
541 needed then. */
542
543 void
544 cgraph_mark_if_needed (tree decl)
545 {
546 struct cgraph_node *node = cgraph_node (decl);
547 if (node->local.finalized && decide_is_function_needed (node, decl))
548 cgraph_mark_needed_node (node);
549 }
550
551 /* Verify the consistency of the given cgraph node. */
552 void
553 verify_cgraph_node (struct cgraph_node *node)
554 {
555 struct cgraph_edge *e;
556 struct cgraph_node *main_clone;
557 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
558 struct function *saved_cfun = cfun;
559 basic_block this_block;
560 gimple_stmt_iterator gsi;
561 bool error_found = false;
562
563 if (errorcount || sorrycount)
564 return;
565
566 timevar_push (TV_CGRAPH_VERIFY);
567 /* debug_generic_stmt needs correct cfun */
568 set_cfun (this_cfun);
569 for (e = node->callees; e; e = e->next_callee)
570 if (e->aux)
571 {
572 error ("aux field set for edge %s->%s",
573 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
574 error_found = true;
575 }
576 if (node->count < 0)
577 {
578 error ("Execution count is negative");
579 error_found = true;
580 }
581 for (e = node->callers; e; e = e->next_caller)
582 {
583 if (e->count < 0)
584 {
585 error ("caller edge count is negative");
586 error_found = true;
587 }
588 if (e->frequency < 0)
589 {
590 error ("caller edge frequency is negative");
591 error_found = true;
592 }
593 if (e->frequency > CGRAPH_FREQ_MAX)
594 {
595 error ("caller edge frequency is too large");
596 error_found = true;
597 }
598 if (!e->inline_failed)
599 {
600 if (node->global.inlined_to
601 != (e->caller->global.inlined_to
602 ? e->caller->global.inlined_to : e->caller))
603 {
604 error ("inlined_to pointer is wrong");
605 error_found = true;
606 }
607 if (node->callers->next_caller)
608 {
609 error ("multiple inline callers");
610 error_found = true;
611 }
612 }
613 else
614 if (node->global.inlined_to)
615 {
616 error ("inlined_to pointer set for noninline callers");
617 error_found = true;
618 }
619 }
620 if (!node->callers && node->global.inlined_to)
621 {
622 error ("inlined_to pointer is set but no predecessors found");
623 error_found = true;
624 }
625 if (node->global.inlined_to == node)
626 {
627 error ("inlined_to pointer refers to itself");
628 error_found = true;
629 }
630
631 for (main_clone = cgraph_node (node->decl); main_clone;
632 main_clone = main_clone->next_clone)
633 if (main_clone == node)
634 break;
635 if (!cgraph_node (node->decl))
636 {
637 error ("node not found in cgraph_hash");
638 error_found = true;
639 }
640
641 if (node->analyzed
642 && !TREE_ASM_WRITTEN (node->decl)
643 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
644 {
645 if (this_cfun->cfg)
646 {
647 /* The nodes we're interested in are never shared, so walk
648 the tree ignoring duplicates. */
649 struct pointer_set_t *visited_nodes = pointer_set_create ();
650 /* Reach the trees by walking over the CFG, and note the
651 enclosing basic-blocks in the call edges. */
652 FOR_EACH_BB_FN (this_block, this_cfun)
653 for (gsi = gsi_start_bb (this_block);
654 !gsi_end_p (gsi);
655 gsi_next (&gsi))
656 {
657 gimple stmt = gsi_stmt (gsi);
658 tree decl;
659 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
660 {
661 struct cgraph_edge *e = cgraph_edge (node, stmt);
662 if (e)
663 {
664 if (e->aux)
665 {
666 error ("shared call_stmt:");
667 debug_gimple_stmt (stmt);
668 error_found = true;
669 }
670 if (e->callee->decl != cgraph_node (decl)->decl
671 && e->inline_failed)
672 {
673 error ("edge points to wrong declaration:");
674 debug_tree (e->callee->decl);
675 fprintf (stderr," Instead of:");
676 debug_tree (decl);
677 }
678 e->aux = (void *)1;
679 }
680 else
681 {
682 error ("missing callgraph edge for call stmt:");
683 debug_gimple_stmt (stmt);
684 error_found = true;
685 }
686 }
687 }
688 pointer_set_destroy (visited_nodes);
689 }
690 else
691 /* No CFG available?! */
692 gcc_unreachable ();
693
694 for (e = node->callees; e; e = e->next_callee)
695 {
696 if (!e->aux && !e->indirect_call)
697 {
698 error ("edge %s->%s has no corresponding call_stmt",
699 cgraph_node_name (e->caller),
700 cgraph_node_name (e->callee));
701 debug_gimple_stmt (e->call_stmt);
702 error_found = true;
703 }
704 e->aux = 0;
705 }
706 }
707 if (error_found)
708 {
709 dump_cgraph_node (stderr, node);
710 internal_error ("verify_cgraph_node failed");
711 }
712 set_cfun (saved_cfun);
713 timevar_pop (TV_CGRAPH_VERIFY);
714 }
715
716 /* Verify whole cgraph structure. */
717 void
718 verify_cgraph (void)
719 {
720 struct cgraph_node *node;
721
722 if (sorrycount || errorcount)
723 return;
724
725 for (node = cgraph_nodes; node; node = node->next)
726 verify_cgraph_node (node);
727 }
728
729 /* Output all asm statements we have stored up to be output. */
730
731 static void
732 cgraph_output_pending_asms (void)
733 {
734 struct cgraph_asm_node *can;
735
736 if (errorcount || sorrycount)
737 return;
738
739 for (can = cgraph_asm_nodes; can; can = can->next)
740 assemble_asm (can->asm_str);
741 cgraph_asm_nodes = NULL;
742 }
743
744 /* Analyze the function scheduled to be output. */
745 void
746 cgraph_analyze_function (struct cgraph_node *node)
747 {
748 tree decl = node->decl;
749
750 current_function_decl = decl;
751 push_cfun (DECL_STRUCT_FUNCTION (decl));
752 cgraph_lower_function (node);
753 node->analyzed = true;
754
755 pop_cfun ();
756 current_function_decl = NULL;
757 }
758
759 /* Look for externally_visible and used attributes and mark cgraph nodes
760 accordingly.
761
762 We cannot mark the nodes at the point the attributes are processed (in
763 handle_*_attribute) because the copy of the declarations available at that
764 point may not be canonical. For example, in:
765
766 void f();
767 void f() __attribute__((used));
768
769 the declaration we see in handle_used_attribute will be the second
770 declaration -- but the front end will subsequently merge that declaration
771 with the original declaration and discard the second declaration.
772
773 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
774
775 void f() {}
776 void f() __attribute__((externally_visible));
777
778 is valid.
779
780 So, we walk the nodes at the end of the translation unit, applying the
781 attributes at that point. */
782
783 static void
784 process_function_and_variable_attributes (struct cgraph_node *first,
785 struct varpool_node *first_var)
786 {
787 struct cgraph_node *node;
788 struct varpool_node *vnode;
789
790 for (node = cgraph_nodes; node != first; node = node->next)
791 {
792 tree decl = node->decl;
793 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
794 {
795 mark_decl_referenced (decl);
796 if (node->local.finalized)
797 cgraph_mark_needed_node (node);
798 }
799 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
800 {
801 if (! TREE_PUBLIC (node->decl))
802 warning (OPT_Wattributes,
803 "%J%<externally_visible%> attribute have effect only on public objects",
804 node->decl);
805 else
806 {
807 if (node->local.finalized)
808 cgraph_mark_needed_node (node);
809 node->local.externally_visible = true;
810 }
811 }
812 }
813 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
814 {
815 tree decl = vnode->decl;
816 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
817 {
818 mark_decl_referenced (decl);
819 if (vnode->finalized)
820 varpool_mark_needed_node (vnode);
821 }
822 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
823 {
824 if (! TREE_PUBLIC (vnode->decl))
825 warning (OPT_Wattributes,
826 "%J%<externally_visible%> attribute have effect only on public objects",
827 vnode->decl);
828 else
829 {
830 if (vnode->finalized)
831 varpool_mark_needed_node (vnode);
832 vnode->externally_visible = true;
833 }
834 }
835 }
836 }
837
838 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
839 each reachable function) and build the cgraph.
840 The function can be called multiple times after inserting new nodes
841 into the beginning of the queue; just the new part of the queue is re-scanned then. */
842
843 static void
844 cgraph_analyze_functions (void)
845 {
846 /* Keep track of already processed nodes when called multiple times for
847 intermodule optimization. */
848 static struct cgraph_node *first_analyzed;
849 struct cgraph_node *first_processed = first_analyzed;
850 static struct varpool_node *first_analyzed_var;
851 struct cgraph_node *node, *next;
852
853 process_function_and_variable_attributes (first_processed,
854 first_analyzed_var);
855 first_processed = cgraph_nodes;
856 first_analyzed_var = varpool_nodes;
857 varpool_analyze_pending_decls ();
858 if (cgraph_dump_file)
859 {
860 fprintf (cgraph_dump_file, "Initial entry points:");
861 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
862 if (node->needed)
863 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
864 fprintf (cgraph_dump_file, "\n");
865 }
866 cgraph_process_new_functions ();
867
868 /* Propagate reachability flag and lower representation of all reachable
869 functions. In the future, lowering will introduce new functions and
870 new entry points on the way (by template instantiation and virtual
871 method table generation for instance). */
872 while (cgraph_nodes_queue)
873 {
874 struct cgraph_edge *edge;
875 tree decl = cgraph_nodes_queue->decl;
876
877 node = cgraph_nodes_queue;
878 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
879 node->next_needed = NULL;
880
881 /* ??? It is possible to create an extern inline function and later use the
882 weak alias attribute to kill its body. See
883 gcc.c-torture/compile/20011119-1.c */
884 if (!DECL_STRUCT_FUNCTION (decl))
885 {
886 cgraph_reset_node (node);
887 continue;
888 }
889
890 gcc_assert (!node->analyzed && node->reachable);
891 gcc_assert (gimple_body (decl));
892
893 cgraph_analyze_function (node);
894
895 for (edge = node->callees; edge; edge = edge->next_callee)
896 if (!edge->callee->reachable)
897 cgraph_mark_reachable_node (edge->callee);
898
899 /* If decl is a clone of an abstract function, mark that abstract
900 function so that we don't release its body. The DECL_INITIAL() of that
901 abstract function declaration will later be needed to output debug info. */
902 if (DECL_ABSTRACT_ORIGIN (decl))
903 {
904 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
905 origin_node->abstract_and_needed = true;
906 }
907
908 /* We finalize local static variables while constructing callgraph
909 edges. Process their attributes too. */
910 process_function_and_variable_attributes (first_processed,
911 first_analyzed_var);
912 first_processed = cgraph_nodes;
913 first_analyzed_var = varpool_nodes;
914 varpool_analyze_pending_decls ();
915 cgraph_process_new_functions ();
916 }
917
918 /* Collect entry points to the unit. */
919 if (cgraph_dump_file)
920 {
921 fprintf (cgraph_dump_file, "Unit entry points:");
922 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
923 if (node->needed)
924 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
925 fprintf (cgraph_dump_file, "\n\nInitial ");
926 dump_cgraph (cgraph_dump_file);
927 }
928
929 if (cgraph_dump_file)
930 fprintf (cgraph_dump_file, "\nReclaiming functions:");
931
932 for (node = cgraph_nodes; node != first_analyzed; node = next)
933 {
934 tree decl = node->decl;
935 next = node->next;
936
937 if (node->local.finalized && !gimple_has_body_p (decl))
938 cgraph_reset_node (node);
939
940 if (!node->reachable && gimple_has_body_p (decl))
941 {
942 if (cgraph_dump_file)
943 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
944 cgraph_remove_node (node);
945 continue;
946 }
947 else
948 node->next_needed = NULL;
949 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
950 gcc_assert (node->analyzed == node->local.finalized);
951 }
952 if (cgraph_dump_file)
953 {
954 fprintf (cgraph_dump_file, "\n\nReclaimed ");
955 dump_cgraph (cgraph_dump_file);
956 }
957 first_analyzed = cgraph_nodes;
958 ggc_collect ();
959 }
960
961 /* Analyze the whole compilation unit once it is parsed completely. */
962
963 void
964 cgraph_finalize_compilation_unit (void)
965 {
966 if (errorcount || sorrycount)
967 return;
968
969 finish_aliases_1 ();
970
971 if (!quiet_flag)
972 {
973 fprintf (stderr, "\nAnalyzing compilation unit\n");
974 fflush (stderr);
975 }
976
977 timevar_push (TV_CGRAPH);
978 cgraph_analyze_functions ();
979 timevar_pop (TV_CGRAPH);
980 }
981 /* Figure out what functions we want to assemble. */
982
983 static void
984 cgraph_mark_functions_to_output (void)
985 {
986 struct cgraph_node *node;
987
988 for (node = cgraph_nodes; node; node = node->next)
989 {
990 tree decl = node->decl;
991 struct cgraph_edge *e;
992
993 gcc_assert (!node->output);
994
995 for (e = node->callers; e; e = e->next_caller)
996 if (e->inline_failed)
997 break;
998
999 /* We need to output all local functions that are used and not
1000 always inlined, as well as those that are reachable from
1001 outside the current compilation unit. */
1002 if (node->analyzed
1003 && !node->global.inlined_to
1004 && (node->needed
1005 || (e && node->reachable))
1006 && !TREE_ASM_WRITTEN (decl)
1007 && !DECL_EXTERNAL (decl))
1008 node->output = 1;
1009 else
1010 {
1011 /* We should've reclaimed all functions that are not needed. */
1012 #ifdef ENABLE_CHECKING
1013 if (!node->global.inlined_to
1014 && gimple_has_body_p (decl)
1015 && !DECL_EXTERNAL (decl))
1016 {
1017 dump_cgraph_node (stderr, node);
1018 internal_error ("failed to reclaim unneeded function");
1019 }
1020 #endif
1021 gcc_assert (node->global.inlined_to
1022 || !gimple_has_body_p (decl)
1023 || DECL_EXTERNAL (decl));
1024
1025 }
1026
1027 }
1028 }
1029
1030 /* Expand function specified by NODE. */
1031
1032 static void
1033 cgraph_expand_function (struct cgraph_node *node)
1034 {
1035 tree decl = node->decl;
1036
1037 /* We ought to not compile any inline clones. */
1038 gcc_assert (!node->global.inlined_to);
1039
1040 announce_function (decl);
1041
1042 gcc_assert (node->lowered);
1043
1044 /* Generate RTL for the body of DECL. */
1045 if (lang_hooks.callgraph.emit_associated_thunks)
1046 lang_hooks.callgraph.emit_associated_thunks (decl);
1047 tree_rest_of_compilation (decl);
1048
1049 /* Make sure that BE didn't give up on compiling. */
1050 gcc_assert (TREE_ASM_WRITTEN (decl));
1051 current_function_decl = NULL;
1052 gcc_assert (!cgraph_preserve_function_body_p (decl));
1053 cgraph_release_function_body (node);
1054 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1055 points to the dead function body. */
1056 cgraph_node_remove_callees (node);
1057
1058 cgraph_function_flags_ready = true;
1059 }
1060
1061 /* Return true when the call on edge E was inlined; set *REASON to the reason inlining failed (NULL if it did not). */
1062
1063 bool
1064 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1065 {
1066 *reason = e->inline_failed;
1067 return !e->inline_failed;
1068 }
1069
1070
1071
1072 /* Expand all functions that must be output.
1073
1074 Attempt to topologically sort the nodes so a function is output when
1075 all the functions it calls are already assembled, to allow data to be
1076 propagated across the callgraph. Use a stack to get a smaller distance
1077 between a function and its callees (later we may choose to use a more
1078 sophisticated algorithm for function reordering; we will likely want
1079 to use subsections to make the output functions appear in top-down
1080 order). */
1081
1082 static void
1083 cgraph_expand_all_functions (void)
1084 {
1085 struct cgraph_node *node;
1086 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1087 int order_pos, new_order_pos = 0;
1088 int i;
1089
1090 order_pos = cgraph_postorder (order);
1091 gcc_assert (order_pos == cgraph_n_nodes);
1092
1093 /* The garbage collector may remove inline clones we eliminate during
1094 optimization, so we must be sure not to reference them. */
1095 for (i = 0; i < order_pos; i++)
1096 if (order[i]->output)
1097 order[new_order_pos++] = order[i];
1098
1099 for (i = new_order_pos - 1; i >= 0; i--)
1100 {
1101 node = order[i];
1102 if (node->output)
1103 {
1104 gcc_assert (node->reachable);
1105 node->output = 0;
1106 cgraph_expand_function (node);
1107 }
1108 }
1109 cgraph_process_new_functions ();
1110
1111 free (order);
1112
1113 }
1114
1115 /* This is used to sort the node types by the cgraph order number. */
1116
1117 struct cgraph_order_sort
1118 {
1119 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1120 union
1121 {
1122 struct cgraph_node *f;
1123 struct varpool_node *v;
1124 struct cgraph_asm_node *a;
1125 } u;
1126 };
1127
1128 /* Output all functions, variables, and asm statements in the order
1129 according to their order fields, which is the order in which they
1130 appeared in the file. This implements -fno-toplevel-reorder. In
1131 this mode we may output functions and variables which don't really
1132 need to be output. */
1133
1134 static void
1135 cgraph_output_in_order (void)
1136 {
1137 int max;
1138 size_t size;
1139 struct cgraph_order_sort *nodes;
1140 int i;
1141 struct cgraph_node *pf;
1142 struct varpool_node *pv;
1143 struct cgraph_asm_node *pa;
1144
1145 max = cgraph_order;
1146 size = max * sizeof (struct cgraph_order_sort);
1147 nodes = (struct cgraph_order_sort *) alloca (size);
1148 memset (nodes, 0, size);
1149
1150 varpool_analyze_pending_decls ();
1151
1152 for (pf = cgraph_nodes; pf; pf = pf->next)
1153 {
1154 if (pf->output)
1155 {
1156 i = pf->order;
1157 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1158 nodes[i].kind = ORDER_FUNCTION;
1159 nodes[i].u.f = pf;
1160 }
1161 }
1162
1163 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1164 {
1165 i = pv->order;
1166 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1167 nodes[i].kind = ORDER_VAR;
1168 nodes[i].u.v = pv;
1169 }
1170
1171 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1172 {
1173 i = pa->order;
1174 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1175 nodes[i].kind = ORDER_ASM;
1176 nodes[i].u.a = pa;
1177 }
1178
1179 /* In -fno-toplevel-reorder mode we output all statics; mark them as needed. */
1180 for (i = 0; i < max; ++i)
1181 {
1182 if (nodes[i].kind == ORDER_VAR)
1183 {
1184 varpool_mark_needed_node (nodes[i].u.v);
1185 }
1186 }
1187 varpool_empty_needed_queue ();
1188
1189 for (i = 0; i < max; ++i)
1190 {
1191 switch (nodes[i].kind)
1192 {
1193 case ORDER_FUNCTION:
1194 nodes[i].u.f->output = 0;
1195 cgraph_expand_function (nodes[i].u.f);
1196 break;
1197
1198 case ORDER_VAR:
1199 varpool_assemble_decl (nodes[i].u.v);
1200 break;
1201
1202 case ORDER_ASM:
1203 assemble_asm (nodes[i].u.a->asm_str);
1204 break;
1205
1206 case ORDER_UNDEFINED:
1207 break;
1208
1209 default:
1210 gcc_unreachable ();
1211 }
1212 }
1213
1214 cgraph_asm_nodes = NULL;
1215 }
1216
1217 /* Return true when the function body of DECL still needs to be kept around
1218 for later re-use. */
1219 bool
1220 cgraph_preserve_function_body_p (tree decl)
1221 {
1222 struct cgraph_node *node;
1223
1224 gcc_assert (cgraph_global_info_ready);
1225 /* Look if there is any clone around. */
1226 for (node = cgraph_node (decl); node; node = node->next_clone)
1227 if (node->global.inlined_to)
1228 return true;
1229 return false;
1230 }
1231
1232 static void
1233 ipa_passes (void)
1234 {
1235 set_cfun (NULL);
1236 current_function_decl = NULL;
1237 gimple_register_cfg_hooks ();
1238 bitmap_obstack_initialize (NULL);
1239 execute_ipa_pass_list (all_ipa_passes);
1240 bitmap_obstack_release (NULL);
1241 }
1242
1243 /* Perform simple optimizations based on callgraph. */
1244
1245 void
1246 cgraph_optimize (void)
1247 {
1248 if (errorcount || sorrycount)
1249 return;
1250
1251 #ifdef ENABLE_CHECKING
1252 verify_cgraph ();
1253 #endif
1254
1255 /* Call functions declared with the "constructor" or "destructor"
1256 attribute. */
1257 cgraph_build_cdtor_fns ();
1258
1259 /* The frontend may output common variables after the unit has been finalized.
1260 It is safe to deal with them here as they are always zero-initialized. */
1261 varpool_analyze_pending_decls ();
1262 cgraph_analyze_functions ();
1263
1264 timevar_push (TV_CGRAPHOPT);
1265 if (pre_ipa_mem_report)
1266 {
1267 fprintf (stderr, "Memory consumption before IPA\n");
1268 dump_memory_report (false);
1269 }
1270 if (!quiet_flag)
1271 fprintf (stderr, "Performing interprocedural optimizations\n");
1272 cgraph_state = CGRAPH_STATE_IPA;
1273
1274 /* Don't run the IPA passes if there were any error or sorry messages. */
1275 if (errorcount == 0 && sorrycount == 0)
1276 ipa_passes ();
1277
1278 /* This pass removes bodies of extern inline functions we never inlined.
1279 Do this later so other IPA passes see what is really going on. */
1280 cgraph_remove_unreachable_nodes (false, dump_file);
1281 cgraph_global_info_ready = true;
1282 if (cgraph_dump_file)
1283 {
1284 fprintf (cgraph_dump_file, "Optimized ");
1285 dump_cgraph (cgraph_dump_file);
1286 dump_varpool (cgraph_dump_file);
1287 }
1288 if (post_ipa_mem_report)
1289 {
1290 fprintf (stderr, "Memory consumption after IPA\n");
1291 dump_memory_report (false);
1292 }
1293 timevar_pop (TV_CGRAPHOPT);
1294
1295 /* Output everything. */
1296 if (!quiet_flag)
1297 fprintf (stderr, "Assembling functions:\n");
1298 #ifdef ENABLE_CHECKING
1299 verify_cgraph ();
1300 #endif
1301
1302 cgraph_mark_functions_to_output ();
1303
1304 cgraph_state = CGRAPH_STATE_EXPANSION;
1305 if (!flag_toplevel_reorder)
1306 cgraph_output_in_order ();
1307 else
1308 {
1309 cgraph_output_pending_asms ();
1310
1311 cgraph_expand_all_functions ();
1312 varpool_remove_unreferenced_decls ();
1313
1314 varpool_assemble_pending_decls ();
1315 }
1316 cgraph_process_new_functions ();
1317 cgraph_state = CGRAPH_STATE_FINISHED;
1318
1319 if (cgraph_dump_file)
1320 {
1321 fprintf (cgraph_dump_file, "\nFinal ");
1322 dump_cgraph (cgraph_dump_file);
1323 }
1324 #ifdef ENABLE_CHECKING
1325 verify_cgraph ();
1326 /* Double check that all inline clones are gone and that all
1327 function bodies have been released from memory. */
1328 if (!(sorrycount || errorcount))
1329 {
1330 struct cgraph_node *node;
1331 bool error_found = false;
1332
1333 for (node = cgraph_nodes; node; node = node->next)
1334 if (node->analyzed
1335 && (node->global.inlined_to
1336 || gimple_has_body_p (node->decl)))
1337 {
1338 error_found = true;
1339 dump_cgraph_node (stderr, node);
1340 }
1341 if (error_found)
1342 internal_error ("nodes with unreleased memory found");
1343 }
1344 #endif
1345 }
1346 /* Generate and emit a static constructor or destructor. WHICH must
1347 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1348 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1349 initialization priority for this constructor or destructor. */
1350
1351 void
1352 cgraph_build_static_cdtor (char which, tree body, int priority)
1353 {
1354 static int counter = 0;
1355 char which_buf[16];
1356 tree decl, name, resdecl;
1357
1358 /* The priority is encoded in the constructor or destructor name.
1359 collect2 will sort the names and arrange that they are called at
1360 program startup. */
1361 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1362 name = get_file_function_name (which_buf);
1363
1364 decl = build_decl (FUNCTION_DECL, name,
1365 build_function_type (void_type_node, void_list_node));
1366 current_function_decl = decl;
1367
1368 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1369 DECL_ARTIFICIAL (resdecl) = 1;
1370 DECL_RESULT (decl) = resdecl;
1371
1372 allocate_struct_function (decl, false);
1373
1374 TREE_STATIC (decl) = 1;
1375 TREE_USED (decl) = 1;
1376 DECL_ARTIFICIAL (decl) = 1;
1377 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1378 DECL_SAVED_TREE (decl) = body;
1379 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1380 DECL_UNINLINABLE (decl) = 1;
1381
1382 DECL_INITIAL (decl) = make_node (BLOCK);
1383 TREE_USED (DECL_INITIAL (decl)) = 1;
1384
1385 DECL_SOURCE_LOCATION (decl) = input_location;
1386 cfun->function_end_locus = input_location;
1387
1388 switch (which)
1389 {
1390 case 'I':
1391 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1392 decl_init_priority_insert (decl, priority);
1393 break;
1394 case 'D':
1395 DECL_STATIC_DESTRUCTOR (decl) = 1;
1396 decl_fini_priority_insert (decl, priority);
1397 break;
1398 default:
1399 gcc_unreachable ();
1400 }
1401
1402 gimplify_function_tree (decl);
1403
1404 cgraph_add_new_function (decl, false);
1405 cgraph_mark_needed_node (cgraph_node (decl));
1406 set_cfun (NULL);
1407 }
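/* Usage note (illustrative, not part of the original sources): within this
   file build_cdtor is the caller of this function; it passes 'I' or 'D'
   together with a STATEMENT_LIST of calls collected for one priority batch,
   e.g. cgraph_build_static_cdtor ('I', body, priority). */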
1408
1409 void
1410 init_cgraph (void)
1411 {
1412 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1413 }
1414
1415 /* The edges representing the callers of the NEW_VERSION node were
1416 fixed by cgraph_function_versioning (); now the call_expr in their
1417 respective tree code should be updated to call NEW_VERSION. */
1418
1419 static void
1420 update_call_expr (struct cgraph_node *new_version)
1421 {
1422 struct cgraph_edge *e;
1423
1424 gcc_assert (new_version);
1425
1426 /* Update the call expr on the edges to call the new version. */
1427 for (e = new_version->callers; e; e = e->next_caller)
1428 {
1429 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1430 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1431 /* Update EH information too, just in case. */
1432 if (!stmt_could_throw_p (e->call_stmt)
1433 && lookup_stmt_eh_region_fn (inner_function, e->call_stmt))
1434 remove_stmt_from_eh_region_fn (inner_function, e->call_stmt);
1435 }
1436 }
1437
1438
1439 /* Create a new cgraph node which is the new version of
1440 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1441 edges which should be redirected to point to
1442 NEW_VERSION. All the callee edges of OLD_VERSION
1443 are cloned to the new version node. Return the new
1444 version node. */
1445
1446 static struct cgraph_node *
1447 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1448 tree new_decl,
1449 VEC(cgraph_edge_p,heap) *redirect_callers)
1450 {
1451 struct cgraph_node *new_version;
1452 struct cgraph_edge *e, *new_e;
1453 struct cgraph_edge *next_callee;
1454 unsigned i;
1455
1456 gcc_assert (old_version);
1457
1458 new_version = cgraph_node (new_decl);
1459
1460 new_version->analyzed = true;
1461 new_version->local = old_version->local;
1462 new_version->global = old_version->global;
1463 new_version->rtl = old_version->rtl;
1464 new_version->reachable = true;
1465 new_version->count = old_version->count;
1466
1467 /* Clone the old node callees. Recursive calls are
1468 also cloned. */
1469 for (e = old_version->callees;e; e=e->next_callee)
1470 {
1471 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1472 e->loop_nest, true);
1473 new_e->count = e->count;
1474 }
1475 /* Fix recursive calls.
1476 If OLD_VERSION has a recursive call after the
1477 previous edge cloning, the new version will have an edge
1478 pointing to the old version, which is wrong.
1479 Redirect it to point to the new version. */
1480 for (e = new_version->callees ; e; e = next_callee)
1481 {
1482 next_callee = e->next_callee;
1483 if (e->callee == old_version)
1484 cgraph_redirect_edge_callee (e, new_version);
1485
1486 if (!next_callee)
1487 break;
1488 }
1489 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1490 {
1491 /* Redirect calls to the old version node to point to its new
1492 version. */
1493 cgraph_redirect_edge_callee (e, new_version);
1494 }
1495
1496 return new_version;
1497 }
1498
1499 /* Perform function versioning.
1500 Function versioning includes copying of the tree and
1501 a callgraph update (creating a new cgraph node and updating
1502 its callees and callers).
1503
1504 REDIRECT_CALLERS varray includes the edges to be redirected
1505 to the new version.
1506
1507 TREE_MAP is a mapping of tree nodes we want to replace with
1508 new ones (according to results of prior analysis).
1509 OLD_VERSION_NODE is the node that is versioned.
1510 It returns the new version's cgraph node.
1511 ARGS_TO_SKIP lists arguments to be omitted from the function.
1512 */
1513
1514 struct cgraph_node *
1515 cgraph_function_versioning (struct cgraph_node *old_version_node,
1516 VEC(cgraph_edge_p,heap) *redirect_callers,
1517 varray_type tree_map,
1518 bitmap args_to_skip)
1519 {
1520 tree old_decl = old_version_node->decl;
1521 struct cgraph_node *new_version_node = NULL;
1522 tree new_decl;
1523
1524 if (!tree_versionable_function_p (old_decl))
1525 return NULL;
1526
1527 /* Make a new FUNCTION_DECL tree node for the
1528 new version. */
1529 if (!args_to_skip)
1530 new_decl = copy_node (old_decl);
1531 else
1532 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1533
1534 /* Create the new version's call-graph node
1535 and update the edges of the new node. */
1536 new_version_node =
1537 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1538 redirect_callers);
1539
1540 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1541 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
1542
1543 /* Update the new version's properties.
1544 Make the new version visible only within this translation unit. Make sure
1545 that it is not weak either.
1546 ??? We cannot use COMDAT linkage because there is no
1547 ABI support for this. */
1548 DECL_EXTERNAL (new_version_node->decl) = 0;
1549 DECL_ONE_ONLY (new_version_node->decl) = 0;
1550 TREE_PUBLIC (new_version_node->decl) = 0;
1551 DECL_COMDAT (new_version_node->decl) = 0;
1552 DECL_WEAK (new_version_node->decl) = 0;
1553 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1554 new_version_node->local.externally_visible = 0;
1555 new_version_node->local.local = 1;
1556 new_version_node->lowered = true;
1557
1558 /* Update the call_expr on the edges to call the new version node. */
1559 update_call_expr (new_version_node);
1560
1561 cgraph_call_function_insertion_hooks (new_version_node);
1562 return new_version_node;
1563 }
1564
1565 /* Produce a separate function body for inline clones so the offline copy can be
1566 modified without affecting them. */
1567 struct cgraph_node *
1568 save_inline_function_body (struct cgraph_node *node)
1569 {
1570 struct cgraph_node *first_clone;
1571
1572 gcc_assert (node == cgraph_node (node->decl));
1573
1574 cgraph_lower_function (node);
1575
1576 first_clone = node->next_clone;
1577
1578 first_clone->decl = copy_node (node->decl);
1579 node->next_clone = NULL;
1580 first_clone->prev_clone = NULL;
1581 cgraph_insert_node_to_hashtable (first_clone);
1582 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1583
1584 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1585 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
1586
1587 DECL_EXTERNAL (first_clone->decl) = 0;
1588 DECL_ONE_ONLY (first_clone->decl) = 0;
1589 TREE_PUBLIC (first_clone->decl) = 0;
1590 DECL_COMDAT (first_clone->decl) = 0;
1591
1592 for (node = first_clone->next_clone; node; node = node->next_clone)
1593 node->decl = first_clone->decl;
1594 #ifdef ENABLE_CHECKING
1595 verify_cgraph_node (first_clone);
1596 #endif
1597 return first_clone;
1598 }
1599
1600 #include "gt-cgraphunit.h"