comparison gcc/cgraph.c @ 55:77e2b8dfacca gcc-4.4.5

update it from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents a06113de4d67
children b7f97abdc517
comparison
52:c156f1bd5cd9 55:77e2b8dfacca
1 /* Callgraph handling code. 1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc. 3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka 4 Contributed by Jan Hubicka
5 5
6 This file is part of GCC. 6 This file is part of GCC.
7 7
76 #include "ggc.h" 76 #include "ggc.h"
77 #include "debug.h" 77 #include "debug.h"
78 #include "target.h" 78 #include "target.h"
79 #include "basic-block.h" 79 #include "basic-block.h"
80 #include "cgraph.h" 80 #include "cgraph.h"
81 #include "varray.h"
82 #include "output.h" 81 #include "output.h"
83 #include "intl.h" 82 #include "intl.h"
84 #include "gimple.h" 83 #include "gimple.h"
85 #include "tree-dump.h" 84 #include "tree-dump.h"
86 #include "tree-flow.h" 85 #include "tree-flow.h"
87 #include "value-prof.h" 86 #include "value-prof.h"
87 #include "except.h"
88 #include "diagnostic.h"
88 89
89 static void cgraph_node_remove_callers (struct cgraph_node *node); 90 static void cgraph_node_remove_callers (struct cgraph_node *node);
90 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e); 91 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
91 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e); 92 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
92 93
403 { 404 {
404 const struct cgraph_node *n = (const struct cgraph_node *) p; 405 const struct cgraph_node *n = (const struct cgraph_node *) p;
405 return (hashval_t) DECL_UID (n->decl); 406 return (hashval_t) DECL_UID (n->decl);
406 } 407 }
407 408
409
408 /* Returns nonzero if P1 and P2 are equal. */ 410 /* Returns nonzero if P1 and P2 are equal. */
409 411
410 static int 412 static int
411 eq_node (const void *p1, const void *p2) 413 eq_node (const void *p1, const void *p2)
412 { 414 {
413 const struct cgraph_node *n1 = (const struct cgraph_node *) p1; 415 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
414 const struct cgraph_node *n2 = (const struct cgraph_node *) p2; 416 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
415 return DECL_UID (n1->decl) == DECL_UID (n2->decl); 417 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
416 } 418 }
417 419
420 /* Allocate new callgraph node. */
421
422 static inline struct cgraph_node *
423 cgraph_allocate_node (void)
424 {
425 struct cgraph_node *node;
426
427 if (free_nodes)
428 {
429 node = free_nodes;
430 free_nodes = NEXT_FREE_NODE (node);
431 }
432 else
433 {
434 node = GGC_CNEW (struct cgraph_node);
435 node->uid = cgraph_max_uid++;
436 }
437
438 return node;
439 }
440
418 /* Allocate new callgraph node and insert it into basic data structures. */ 441 /* Allocate new callgraph node and insert it into basic data structures. */
419 442
420 static struct cgraph_node * 443 static struct cgraph_node *
421 cgraph_create_node (void) 444 cgraph_create_node (void)
422 { 445 {
423 struct cgraph_node *node; 446 struct cgraph_node *node = cgraph_allocate_node ();
424
425 if (free_nodes)
426 {
427 node = free_nodes;
428 free_nodes = NEXT_FREE_NODE (node);
429 }
430 else
431 {
432 node = GGC_CNEW (struct cgraph_node);
433 node->uid = cgraph_max_uid++;
434 }
435 447
436 node->next = cgraph_nodes; 448 node->next = cgraph_nodes;
437 node->pid = -1; 449 node->pid = -1;
438 node->order = cgraph_order++; 450 node->order = cgraph_order++;
439 if (cgraph_nodes) 451 if (cgraph_nodes)
462 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT); 474 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
463 475
464 if (*slot) 476 if (*slot)
465 { 477 {
466 node = *slot; 478 node = *slot;
467 if (!node->master_clone) 479 if (node->same_body_alias)
468 node->master_clone = node; 480 node = node->same_body;
469 return node; 481 return node;
470 } 482 }
471 483
472 node = cgraph_create_node (); 484 node = cgraph_create_node ();
473 node->decl = decl; 485 node->decl = decl;
475 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL) 487 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
476 { 488 {
477 node->origin = cgraph_node (DECL_CONTEXT (decl)); 489 node->origin = cgraph_node (DECL_CONTEXT (decl));
478 node->next_nested = node->origin->nested; 490 node->next_nested = node->origin->nested;
479 node->origin->nested = node; 491 node->origin->nested = node;
480 node->master_clone = node;
481 } 492 }
482 if (assembler_name_hash) 493 if (assembler_name_hash)
483 { 494 {
484 void **aslot; 495 void **aslot;
485 tree name = DECL_ASSEMBLER_NAME (decl); 496 tree name = DECL_ASSEMBLER_NAME (decl);
491 it is __builtin_strlen and strlen, for instance. Do we need to 502 it is __builtin_strlen and strlen, for instance. Do we need to
492 record them all? Original implementation marked just first one 503 record them all? Original implementation marked just first one
493 so lets hope for the best. */ 504 so lets hope for the best. */
494 if (*aslot == NULL) 505 if (*aslot == NULL)
495 *aslot = node; 506 *aslot = node;
507 }
508 return node;
509 }
510
511 /* Mark ALIAS as an alias to DECL. */
512
513 static struct cgraph_node *
514 cgraph_same_body_alias_1 (tree alias, tree decl)
515 {
516 struct cgraph_node key, *alias_node, *decl_node, **slot;
517
518 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
519 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
520 decl_node = cgraph_node (decl);
521
522 key.decl = alias;
523
524 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
525
526 /* If the cgraph_node has been already created, fail. */
527 if (*slot)
528 return NULL;
529
530 alias_node = cgraph_allocate_node ();
531 alias_node->decl = alias;
532 alias_node->same_body_alias = 1;
533 alias_node->same_body = decl_node;
534 alias_node->previous = NULL;
535 if (decl_node->same_body)
536 decl_node->same_body->previous = alias_node;
537 alias_node->next = decl_node->same_body;
538 alias_node->thunk.alias = decl;
539 decl_node->same_body = alias_node;
540 *slot = alias_node;
541 return alias_node;
542 }
543
544 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
545 Same body aliases are output whenever the body of DECL is output,
546 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
547
548 bool
549 cgraph_same_body_alias (tree alias, tree decl)
550 {
551 #ifndef ASM_OUTPUT_DEF
552 /* If aliases aren't supported by the assembler, fail. */
553 return false;
554 #endif
555
556 /*gcc_assert (!assembler_name_hash);*/
557
558 return cgraph_same_body_alias_1 (alias, decl) != NULL;
559 }
560
561 void
562 cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
563 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
564 tree virtual_offset,
565 tree real_alias)
566 {
567 struct cgraph_node *node = cgraph_get_node (alias);
568
569 if (node)
570 {
571 gcc_assert (node->local.finalized);
572 gcc_assert (!node->same_body);
573 cgraph_remove_node (node);
574 }
575
576 node = cgraph_same_body_alias_1 (alias, decl);
577 gcc_assert (node);
578 #ifdef ENABLE_CHECKING
579 gcc_assert (!virtual_offset
580 || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
581 #endif
582 node->thunk.fixed_offset = fixed_offset;
583 node->thunk.this_adjusting = this_adjusting;
584 node->thunk.virtual_value = virtual_value;
585 node->thunk.virtual_offset_p = virtual_offset != NULL;
586 node->thunk.alias = real_alias;
587 node->thunk.thunk_p = true;
588 }
589
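The thunk fields recorded just above describe how the thunk's entry point must adjust the incoming `this' pointer before transferring control to the real function. A rough, purely illustrative restatement in C follows; the offset value, the callee name and the argument type are made up, and real thunks are emitted directly by the back end (including the vtable-based adjustment used when thunk.virtual_offset_p is set):

extern void real_function (void *adjusted_this);  /* stands in for thunk.alias */

void
thunk_entry (char *this_ptr)
{
  /* A this-adjusting thunk shifts `this' by thunk.fixed_offset and
     forwards the call; the virtual_value/virtual_offset_p case adds a
     further adjustment read from the vtable (omitted here).  */
  real_function (this_ptr + 16 /* thunk.fixed_offset */);
}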
590 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
591 is assigned. */
592
593 struct cgraph_node *
594 cgraph_get_node (tree decl)
595 {
596 struct cgraph_node key, *node = NULL, **slot;
597
598 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
599
600 if (!cgraph_hash)
601 return NULL;
602
603 key.decl = decl;
604
605 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
606 NO_INSERT);
607
608 if (slot && *slot)
609 {
610 node = *slot;
611 if (node->same_body_alias)
612 node = node->same_body;
496 } 613 }
497 return node; 614 return node;
498 } 615 }
499 616
500 /* Insert already constructed node into hashtable. */ 617 /* Insert already constructed node into hashtable. */
552 INSERT); 669 INSERT);
553 /* We can have multiple declarations with same assembler name. For C++ 670 /* We can have multiple declarations with same assembler name. For C++
554 it is __builtin_strlen and strlen, for instance. Do we need to 671 it is __builtin_strlen and strlen, for instance. Do we need to
555 record them all? Original implementation marked just first one 672 record them all? Original implementation marked just first one
556 so lets hope for the best. */ 673 so lets hope for the best. */
557 if (*slot) 674 if (!*slot)
558 continue; 675 *slot = node;
559 *slot = node; 676 if (node->same_body)
677 {
678 struct cgraph_node *alias;
679
680 for (alias = node->same_body; alias; alias = alias->next)
681 {
682 hashval_t hash;
683 name = DECL_ASSEMBLER_NAME (alias->decl);
684 hash = decl_assembler_name_hash (name);
685 slot = htab_find_slot_with_hash (assembler_name_hash, name,
686 hash, INSERT);
687 if (!*slot)
688 *slot = alias;
689 }
690 }
560 } 691 }
561 } 692 }
562 693
563 slot = htab_find_slot_with_hash (assembler_name_hash, asmname, 694 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
564 decl_assembler_name_hash (asmname), 695 decl_assembler_name_hash (asmname),
565 NO_INSERT); 696 NO_INSERT);
566 697
567 if (slot) 698 if (slot)
568 return (struct cgraph_node *) *slot; 699 {
700 node = (struct cgraph_node *) *slot;
701 if (node->same_body_alias)
702 node = node->same_body;
703 return node;
704 }
569 return NULL; 705 return NULL;
570 } 706 }
571 707
572 /* Returns a hash value for X (which really is a die_struct). */ 708 /* Returns a hash value for X (which really is a die_struct). */
573 709
629 765
630 return e; 766 return e;
631 } 767 }
632 768
633 769
634 /* Change field call_smt of edge E to NEW_STMT. */ 770 /* Change field call_stmt of edge E to NEW_STMT. */
635 771
636 void 772 void
637 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt) 773 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
638 { 774 {
639 if (e->caller->call_site_hash) 775 if (e->caller->call_site_hash)
641 htab_remove_elt_with_hash (e->caller->call_site_hash, 777 htab_remove_elt_with_hash (e->caller->call_site_hash,
642 e->call_stmt, 778 e->call_stmt,
643 htab_hash_pointer (e->call_stmt)); 779 htab_hash_pointer (e->call_stmt));
644 } 780 }
645 e->call_stmt = new_stmt; 781 e->call_stmt = new_stmt;
782 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
783 e->can_throw_external = stmt_can_throw_external (new_stmt);
784 pop_cfun ();
646 if (e->caller->call_site_hash) 785 if (e->caller->call_site_hash)
647 { 786 {
648 void **slot; 787 void **slot;
649 slot = htab_find_slot_with_hash (e->caller->call_site_hash, 788 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
650 e->call_stmt, 789 e->call_stmt,
653 gcc_assert (!*slot); 792 gcc_assert (!*slot);
654 *slot = e; 793 *slot = e;
655 } 794 }
656 } 795 }
657 796
797 /* Like cgraph_set_call_stmt but walk the clone tree and update all
798 clones sharing the same function body. */
799
800 void
801 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
802 gimple old_stmt, gimple new_stmt)
803 {
804 struct cgraph_node *node;
805 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
806
807 if (edge)
808 cgraph_set_call_stmt (edge, new_stmt);
809
810 node = orig->clones;
811 if (node)
812 while (node != orig)
813 {
814 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
815 if (edge)
816 cgraph_set_call_stmt (edge, new_stmt);
817 if (node->clones)
818 node = node->clones;
819 else if (node->next_sibling_clone)
820 node = node->next_sibling_clone;
821 else
822 {
823 while (node != orig && !node->next_sibling_clone)
824 node = node->clone_of;
825 if (node != orig)
826 node = node->next_sibling_clone;
827 }
828 }
829 }
830
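cgraph_set_call_stmt_including_clones above, and cgraph_create_edge_including_clones and cgraph_update_edges_for_call_stmt further down, all use the same non-recursive preorder walk over the clone tree rooted at ORIG. A minimal, self-contained sketch of just that traversal follows; the struct and the visit callback are placeholders, not types from cgraph.c:

/* Walk every clone of ORIG in preorder: descend into children first,
   then move to the next sibling, otherwise climb back up until an
   unvisited sibling (or the root) is found.  */

struct clone
{
  struct clone *clones;              /* first child clone */
  struct clone *next_sibling_clone;  /* next sibling in parent's list */
  struct clone *clone_of;            /* parent in the clone tree */
};

static void
walk_clone_tree (struct clone *orig, void (*visit) (struct clone *))
{
  struct clone *node = orig->clones;

  if (!node)
    return;
  while (node != orig)
    {
      visit (node);
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != orig && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != orig)
            node = node->next_sibling_clone;
        }
    }
}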
831 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
832 same function body. If clones already have edge for OLD_STMT; only
833 update the edge same way as cgraph_set_call_stmt_including_clones does.
834
835 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
836 frequencies of the clones. */
837
838 void
839 cgraph_create_edge_including_clones (struct cgraph_node *orig,
840 struct cgraph_node *callee,
841 gimple old_stmt,
842 gimple stmt, gcov_type count,
843 int freq, int loop_depth,
844 cgraph_inline_failed_t reason)
845 {
846 struct cgraph_node *node;
847 struct cgraph_edge *edge;
848
849 if (!cgraph_edge (orig, stmt))
850 {
851 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
852 edge->inline_failed = reason;
853 }
854
855 node = orig->clones;
856 if (node)
857 while (node != orig)
858 {
859 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
860
861 /* It is possible that clones already contain the edge while
862 master didn't. Either we promoted indirect call into direct
863 call in the clone or we are processing clones of unreachable
864 master where edges has been rmeoved. */
865 if (edge)
866 cgraph_set_call_stmt (edge, stmt);
867 else if (!cgraph_edge (node, stmt))
868 {
869 edge = cgraph_create_edge (node, callee, stmt, count,
870 freq, loop_depth);
871 edge->inline_failed = reason;
872 }
873
874 if (node->clones)
875 node = node->clones;
876 else if (node->next_sibling_clone)
877 node = node->next_sibling_clone;
878 else
879 {
880 while (node != orig && !node->next_sibling_clone)
881 node = node->clone_of;
882 if (node != orig)
883 node = node->next_sibling_clone;
884 }
885 }
886 }
887
888 /* Give initial reasons why inlining would fail on EDGE. This gets either
889 nullified or usually overwritten by more precise reasons later. */
890
891 static void
892 initialize_inline_failed (struct cgraph_edge *e)
893 {
894 struct cgraph_node *callee = e->callee;
895
896 if (!callee->analyzed)
897 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
898 else if (callee->local.redefined_extern_inline)
899 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
900 else if (!callee->local.inlinable)
901 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
902 else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
903 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
904 else
905 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
906 }
907
658 /* Create edge from CALLER to CALLEE in the cgraph. */ 908 /* Create edge from CALLER to CALLEE in the cgraph. */
659 909
660 struct cgraph_edge * 910 struct cgraph_edge *
661 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee, 911 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
662 gimple call_stmt, gcov_type count, int freq, int nest) 912 gimple call_stmt, gcov_type count, int freq, int nest)
663 { 913 {
664 struct cgraph_edge *edge; 914 struct cgraph_edge *edge;
665 915
916
917 /* LTO does not actually have access to the call_stmt since these
918 have not been loaded yet. */
919 if (call_stmt)
920 {
666 #ifdef ENABLE_CHECKING 921 #ifdef ENABLE_CHECKING
667 /* This is rather pricely check possibly trigerring construction of call stmt 922 /* This is rather pricely check possibly trigerring construction of
668 hashtable. */ 923 call stmt hashtable. */
669 gcc_assert (!cgraph_edge (caller, call_stmt)); 924 gcc_assert (!cgraph_edge (caller, call_stmt));
670 #endif 925 #endif
671 926
672 gcc_assert (is_gimple_call (call_stmt)); 927 gcc_assert (is_gimple_call (call_stmt));
928 }
673 929
674 if (free_edges) 930 if (free_edges)
675 { 931 {
676 edge = free_edges; 932 edge = free_edges;
677 free_edges = NEXT_FREE_EDGE (edge); 933 free_edges = NEXT_FREE_EDGE (edge);
679 else 935 else
680 { 936 {
681 edge = GGC_NEW (struct cgraph_edge); 937 edge = GGC_NEW (struct cgraph_edge);
682 edge->uid = cgraph_edge_max_uid++; 938 edge->uid = cgraph_edge_max_uid++;
683 } 939 }
684
685 if (!callee->analyzed)
686 edge->inline_failed = N_("function body not available");
687 else if (callee->local.redefined_extern_inline)
688 edge->inline_failed = N_("redefined extern inline functions are not "
689 "considered for inlining");
690 else if (callee->local.inlinable)
691 edge->inline_failed = N_("function not considered for inlining");
692 else
693 edge->inline_failed = N_("function not inlinable");
694 940
695 edge->aux = NULL; 941 edge->aux = NULL;
696 942
697 edge->caller = caller; 943 edge->caller = caller;
698 edge->callee = callee; 944 edge->callee = callee;
699 edge->call_stmt = call_stmt; 945 edge->call_stmt = call_stmt;
946 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
947 edge->can_throw_external
948 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
949 pop_cfun ();
700 edge->prev_caller = NULL; 950 edge->prev_caller = NULL;
701 edge->next_caller = callee->callers; 951 edge->next_caller = callee->callers;
702 if (callee->callers) 952 if (callee->callers)
703 callee->callers->prev_caller = edge; 953 callee->callers->prev_caller = edge;
704 edge->prev_callee = NULL; 954 edge->prev_callee = NULL;
712 edge->frequency = freq; 962 edge->frequency = freq;
713 gcc_assert (freq >= 0); 963 gcc_assert (freq >= 0);
714 gcc_assert (freq <= CGRAPH_FREQ_MAX); 964 gcc_assert (freq <= CGRAPH_FREQ_MAX);
715 edge->loop_nest = nest; 965 edge->loop_nest = nest;
716 edge->indirect_call = 0; 966 edge->indirect_call = 0;
717 if (caller->call_site_hash) 967 edge->call_stmt_cannot_inline_p =
968 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
969 if (call_stmt && caller->call_site_hash)
718 { 970 {
719 void **slot; 971 void **slot;
720 slot = htab_find_slot_with_hash (caller->call_site_hash, 972 slot = htab_find_slot_with_hash (caller->call_site_hash,
721 edge->call_stmt, 973 edge->call_stmt,
722 htab_hash_pointer 974 htab_hash_pointer
723 (edge->call_stmt), 975 (edge->call_stmt),
724 INSERT); 976 INSERT);
725 gcc_assert (!*slot); 977 gcc_assert (!*slot);
726 *slot = edge; 978 *slot = edge;
727 } 979 }
980
981 initialize_inline_failed (edge);
982
728 return edge; 983 return edge;
729 } 984 }
730 985
731 /* Remove the edge E from the list of the callers of the callee. */ 986 /* Remove the edge E from the list of the callers of the callee. */
732 987
808 e->callee = n; 1063 e->callee = n;
809 } 1064 }
810 1065
811 1066
812 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL 1067 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
813 OLD_STMT changed into NEW_STMT. */ 1068 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
814 1069 of OLD_STMT if it was previously call statement. */
815 void 1070
816 cgraph_update_edges_for_call_stmt (gimple old_stmt, gimple new_stmt) 1071 static void
817 { 1072 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
818 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fn (new_stmt) : 0; 1073 gimple old_stmt, tree old_call, gimple new_stmt)
819 tree old_call = (is_gimple_call (old_stmt)) ? gimple_call_fn (old_stmt) : 0; 1074 {
820 struct cgraph_node *node = cgraph_node (cfun->decl); 1075 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
821 1076
1077 /* We are seeing indirect calls, then there is nothing to update. */
1078 if (!new_call && !old_call)
1079 return;
1080 /* See if we turned indirect call into direct call or folded call to one builtin
1081 into different bultin. */
822 if (old_call != new_call) 1082 if (old_call != new_call)
823 { 1083 {
824 struct cgraph_edge *e = cgraph_edge (node, old_stmt); 1084 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
825 struct cgraph_edge *ne = NULL; 1085 struct cgraph_edge *ne = NULL;
826 tree new_decl; 1086 gcov_type count;
1087 int frequency;
1088 int loop_nest;
827 1089
828 if (e) 1090 if (e)
829 { 1091 {
830 gcov_type count = e->count; 1092 /* See if the call is already there. It might be because of indirect
831 int frequency = e->frequency; 1093 inlining already found it. */
832 int loop_nest = e->loop_nest; 1094 if (new_call && e->callee->decl == new_call)
833 1095 return;
1096
1097 /* Otherwise remove edge and create new one; we can't simply redirect
1098 since function has changed, so inline plan and other information
1099 attached to edge is invalid. */
1100 count = e->count;
1101 frequency = e->frequency;
1102 loop_nest = e->loop_nest;
834 cgraph_remove_edge (e); 1103 cgraph_remove_edge (e);
835 if (new_call)
836 {
837 new_decl = gimple_call_fndecl (new_stmt);
838 if (new_decl)
839 {
840 ne = cgraph_create_edge (node, cgraph_node (new_decl),
841 new_stmt, count, frequency,
842 loop_nest);
843 gcc_assert (ne->inline_failed);
844 }
845 }
846 } 1104 }
847 } 1105 else
1106 {
1107 /* We are seeing new direct call; compute profile info based on BB. */
1108 basic_block bb = gimple_bb (new_stmt);
1109 count = bb->count;
1110 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1111 bb);
1112 loop_nest = bb->loop_depth;
1113 }
1114
1115 if (new_call)
1116 {
1117 ne = cgraph_create_edge (node, cgraph_node (new_call),
1118 new_stmt, count, frequency,
1119 loop_nest);
1120 gcc_assert (ne->inline_failed);
1121 }
1122 }
1123 /* We only updated the call stmt; update pointer in cgraph edge.. */
848 else if (old_stmt != new_stmt) 1124 else if (old_stmt != new_stmt)
849 { 1125 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
850 struct cgraph_edge *e = cgraph_edge (node, old_stmt); 1126 }
851 1127
852 if (e) 1128 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
853 cgraph_set_call_stmt (e, new_stmt); 1129 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
854 } 1130 of OLD_STMT before it was updated (updating can happen inplace). */
1131
1132 void
1133 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1134 {
1135 struct cgraph_node *orig = cgraph_node (cfun->decl);
1136 struct cgraph_node *node;
1137
1138 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1139 if (orig->clones)
1140 for (node = orig->clones; node != orig;)
1141 {
1142 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1143 if (node->clones)
1144 node = node->clones;
1145 else if (node->next_sibling_clone)
1146 node = node->next_sibling_clone;
1147 else
1148 {
1149 while (node != orig && !node->next_sibling_clone)
1150 node = node->clone_of;
1151 if (node != orig)
1152 node = node->next_sibling_clone;
1153 }
1154 }
855 } 1155 }
856 1156
857 1157
858 /* Remove all callees from the node. */ 1158 /* Remove all callees from the node. */
859 1159
927 free_histograms (); 1227 free_histograms ();
928 gcc_assert (!current_loops); 1228 gcc_assert (!current_loops);
929 pop_cfun(); 1229 pop_cfun();
930 gimple_set_body (node->decl, NULL); 1230 gimple_set_body (node->decl, NULL);
931 VEC_free (ipa_opt_pass, heap, 1231 VEC_free (ipa_opt_pass, heap,
932 DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply); 1232 node->ipa_transforms_to_apply);
933 /* Struct function hangs a lot of data that would leak if we didn't 1233 /* Struct function hangs a lot of data that would leak if we didn't
934 removed all pointers to it. */ 1234 removed all pointers to it. */
935 ggc_free (DECL_STRUCT_FUNCTION (node->decl)); 1235 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
936 DECL_STRUCT_FUNCTION (node->decl) = NULL; 1236 DECL_STRUCT_FUNCTION (node->decl) = NULL;
937 } 1237 }
941 needed to emit debug info later. */ 1241 needed to emit debug info later. */
942 if (!node->abstract_and_needed) 1242 if (!node->abstract_and_needed)
943 DECL_INITIAL (node->decl) = error_mark_node; 1243 DECL_INITIAL (node->decl) = error_mark_node;
944 } 1244 }
945 1245
1246 /* Remove same body alias node. */
1247
1248 void
1249 cgraph_remove_same_body_alias (struct cgraph_node *node)
1250 {
1251 void **slot;
1252 int uid = node->uid;
1253
1254 gcc_assert (node->same_body_alias);
1255 if (node->previous)
1256 node->previous->next = node->next;
1257 else
1258 node->same_body->same_body = node->next;
1259 if (node->next)
1260 node->next->previous = node->previous;
1261 node->next = NULL;
1262 node->previous = NULL;
1263 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1264 if (*slot == node)
1265 htab_clear_slot (cgraph_hash, slot);
1266 if (assembler_name_hash)
1267 {
1268 tree name = DECL_ASSEMBLER_NAME (node->decl);
1269 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1270 decl_assembler_name_hash (name),
1271 NO_INSERT);
1272 if (slot && *slot == node)
1273 htab_clear_slot (assembler_name_hash, slot);
1274 }
1275
1276 /* Clear out the node to NULL all pointers and add the node to the free
1277 list. */
1278 memset (node, 0, sizeof(*node));
1279 node->uid = uid;
1280 NEXT_FREE_NODE (node) = free_nodes;
1281 free_nodes = node;
1282 }
1283
946 /* Remove the node from cgraph. */ 1284 /* Remove the node from cgraph. */
947 1285
948 void 1286 void
949 cgraph_remove_node (struct cgraph_node *node) 1287 cgraph_remove_node (struct cgraph_node *node)
950 { 1288 {
954 int uid = node->uid; 1292 int uid = node->uid;
955 1293
956 cgraph_call_node_removal_hooks (node); 1294 cgraph_call_node_removal_hooks (node);
957 cgraph_node_remove_callers (node); 1295 cgraph_node_remove_callers (node);
958 cgraph_node_remove_callees (node); 1296 cgraph_node_remove_callees (node);
1297 VEC_free (ipa_opt_pass, heap,
1298 node->ipa_transforms_to_apply);
959 1299
960 /* Incremental inlining access removed nodes stored in the postorder list. 1300 /* Incremental inlining access removed nodes stored in the postorder list.
961 */ 1301 */
962 node->needed = node->reachable = false; 1302 node->needed = node->reachable = false;
963 for (n = node->nested; n; n = n->next_nested) 1303 for (n = node->nested; n; n = n->next_nested)
980 node->next = NULL; 1320 node->next = NULL;
981 node->previous = NULL; 1321 node->previous = NULL;
982 slot = htab_find_slot (cgraph_hash, node, NO_INSERT); 1322 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
983 if (*slot == node) 1323 if (*slot == node)
984 { 1324 {
985 if (node->next_clone) 1325 struct cgraph_node *next_inline_clone;
986 { 1326
987 struct cgraph_node *new_node = node->next_clone; 1327 for (next_inline_clone = node->clones;
988 struct cgraph_node *n; 1328 next_inline_clone && next_inline_clone->decl != node->decl;
989 1329 next_inline_clone = next_inline_clone->next_sibling_clone)
990 /* Make the next clone be the master clone */ 1330 ;
991 for (n = new_node; n; n = n->next_clone) 1331
992 n->master_clone = new_node; 1332 /* If there is inline clone of the node being removed, we need
993 1333 to put it into the position of removed node and reorganize all
994 *slot = new_node; 1334 other clones to be based on it. */
995 node->next_clone->prev_clone = NULL; 1335 if (next_inline_clone)
996 } 1336 {
1337 struct cgraph_node *n;
1338 struct cgraph_node *new_clones;
1339
1340 *slot = next_inline_clone;
1341
1342 /* Unlink inline clone from the list of clones of removed node. */
1343 if (next_inline_clone->next_sibling_clone)
1344 next_inline_clone->next_sibling_clone->prev_sibling_clone
1345 = next_inline_clone->prev_sibling_clone;
1346 if (next_inline_clone->prev_sibling_clone)
1347 {
1348 gcc_assert (node->clones != next_inline_clone);
1349 next_inline_clone->prev_sibling_clone->next_sibling_clone
1350 = next_inline_clone->next_sibling_clone;
1351 }
1352 else
1353 {
1354 gcc_assert (node->clones == next_inline_clone);
1355 node->clones = next_inline_clone->next_sibling_clone;
1356 }
1357
1358 new_clones = node->clones;
1359 node->clones = NULL;
1360
1361 /* Copy clone info. */
1362 next_inline_clone->clone = node->clone;
1363
1364 /* Now place it into clone tree at same level at NODE. */
1365 next_inline_clone->clone_of = node->clone_of;
1366 next_inline_clone->prev_sibling_clone = NULL;
1367 next_inline_clone->next_sibling_clone = NULL;
1368 if (node->clone_of)
1369 {
1370 if (node->clone_of->clones)
1371 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1372 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1373 node->clone_of->clones = next_inline_clone;
1374 }
1375
1376 /* Merge the clone list. */
1377 if (new_clones)
1378 {
1379 if (!next_inline_clone->clones)
1380 next_inline_clone->clones = new_clones;
1381 else
1382 {
1383 n = next_inline_clone->clones;
1384 while (n->next_sibling_clone)
1385 n = n->next_sibling_clone;
1386 n->next_sibling_clone = new_clones;
1387 new_clones->prev_sibling_clone = n;
1388 }
1389 }
1390
1391 /* Update clone_of pointers. */
1392 n = new_clones;
1393 while (n)
1394 {
1395 n->clone_of = next_inline_clone;
1396 n = n->next_sibling_clone;
1397 }
1398 }
997 else 1399 else
998 { 1400 {
999 htab_clear_slot (cgraph_hash, slot); 1401 htab_clear_slot (cgraph_hash, slot);
1000 kill_body = true; 1402 kill_body = true;
1001 } 1403 }
1002 } 1404
1003 else 1405 }
1004 { 1406 if (node->prev_sibling_clone)
1005 node->prev_clone->next_clone = node->next_clone; 1407 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1006 if (node->next_clone) 1408 else if (node->clone_of)
1007 node->next_clone->prev_clone = node->prev_clone; 1409 node->clone_of->clones = node->next_sibling_clone;
1410 if (node->next_sibling_clone)
1411 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1412 if (node->clones)
1413 {
1414 struct cgraph_node *n, *next;
1415
1416 if (node->clone_of)
1417 {
1418 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1419 n->clone_of = node->clone_of;
1420 n->clone_of = node->clone_of;
1421 n->next_sibling_clone = node->clone_of->clones;
1422 if (node->clone_of->clones)
1423 node->clone_of->clones->prev_sibling_clone = n;
1424 node->clone_of->clones = node->clones;
1425 }
1426 else
1427 {
1428 /* We are removing node with clones. this makes clones inconsistent,
1429 but assume they will be removed subsequently and just keep clone
1430 tree intact. This can happen in unreachable function removal since
1431 we remove unreachable functions in random order, not by bottom-up
1432 walk of clone trees. */
1433 for (n = node->clones; n; n = next)
1434 {
1435 next = n->next_sibling_clone;
1436 n->next_sibling_clone = NULL;
1437 n->prev_sibling_clone = NULL;
1438 n->clone_of = NULL;
1439 }
1440 }
1441 }
1442
1443 while (node->same_body)
1444 cgraph_remove_same_body_alias (node->same_body);
1445
1446 if (node->same_comdat_group)
1447 {
1448 struct cgraph_node *prev;
1449 for (prev = node->same_comdat_group;
1450 prev->same_comdat_group != node;
1451 prev = prev->same_comdat_group)
1452 ;
1453 if (node->same_comdat_group == prev)
1454 prev->same_comdat_group = NULL;
1455 else
1456 prev->same_comdat_group = node->same_comdat_group;
1457 node->same_comdat_group = NULL;
1008 } 1458 }
1009 1459
1010 /* While all the clones are removed after being proceeded, the function 1460 /* While all the clones are removed after being proceeded, the function
1011 itself is kept in the cgraph even after it is compiled. Check whether 1461 itself is kept in the cgraph even after it is compiled. Check whether
1012 we are done with this body and reclaim it proactively if this is the case. 1462 we are done with this body and reclaim it proactively if this is the case.
1013 */ 1463 */
1014 if (!kill_body && *slot) 1464 if (!kill_body && *slot)
1015 { 1465 {
1016 struct cgraph_node *n = (struct cgraph_node *) *slot; 1466 struct cgraph_node *n = (struct cgraph_node *) *slot;
1017 if (!n->next_clone && !n->global.inlined_to 1467 if (!n->clones && !n->clone_of && !n->global.inlined_to
1018 && (cgraph_global_info_ready 1468 && (cgraph_global_info_ready
1019 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)))) 1469 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
1020 kill_body = true; 1470 kill_body = true;
1021 } 1471 }
1022 if (assembler_name_hash) 1472 if (assembler_name_hash)
1046 node->uid = uid; 1496 node->uid = uid;
1047 NEXT_FREE_NODE (node) = free_nodes; 1497 NEXT_FREE_NODE (node) = free_nodes;
1048 free_nodes = node; 1498 free_nodes = node;
1049 } 1499 }
1050 1500
1501 /* Remove the node from cgraph. */
1502
1503 void
1504 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1505 {
1506 struct cgraph_edge *e, *next;
1507 for (e = node->callees; e; e = next)
1508 {
1509 next = e->next_callee;
1510 if (!e->inline_failed)
1511 cgraph_remove_node_and_inline_clones (e->callee);
1512 }
1513 cgraph_remove_node (node);
1514 }
1515
1051 /* Notify finalize_compilation_unit that given node is reachable. */ 1516 /* Notify finalize_compilation_unit that given node is reachable. */
1052 1517
1053 void 1518 void
1054 cgraph_mark_reachable_node (struct cgraph_node *node) 1519 cgraph_mark_reachable_node (struct cgraph_node *node)
1055 { 1520 {
1069 1534
1070 void 1535 void
1071 cgraph_mark_needed_node (struct cgraph_node *node) 1536 cgraph_mark_needed_node (struct cgraph_node *node)
1072 { 1537 {
1073 node->needed = 1; 1538 node->needed = 1;
1539 gcc_assert (!node->global.inlined_to);
1074 cgraph_mark_reachable_node (node); 1540 cgraph_mark_reachable_node (node);
1541 }
1542
1543 /* Likewise indicate that a node is having address taken. */
1544
1545 void
1546 cgraph_mark_address_taken_node (struct cgraph_node *node)
1547 {
1548 node->address_taken = 1;
1549 cgraph_mark_needed_node (node);
1075 } 1550 }
1076 1551
1077 /* Return local info for the compiled function. */ 1552 /* Return local info for the compiled function. */
1078 1553
1079 struct cgraph_local_info * 1554 struct cgraph_local_info *
1111 && !TREE_ASM_WRITTEN (node->decl)) 1586 && !TREE_ASM_WRITTEN (node->decl))
1112 return NULL; 1587 return NULL;
1113 return &node->rtl; 1588 return &node->rtl;
1114 } 1589 }
1115 1590
1591 /* Return a string describing the failure REASON. */
1592
1593 const char*
1594 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1595 {
1596 #undef DEFCIFCODE
1597 #define DEFCIFCODE(code, string) string,
1598
1599 static const char *cif_string_table[CIF_N_REASONS] = {
1600 #include "cif-code.def"
1601 };
1602
1603 /* Signedness of an enum type is implementation defined, so cast it
1604 to unsigned before testing. */
1605 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1606 return cif_string_table[reason];
1607 }
1608
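cgraph_inline_failed_string above fills its table by redefining DEFCIFCODE and re-including cif-code.def, the usual X-macro idiom: the same .def file is expanded once to build the enum of CIF_* codes and once more to build the parallel string table. A tiny self-contained illustration of that idiom, with a made-up code list standing in for cif-code.def:

/* Hypothetical entry list; in GCC these DEFCIFCODE lines live in
   cif-code.def and are #included twice with different definitions.  */
#define EXAMPLE_CIF_CODES \
  DEFCODE (EX_OK,       "inlining possible") \
  DEFCODE (EX_NO_BODY,  "function body not available") \
  DEFCODE (EX_MISMATCH, "mismatched arguments")

/* First expansion: the enum of reason codes.  */
#define DEFCODE(code, string) code,
enum example_cif { EXAMPLE_CIF_CODES EX_N_REASONS };
#undef DEFCODE

/* Second expansion: the parallel table of human-readable strings,
   indexed by the enum value.  */
#define DEFCODE(code, string) string,
static const char *const example_cif_string[EX_N_REASONS] = { EXAMPLE_CIF_CODES };
#undef DEFCODE

The lookup function then only has to bounds-check the enum value and index the table, which is exactly what the gcc_assert and return above do.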
1116 /* Return name of the node used in debug output. */ 1609 /* Return name of the node used in debug output. */
1117 const char * 1610 const char *
1118 cgraph_node_name (struct cgraph_node *node) 1611 cgraph_node_name (struct cgraph_node *node)
1119 { 1612 {
1120 return lang_hooks.decl_printable_name (node->decl, 2); 1613 return lang_hooks.decl_printable_name (node->decl, 2);
1129 1622
1130 void 1623 void
1131 dump_cgraph_node (FILE *f, struct cgraph_node *node) 1624 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1132 { 1625 {
1133 struct cgraph_edge *edge; 1626 struct cgraph_edge *edge;
1134 fprintf (f, "%s/%i(%i):", cgraph_node_name (node), node->uid, node->pid); 1627 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1628 node->pid);
1629 dump_addr (f, " @", (void *)node);
1135 if (node->global.inlined_to) 1630 if (node->global.inlined_to)
1136 fprintf (f, " (inline copy in %s/%i)", 1631 fprintf (f, " (inline copy in %s/%i)",
1137 cgraph_node_name (node->global.inlined_to), 1632 cgraph_node_name (node->global.inlined_to),
1138 node->global.inlined_to->uid); 1633 node->global.inlined_to->uid);
1634 if (node->clone_of)
1635 fprintf (f, " (clone of %s/%i)",
1636 cgraph_node_name (node->clone_of),
1637 node->clone_of->uid);
1139 if (cgraph_function_flags_ready) 1638 if (cgraph_function_flags_ready)
1140 fprintf (f, " availability:%s", 1639 fprintf (f, " availability:%s",
1141 cgraph_availability_names [cgraph_function_body_availability (node)]); 1640 cgraph_availability_names [cgraph_function_body_availability (node)]);
1142 if (node->master_clone && node->master_clone->uid != node->uid)
1143 fprintf (f, "(%i)", node->master_clone->uid);
1144 if (node->count) 1641 if (node->count)
1145 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x", 1642 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1146 (HOST_WIDEST_INT)node->count); 1643 (HOST_WIDEST_INT)node->count);
1147 if (node->local.inline_summary.self_insns) 1644 if (node->local.inline_summary.self_time)
1148 fprintf (f, " %i insns", node->local.inline_summary.self_insns); 1645 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1149 if (node->global.insns && node->global.insns 1646 node->local.inline_summary.time_inlining_benefit);
1150 != node->local.inline_summary.self_insns) 1647 if (node->global.time && node->global.time
1151 fprintf (f, " (%i after inlining)", node->global.insns); 1648 != node->local.inline_summary.self_time)
1649 fprintf (f, " (%i after inlining)", node->global.time);
1650 if (node->local.inline_summary.self_size)
1651 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1652 node->local.inline_summary.size_inlining_benefit);
1653 if (node->global.size && node->global.size
1654 != node->local.inline_summary.self_size)
1655 fprintf (f, " (%i after inlining)", node->global.size);
1152 if (node->local.inline_summary.estimated_self_stack_size) 1656 if (node->local.inline_summary.estimated_self_stack_size)
1153 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size); 1657 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1154 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size) 1658 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1155 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size); 1659 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1156 if (node->origin) 1660 if (node->origin)
1157 fprintf (f, " nested in: %s", cgraph_node_name (node->origin)); 1661 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1158 if (node->needed) 1662 if (node->needed)
1159 fprintf (f, " needed"); 1663 fprintf (f, " needed");
1664 if (node->address_taken)
1665 fprintf (f, " address_taken");
1160 else if (node->reachable) 1666 else if (node->reachable)
1161 fprintf (f, " reachable"); 1667 fprintf (f, " reachable");
1162 if (gimple_has_body_p (node->decl)) 1668 if (gimple_has_body_p (node->decl))
1163 fprintf (f, " body"); 1669 fprintf (f, " body");
1164 if (node->output) 1670 if (node->process)
1165 fprintf (f, " output"); 1671 fprintf (f, " process");
1166 if (node->local.local) 1672 if (node->local.local)
1167 fprintf (f, " local"); 1673 fprintf (f, " local");
1168 if (node->local.externally_visible) 1674 if (node->local.externally_visible)
1169 fprintf (f, " externally_visible"); 1675 fprintf (f, " externally_visible");
1170 if (node->local.finalized) 1676 if (node->local.finalized)
1191 edge->frequency / (double)CGRAPH_FREQ_BASE); 1697 edge->frequency / (double)CGRAPH_FREQ_BASE);
1192 if (!edge->inline_failed) 1698 if (!edge->inline_failed)
1193 fprintf(f, "(inlined) "); 1699 fprintf(f, "(inlined) ");
1194 if (edge->indirect_call) 1700 if (edge->indirect_call)
1195 fprintf(f, "(indirect) "); 1701 fprintf(f, "(indirect) ");
1702 if (edge->can_throw_external)
1703 fprintf(f, "(can throw external) ");
1196 } 1704 }
1197 1705
1198 fprintf (f, "\n calls: "); 1706 fprintf (f, "\n calls: ");
1199 for (edge = node->callees; edge; edge = edge->next_callee) 1707 for (edge = node->callees; edge; edge = edge->next_callee)
1200 { 1708 {
1210 if (edge->frequency) 1718 if (edge->frequency)
1211 fprintf (f, "(%.2f per call) ", 1719 fprintf (f, "(%.2f per call) ",
1212 edge->frequency / (double)CGRAPH_FREQ_BASE); 1720 edge->frequency / (double)CGRAPH_FREQ_BASE);
1213 if (edge->loop_nest) 1721 if (edge->loop_nest)
1214 fprintf (f, "(nested in %i loops) ", edge->loop_nest); 1722 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1723 if (edge->can_throw_external)
1724 fprintf(f, "(can throw external) ");
1215 } 1725 }
1216 fprintf (f, "\n"); 1726 fprintf (f, "\n");
1727
1728 if (node->same_body)
1729 {
1730 struct cgraph_node *n;
1731 fprintf (f, " aliases & thunks:");
1732 for (n = node->same_body; n; n = n->next)
1733 {
1734 fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1735 if (n->thunk.thunk_p)
1736 {
1737 fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
1738 "virtual offset %i",
1739 lang_hooks.decl_printable_name (n->thunk.alias, 2),
1740 (int)n->thunk.fixed_offset,
1741 (int)n->thunk.virtual_value,
1742 (int)n->thunk.virtual_offset_p);
1743 fprintf (f, ")");
1744 }
1745 }
1746 fprintf (f, "\n");
1747 }
1217 } 1748 }
1218 1749
1219 1750
1220 /* Dump call graph node NODE to stderr. */ 1751 /* Dump call graph node NODE to stderr. */
1221 1752
1298 } 1829 }
1299 1830
1300 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */ 1831 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
1301 struct cgraph_edge * 1832 struct cgraph_edge *
1302 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n, 1833 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
1303 gimple call_stmt, gcov_type count_scale, int freq_scale, 1834 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
1304 int loop_nest, bool update_original) 1835 int freq_scale, int loop_nest, bool update_original)
1305 { 1836 {
1306 struct cgraph_edge *new_edge; 1837 struct cgraph_edge *new_edge;
1307 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE; 1838 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
1308 gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE; 1839 gcov_type freq;
1309 1840
1841 /* We do not want to ignore loop nest after frequency drops to 0. */
1842 if (!freq_scale)
1843 freq_scale = 1;
1844 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
1310 if (freq > CGRAPH_FREQ_MAX) 1845 if (freq > CGRAPH_FREQ_MAX)
1311 freq = CGRAPH_FREQ_MAX; 1846 freq = CGRAPH_FREQ_MAX;
1312 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq, 1847 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
1313 e->loop_nest + loop_nest); 1848 e->loop_nest + loop_nest);
1314 1849
1315 new_edge->inline_failed = e->inline_failed; 1850 new_edge->inline_failed = e->inline_failed;
1316 new_edge->indirect_call = e->indirect_call; 1851 new_edge->indirect_call = e->indirect_call;
1852 new_edge->lto_stmt_uid = stmt_uid;
1317 if (update_original) 1853 if (update_original)
1318 { 1854 {
1319 e->count -= new_edge->count; 1855 e->count -= new_edge->count;
1320 if (e->count < 0) 1856 if (e->count < 0)
1321 e->count = 0; 1857 e->count = 0;
1330 When UPDATE_ORIGINAL is true, the counts are subtracted from the original 1866 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
1331 function's profile to reflect the fact that part of execution is handled 1867 function's profile to reflect the fact that part of execution is handled
1332 by node. */ 1868 by node. */
1333 struct cgraph_node * 1869 struct cgraph_node *
1334 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq, 1870 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
1335 int loop_nest, bool update_original) 1871 int loop_nest, bool update_original,
1872 VEC(cgraph_edge_p,heap) *redirect_callers)
1336 { 1873 {
1337 struct cgraph_node *new_node = cgraph_create_node (); 1874 struct cgraph_node *new_node = cgraph_create_node ();
1338 struct cgraph_edge *e; 1875 struct cgraph_edge *e;
1339 gcov_type count_scale; 1876 gcov_type count_scale;
1877 unsigned i;
1340 1878
1341 new_node->decl = n->decl; 1879 new_node->decl = n->decl;
1342 new_node->origin = n->origin; 1880 new_node->origin = n->origin;
1343 if (new_node->origin) 1881 if (new_node->origin)
1344 { 1882 {
1345 new_node->next_nested = new_node->origin->nested; 1883 new_node->next_nested = new_node->origin->nested;
1346 new_node->origin->nested = new_node; 1884 new_node->origin->nested = new_node;
1347 } 1885 }
1348 new_node->analyzed = n->analyzed; 1886 new_node->analyzed = n->analyzed;
1349 new_node->local = n->local; 1887 new_node->local = n->local;
1888 new_node->local.externally_visible = false;
1350 new_node->global = n->global; 1889 new_node->global = n->global;
1351 new_node->rtl = n->rtl; 1890 new_node->rtl = n->rtl;
1352 new_node->master_clone = n->master_clone;
1353 new_node->count = count; 1891 new_node->count = count;
1892 new_node->clone = n->clone;
1354 if (n->count) 1893 if (n->count)
1355 { 1894 {
1356 if (new_node->count > n->count) 1895 if (new_node->count > n->count)
1357 count_scale = REG_BR_PROB_BASE; 1896 count_scale = REG_BR_PROB_BASE;
1358 else 1897 else
1365 n->count -= count; 1904 n->count -= count;
1366 if (n->count < 0) 1905 if (n->count < 0)
1367 n->count = 0; 1906 n->count = 0;
1368 } 1907 }
1369 1908
1909 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1910 {
1911 /* Redirect calls to the old version node to point to its new
1912 version. */
1913 cgraph_redirect_edge_callee (e, new_node);
1914 }
1915
1916
1370 for (e = n->callees;e; e=e->next_callee) 1917 for (e = n->callees;e; e=e->next_callee)
1371 cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest, 1918 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
1372 update_original); 1919 count_scale, freq, loop_nest, update_original);
1373 1920
1374 new_node->next_clone = n->next_clone; 1921 new_node->next_sibling_clone = n->clones;
1375 new_node->prev_clone = n; 1922 if (n->clones)
1376 n->next_clone = new_node; 1923 n->clones->prev_sibling_clone = new_node;
1377 if (new_node->next_clone) 1924 n->clones = new_node;
1378 new_node->next_clone->prev_clone = new_node; 1925 new_node->clone_of = n;
1379 1926
1380 cgraph_call_node_duplication_hooks (n, new_node); 1927 cgraph_call_node_duplication_hooks (n, new_node);
1381 return new_node; 1928 return new_node;
1382 } 1929 }
1383 1930
1384 /* Return true if N is an master_clone, (see cgraph_master_clone). */ 1931 /* Create a new name for omp child function. Returns an identifier. */
1385 1932
1386 bool 1933 static GTY(()) unsigned int clone_fn_id_num;
1387 cgraph_is_master_clone (struct cgraph_node *n) 1934
1388 { 1935 static tree
1389 return (n == cgraph_master_clone (n)); 1936 clone_function_name (tree decl)
1390 } 1937 {
1391 1938 tree name = DECL_ASSEMBLER_NAME (decl);
1939 size_t len = IDENTIFIER_LENGTH (name);
1940 char *tmp_name, *prefix;
1941
1942 prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
1943 memcpy (prefix, IDENTIFIER_POINTER (name), len);
1944 strcpy (prefix + len, "_clone");
1945 #ifndef NO_DOT_IN_LABEL
1946 prefix[len] = '.';
1947 #elif !defined NO_DOLLAR_IN_LABEL
1948 prefix[len] = '$';
1949 #endif
1950 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
1951 return get_identifier (tmp_name);
1952 }
1953
1954 /* Create callgraph node clone with new declaration. The actual body will
1955 be copied later at compilation stage.
1956
1957 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
1958 bitmap interface.
1959 */
1392 struct cgraph_node * 1960 struct cgraph_node *
1393 cgraph_master_clone (struct cgraph_node *n) 1961 cgraph_create_virtual_clone (struct cgraph_node *old_node,
1394 { 1962 VEC(cgraph_edge_p,heap) *redirect_callers,
1395 enum availability avail = cgraph_function_body_availability (n); 1963 VEC(ipa_replace_map_p,gc) *tree_map,
1396 1964 bitmap args_to_skip)
1397 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE) 1965 {
1398 return NULL; 1966 tree old_decl = old_node->decl;
1399 1967 struct cgraph_node *new_node = NULL;
1400 if (!n->master_clone) 1968 tree new_decl;
1401 n->master_clone = cgraph_node (n->decl); 1969 struct cgraph_node key, **slot;
1402 1970
1403 return n->master_clone; 1971 gcc_assert (tree_versionable_function_p (old_decl));
1972
1973 /* Make a new FUNCTION_DECL tree node */
1974 if (!args_to_skip)
1975 new_decl = copy_node (old_decl);
1976 else
1977 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1978 DECL_STRUCT_FUNCTION (new_decl) = NULL;
1979
1980 /* Generate a new name for the new version. */
1981 DECL_NAME (new_decl) = clone_function_name (old_decl);
1982 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1983 SET_DECL_RTL (new_decl, NULL);
1984
1985 new_node = cgraph_clone_node (old_node, old_node->count,
1986 CGRAPH_FREQ_BASE, 0, false,
1987 redirect_callers);
1988 new_node->decl = new_decl;
1989 /* Update the properties.
1990 Make clone visible only within this translation unit. Make sure
1991 that is not weak also.
1992 ??? We cannot use COMDAT linkage because there is no
1993 ABI support for this. */
1994 DECL_EXTERNAL (new_node->decl) = 0;
1995 DECL_COMDAT_GROUP (new_node->decl) = 0;
1996 TREE_PUBLIC (new_node->decl) = 0;
1997 DECL_COMDAT (new_node->decl) = 0;
1998 DECL_WEAK (new_node->decl) = 0;
1999 new_node->clone.tree_map = tree_map;
2000 new_node->clone.args_to_skip = args_to_skip;
2001 if (!args_to_skip)
2002 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2003 else if (old_node->clone.combined_args_to_skip)
2004 {
2005 int newi = 0, oldi = 0;
2006 tree arg;
2007 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2008 struct cgraph_node *orig_node;
2009 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2010 ;
2011 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
2012 {
2013 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2014 {
2015 bitmap_set_bit (new_args_to_skip, oldi);
2016 continue;
2017 }
2018 if (bitmap_bit_p (args_to_skip, newi))
2019 bitmap_set_bit (new_args_to_skip, oldi);
2020 newi++;
2021 }
2022 new_node->clone.combined_args_to_skip = new_args_to_skip;
2023 }
2024 else
2025 new_node->clone.combined_args_to_skip = args_to_skip;
2026 new_node->local.externally_visible = 0;
2027 new_node->local.local = 1;
2028 new_node->lowered = true;
2029 new_node->reachable = true;
2030
2031 key.decl = new_decl;
2032 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
2033 gcc_assert (!*slot);
2034 *slot = new_node;
2035 if (assembler_name_hash)
2036 {
2037 void **aslot;
2038 tree name = DECL_ASSEMBLER_NAME (new_decl);
2039
2040 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2041 decl_assembler_name_hash (name),
2042 INSERT);
2043 gcc_assert (!*aslot);
2044 *aslot = new_node;
2045 }
2046
2047 return new_node;
1404 } 2048 }
1405 2049
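The combined_args_to_skip bookkeeping in cgraph_create_virtual_clone above translates the new skip set, which is indexed by old_node's already-reduced parameter positions, back onto the positions of the original declaration's parameters. A minimal restatement of that remapping using plain bool arrays instead of GCC bitmaps (function and parameter names here are hypothetical):

#include <stdbool.h>

/* OLD_COMBINED is indexed by the ORIGINAL argument positions,
   ARGS_TO_SKIP by the surviving positions of old_node; the result,
   also indexed by original positions, is their combination.  */

static void
merge_args_to_skip (const bool *old_combined, const bool *args_to_skip,
                    bool *new_combined, int n_orig_args)
{
  int oldi, newi = 0;

  for (oldi = 0; oldi < n_orig_args; oldi++)
    {
      if (old_combined[oldi])
        {
          /* Argument OLDI was already dropped by an earlier clone.  */
          new_combined[oldi] = true;
          continue;
        }
      /* Argument OLDI survived so far, so it is argument NEWI of
         old_node; skip it iff the new clone skips NEWI.  */
      new_combined[oldi] = args_to_skip[newi];
      newi++;
    }
}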
1406 /* NODE is no longer nested function; update cgraph accordingly. */ 2050 /* NODE is no longer nested function; update cgraph accordingly. */
1407 void 2051 void
1408 cgraph_unnest_node (struct cgraph_node *node) 2052 cgraph_unnest_node (struct cgraph_node *node)
1427 avail = AVAIL_NOT_AVAILABLE; 2071 avail = AVAIL_NOT_AVAILABLE;
1428 else if (node->local.local) 2072 else if (node->local.local)
1429 avail = AVAIL_LOCAL; 2073 avail = AVAIL_LOCAL;
1430 else if (!node->local.externally_visible) 2074 else if (!node->local.externally_visible)
1431 avail = AVAIL_AVAILABLE; 2075 avail = AVAIL_AVAILABLE;
2076 /* Inline functions are safe to be analyzed even if their sybol can
2077 be overwritten at runtime. It is not meaningful to enfore any sane
2078 behaviour on replacing inline function by different body. */
2079 else if (DECL_DECLARED_INLINE_P (node->decl))
2080 avail = AVAIL_AVAILABLE;
1432 2081
1433 /* If the function can be overwritten, return OVERWRITABLE. Take 2082 /* If the function can be overwritten, return OVERWRITABLE. Take
1434 care at least of two notable extensions - the COMDAT functions 2083 care at least of two notable extensions - the COMDAT functions
1435 used to share template instantiations in C++ (this is symmetric 2084 used to share template instantiations in C++ (this is symmetric
1436 to code cp_cannot_inline_tree_fn and probably shall be shared and 2085 to code cp_cannot_inline_tree_fn and probably shall be shared and
1437 the inlinability hooks completely eliminated). 2086 the inlinability hooks completely eliminated).
1438 2087
1439 ??? Does the C++ one definition rule allow us to always return 2088 ??? Does the C++ one definition rule allow us to always return
1440 AVAIL_AVAILABLE here? That would be good reason to preserve this 2089 AVAIL_AVAILABLE here? That would be good reason to preserve this
1441 hook Similarly deal with extern inline functions - this is again 2090 bit. */
1442 necessary to get C++ shared functions having keyed templates 2091
1443 right and in the C extension documentation we probably should 2092 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
1444 document the requirement of both versions of function (extern
1445 inline and offline) having same side effect characteristics as
1446 good optimization is what this optimization is about. */
1447
1448 else if (!(*targetm.binds_local_p) (node->decl)
1449 && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
1450 avail = AVAIL_OVERWRITABLE; 2093 avail = AVAIL_OVERWRITABLE;
1451 else avail = AVAIL_AVAILABLE; 2094 else avail = AVAIL_AVAILABLE;
1452 2095
1453 return avail; 2096 return avail;
1454 } 2097 }
1458 by middle end and allows insertion of new function at arbitrary point 2101 by middle end and allows insertion of new function at arbitrary point
1459 of compilation. The function can be either in high, low or SSA form 2102 of compilation. The function can be either in high, low or SSA form
1460 GIMPLE. 2103 GIMPLE.
1461 2104
1462 The function is assumed to be reachable and have address taken (so no 2105 The function is assumed to be reachable and have address taken (so no
1463 API breaking optimizations are performed on it). 2106 API breaking optimizations are performed on it).
1464 2107
1465 Main work done by this function is to enqueue the function for later 2108 Main work done by this function is to enqueue the function for later
1466 processing to avoid need the passes to be re-entrant. */ 2109 processing to avoid need the passes to be re-entrant. */
1467 2110
1468 void 2111 void
1525 tree_rest_of_compilation (fndecl); 2168 tree_rest_of_compilation (fndecl);
1526 pop_cfun (); 2169 pop_cfun ();
1527 current_function_decl = NULL; 2170 current_function_decl = NULL;
1528 break; 2171 break;
1529 } 2172 }
2173
2174 /* Set a personality if required and we already passed EH lowering. */
2175 if (lowered
2176 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2177 == eh_personality_lang))
2178 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2179 }
2180
2181 /* Return true if NODE can be made local for API change.
2182 Extern inline functions and C++ COMDAT functions can be made local
2183 at the expense of possible code size growth if function is used in multiple
2184 compilation units. */
2185 bool
2186 cgraph_node_can_be_local_p (struct cgraph_node *node)
2187 {
2188 return (!node->needed
2189 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2190 || !node->local.externally_visible));
2191 }
2192
2193 /* Bring NODE local. */
2194 void
2195 cgraph_make_node_local (struct cgraph_node *node)
2196 {
2197 gcc_assert (cgraph_node_can_be_local_p (node));
2198 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2199 {
2200 DECL_COMDAT (node->decl) = 0;
2201 DECL_COMDAT_GROUP (node->decl) = 0;
2202 TREE_PUBLIC (node->decl) = 0;
2203 DECL_WEAK (node->decl) = 0;
2204 DECL_EXTERNAL (node->decl) = 0;
2205 node->local.externally_visible = false;
2206 node->local.local = true;
2207 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2208 }
2209 }
2210
2211 /* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
2212 if any to NOTHROW. */
2213
2214 void
2215 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2216 {
2217 struct cgraph_node *alias;
2218 TREE_NOTHROW (node->decl) = nothrow;
2219 for (alias = node->same_body; alias; alias = alias->next)
2220 TREE_NOTHROW (alias->decl) = nothrow;
2221 }
2222
2223 /* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
2224 if any to READONLY. */
2225
2226 void
2227 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2228 {
2229 struct cgraph_node *alias;
2230 TREE_READONLY (node->decl) = readonly;
2231 for (alias = node->same_body; alias; alias = alias->next)
2232 TREE_READONLY (alias->decl) = readonly;
2233 }
2234
2235 /* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
2236 if any to PURE. */
2237
2238 void
2239 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2240 {
2241 struct cgraph_node *alias;
2242 DECL_PURE_P (node->decl) = pure;
2243 for (alias = node->same_body; alias; alias = alias->next)
2244 DECL_PURE_P (alias->decl) = pure;
2245 }
2246
2247 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
2248 same_body aliases of NODE if any to LOOPING_CONST_OR_PURE. */
2249
2250 void
2251 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2252 bool looping_const_or_pure)
2253 {
2254 struct cgraph_node *alias;
2255 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2256 for (alias = node->same_body; alias; alias = alias->next)
2257 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
1530 } 2258 }
1531 2259
1532 #include "gt-cgraph.h" 2260 #include "gt-cgraph.h"