diff gcc/cgraph.c @ changeset 55:77e2b8dfacca (gcc-4.4.5)

update it from 4.4.3 to 4.5.0

author:   ryoma <e075725@ie.u-ryukyu.ac.jp>
date:     Fri, 12 Feb 2010 23:39:51 +0900
parents:  a06113de4d67
children: b7f97abdc517
--- a/gcc/cgraph.c	Sun Feb 07 18:28:00 2010 +0900
+++ b/gcc/cgraph.c	Fri Feb 12 23:39:51 2010 +0900
@@ -1,5 +1,5 @@
 /* Callgraph handling code.
-   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
+   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
    Free Software Foundation, Inc.
    Contributed by Jan Hubicka
@@ -78,13 +78,14 @@
 #include "target.h"
 #include "basic-block.h"
 #include "cgraph.h"
-#include "varray.h"
 #include "output.h"
 #include "intl.h"
 #include "gimple.h"
 #include "tree-dump.h"
 #include "tree-flow.h"
 #include "value-prof.h"
+#include "except.h"
+#include "diagnostic.h"
 
 static void cgraph_node_remove_callers (struct cgraph_node *node);
 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
@@ -405,6 +406,7 @@
   return (hashval_t) DECL_UID (n->decl);
 }
 
+
 /* Returns nonzero if P1 and P2 are equal.  */
 
 static int
@@ -415,10 +417,10 @@
   return DECL_UID (n1->decl) == DECL_UID (n2->decl);
 }
 
-/* Allocate new callgraph node and insert it into basic data structures.  */
+/* Allocate new callgraph node.  */
 
-static struct cgraph_node *
-cgraph_create_node (void)
+static inline struct cgraph_node *
+cgraph_allocate_node (void)
 {
   struct cgraph_node *node;
 
@@ -433,6 +435,16 @@
       node->uid = cgraph_max_uid++;
     }
 
+  return node;
+}
+
+/* Allocate new callgraph node and insert it into basic data structures.  */
+
+static struct cgraph_node *
+cgraph_create_node (void)
+{
+  struct cgraph_node *node = cgraph_allocate_node ();
+
   node->next = cgraph_nodes;
   node->pid = -1;
   node->order = cgraph_order++;
@@ -464,8 +476,8 @@
   if (*slot)
     {
       node = *slot;
-      if (!node->master_clone)
-	node->master_clone = node;
+      if (node->same_body_alias)
+	node = node->same_body;
       return node;
     }
 
@@ -477,7 +489,6 @@
       node->origin = cgraph_node (DECL_CONTEXT (decl));
       node->next_nested = node->origin->nested;
       node->origin->nested = node;
-      node->master_clone = node;
     }
   if (assembler_name_hash)
     {
@@ -497,6 +508,112 @@
   return node;
 }
 
+/* Mark ALIAS as an alias to DECL.  */
+
+static struct cgraph_node *
+cgraph_same_body_alias_1 (tree alias, tree decl)
+{
+  struct cgraph_node key, *alias_node, *decl_node, **slot;
+
+  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+  gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
+  decl_node = cgraph_node (decl);
+
+  key.decl = alias;
+
+  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+
+  /* If the cgraph_node has been already created, fail.  */
+  if (*slot)
+    return NULL;
+
+  alias_node = cgraph_allocate_node ();
+  alias_node->decl = alias;
+  alias_node->same_body_alias = 1;
+  alias_node->same_body = decl_node;
+  alias_node->previous = NULL;
+  if (decl_node->same_body)
+    decl_node->same_body->previous = alias_node;
+  alias_node->next = decl_node->same_body;
+  alias_node->thunk.alias = decl;
+  decl_node->same_body = alias_node;
+  *slot = alias_node;
+  return alias_node;
+}
+
+/* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
+   Same body aliases are output whenever the body of DECL is output,
+   and cgraph_node (ALIAS) transparently returns cgraph_node (DECL).  */
+
+bool
+cgraph_same_body_alias (tree alias, tree decl)
+{
+#ifndef ASM_OUTPUT_DEF
+  /* If aliases aren't supported by the assembler, fail.  */
+  return false;
+#endif
+
+  /*gcc_assert (!assembler_name_hash);*/
+
+  return cgraph_same_body_alias_1 (alias, decl) != NULL;
+}
+
+void
+cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
+		  HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
+		  tree virtual_offset,
+		  tree real_alias)
+{
+  struct cgraph_node *node = cgraph_get_node (alias);
+
+  if (node)
+    {
+      gcc_assert (node->local.finalized);
+      gcc_assert (!node->same_body);
+      cgraph_remove_node (node);
+    }
+
+  node = cgraph_same_body_alias_1 (alias, decl);
+  gcc_assert (node);
+#ifdef ENABLE_CHECKING
+  gcc_assert (!virtual_offset
+	      || tree_int_cst_equal (virtual_offset,
+				     size_int (virtual_value)));
+#endif
+  node->thunk.fixed_offset = fixed_offset;
+  node->thunk.this_adjusting = this_adjusting;
+  node->thunk.virtual_value = virtual_value;
+  node->thunk.virtual_offset_p = virtual_offset != NULL;
+  node->thunk.alias = real_alias;
+  node->thunk.thunk_p = true;
+}
+
+/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
+   is assigned.  */
+
+struct cgraph_node *
+cgraph_get_node (tree decl)
+{
+  struct cgraph_node key, *node = NULL, **slot;
+
+  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+
+  if (!cgraph_hash)
+    return NULL;
+
+  key.decl = decl;
+
+  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
+						 NO_INSERT);
+
+  if (slot && *slot)
+    {
+      node = *slot;
+      if (node->same_body_alias)
+	node = node->same_body;
+    }
+  return node;
+}
+
 /* Insert already constructed node into hashtable.  */
 
 void
@@ -554,9 +671,23 @@
 	     it is __builtin_strlen and strlen, for instance.  Do we need to
 	     record them all?  Original implementation marked just first one
 	     so lets hope for the best.  */
-	  if (*slot)
-	    continue;
-	  *slot = node;
+	  if (!*slot)
+	    *slot = node;
+	  if (node->same_body)
+	    {
+	      struct cgraph_node *alias;
+
+	      for (alias = node->same_body; alias; alias = alias->next)
+		{
+		  hashval_t hash;
+		  name = DECL_ASSEMBLER_NAME (alias->decl);
+		  hash = decl_assembler_name_hash (name);
+		  slot = htab_find_slot_with_hash (assembler_name_hash, name,
						   hash, INSERT);
+		  if (!*slot)
+		    *slot = alias;
+		}
+	    }
 	}
     }
 
@@ -565,7 +696,12 @@
				   NO_INSERT);
 
   if (slot)
-    return (struct cgraph_node *) *slot;
+    {
+      node = (struct cgraph_node *) *slot;
+      if (node->same_body_alias)
+	node = node->same_body;
+      return node;
+    }
   return NULL;
 }
 
@@ -631,7 +767,7 @@
 }
 
 
-/* Change field call_smt of edge E to NEW_STMT.  */
+/* Change field call_stmt of edge E to NEW_STMT.  */
 
 void
 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
@@ -643,6 +779,9 @@
				   htab_hash_pointer (e->call_stmt));
     }
   e->call_stmt = new_stmt;
+  push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
+  e->can_throw_external = stmt_can_throw_external (new_stmt);
+  pop_cfun ();
   if (e->caller->call_site_hash)
     {
       void **slot;
@@ -655,6 +794,117 @@
     }
 }
 
+/* Like cgraph_set_call_stmt but walk the clone tree and update all
+   clones sharing the same function body.  */
+
+void
+cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
				       gimple old_stmt, gimple new_stmt)
+{
+  struct cgraph_node *node;
+  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
+
+  if (edge)
+    cgraph_set_call_stmt (edge, new_stmt);
+
+  node = orig->clones;
+  if (node)
+    while (node != orig)
+      {
+	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+	if (edge)
+	  cgraph_set_call_stmt (edge, new_stmt);
+	if (node->clones)
+	  node = node->clones;
+	else if (node->next_sibling_clone)
+	  node = node->next_sibling_clone;
+	else
+	  {
+	    while (node != orig && !node->next_sibling_clone)
+	      node = node->clone_of;
+	    if (node != orig)
+	      node = node->next_sibling_clone;
+	  }
+      }
+}
+
+/* Like cgraph_create_edge walk the clone tree and update all clones sharing
+   same function body.  If clones already have edge for OLD_STMT, only
+   update the edge same way as cgraph_set_call_stmt_including_clones does.
+
+   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
+   frequencies of the clones.  */
+
+void
+cgraph_create_edge_including_clones (struct cgraph_node *orig,
				     struct cgraph_node *callee,
				     gimple old_stmt,
				     gimple stmt, gcov_type count,
				     int freq, int loop_depth,
				     cgraph_inline_failed_t reason)
+{
+  struct cgraph_node *node;
+  struct cgraph_edge *edge;
+
+  if (!cgraph_edge (orig, stmt))
+    {
+      edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
+      edge->inline_failed = reason;
+    }
+
+  node = orig->clones;
+  if (node)
+    while (node != orig)
+      {
+	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+
+	/* It is possible that clones already contain the edge while
+	   master didn't.  Either we promoted indirect call into direct
+	   call in the clone or we are processing clones of unreachable
+	   master where edges have been removed.  */
+	if (edge)
+	  cgraph_set_call_stmt (edge, stmt);
+	else if (!cgraph_edge (node, stmt))
+	  {
+	    edge = cgraph_create_edge (node, callee, stmt, count,
				       freq, loop_depth);
+	    edge->inline_failed = reason;
+	  }
+
+	if (node->clones)
+	  node = node->clones;
+	else if (node->next_sibling_clone)
+	  node = node->next_sibling_clone;
+	else
+	  {
+	    while (node != orig && !node->next_sibling_clone)
+	      node = node->clone_of;
+	    if (node != orig)
+	      node = node->next_sibling_clone;
+	  }
      }
+}
+
+/* Give initial reasons why inlining would fail on EDGE.  This gets either
+   nullified or usually overwritten by more precise reasons later.  */
+
+static void
+initialize_inline_failed (struct cgraph_edge *e)
+{
+  struct cgraph_node *callee = e->callee;
+
+  if (!callee->analyzed)
+    e->inline_failed = CIF_BODY_NOT_AVAILABLE;
+  else if (callee->local.redefined_extern_inline)
+    e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
+  else if (!callee->local.inlinable)
+    e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
+  else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
+    e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
+  else
+    e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
+}
+
 /* Create edge from CALLER to CALLEE in the cgraph.  */
 
 struct cgraph_edge *
@@ -663,13 +913,19 @@
 {
   struct cgraph_edge *edge;
 
+
+  /* LTO does not actually have access to the call_stmt since these
+     have not been loaded yet.  */
+  if (call_stmt)
+    {
 #ifdef ENABLE_CHECKING
-  /* This is rather pricely check possibly trigerring construction of call stmt
-     hashtable.  */
-  gcc_assert (!cgraph_edge (caller, call_stmt));
+      /* This is a rather pricey check, possibly triggering construction of
	 the call stmt hashtable.  */
+      gcc_assert (!cgraph_edge (caller, call_stmt));
 #endif
 
-  gcc_assert (is_gimple_call (call_stmt));
+      gcc_assert (is_gimple_call (call_stmt));
+    }
 
   if (free_edges)
     {
@@ -682,21 +938,15 @@
       edge->uid = cgraph_edge_max_uid++;
     }
 
-  if (!callee->analyzed)
-    edge->inline_failed = N_("function body not available");
-  else if (callee->local.redefined_extern_inline)
-    edge->inline_failed = N_("redefined extern inline functions are not "
			     "considered for inlining");
-  else if (callee->local.inlinable)
-    edge->inline_failed = N_("function not considered for inlining");
-  else
-    edge->inline_failed = N_("function not inlinable");
-
   edge->aux = NULL;
 
   edge->caller = caller;
   edge->callee = callee;
   edge->call_stmt = call_stmt;
+  push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
+  edge->can_throw_external
+    = call_stmt ? stmt_can_throw_external (call_stmt) : false;
+  pop_cfun ();
   edge->prev_caller = NULL;
   edge->next_caller = callee->callers;
   if (callee->callers)
@@ -714,7 +964,9 @@
   gcc_assert (freq <= CGRAPH_FREQ_MAX);
   edge->loop_nest = nest;
   edge->indirect_call = 0;
-  if (caller->call_site_hash)
+  edge->call_stmt_cannot_inline_p =
+    (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
+  if (call_stmt && caller->call_site_hash)
     {
       void **slot;
       slot = htab_find_slot_with_hash (caller->call_site_hash,
@@ -725,6 +977,9 @@
       gcc_assert (!*slot);
       *slot = edge;
     }
+
+  initialize_inline_failed (edge);
+
   return edge;
 }
 
@@ -810,48 +1065,93 @@
 
 
 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
-   OLD_STMT changed into NEW_STMT.  */
+   OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
+   of OLD_STMT if it was previously call statement.  */
 
-void
-cgraph_update_edges_for_call_stmt (gimple old_stmt, gimple new_stmt)
+static void
+cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
					gimple old_stmt, tree old_call,
					gimple new_stmt)
 {
-  tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fn (new_stmt) : 0;
-  tree old_call = (is_gimple_call (old_stmt)) ? gimple_call_fn (old_stmt) : 0;
-  struct cgraph_node *node = cgraph_node (cfun->decl);
+  tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
 
+  /* If we are only seeing indirect calls, there is nothing to update.  */
+  if (!new_call && !old_call)
+    return;
+  /* See if we turned indirect call into direct call or folded call to one
+     builtin into different builtin.  */
   if (old_call != new_call)
     {
       struct cgraph_edge *e = cgraph_edge (node, old_stmt);
       struct cgraph_edge *ne = NULL;
-      tree new_decl;
+      gcov_type count;
+      int frequency;
+      int loop_nest;
 
       if (e)
	{
-	  gcov_type count = e->count;
-	  int frequency = e->frequency;
-	  int loop_nest = e->loop_nest;
+	  /* See if the call is already there.  It might be because of indirect
	     inlining already found it.  */
+	  if (new_call && e->callee->decl == new_call)
+	    return;
 
+	  /* Otherwise remove edge and create new one; we can't simply redirect
	     since function has changed, so inline plan and other information
	     attached to edge is invalid.  */
+	  count = e->count;
+	  frequency = e->frequency;
+	  loop_nest = e->loop_nest;
	  cgraph_remove_edge (e);
-	  if (new_call)
-	    {
-	      new_decl = gimple_call_fndecl (new_stmt);
-	      if (new_decl)
-		{
-		  ne = cgraph_create_edge (node, cgraph_node (new_decl),
					   new_stmt, count, frequency,
					   loop_nest);
-		  gcc_assert (ne->inline_failed);
-		}
-	    }
+	}
+      else
+	{
+	  /* We are seeing new direct call; compute profile info based on BB.  */
+	  basic_block bb = gimple_bb (new_stmt);
+	  count = bb->count;
+	  frequency = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
+	  loop_nest = bb->loop_depth;
+	}
+
+      if (new_call)
+	{
+	  ne = cgraph_create_edge (node, cgraph_node (new_call),
				   new_stmt, count, frequency,
				   loop_nest);
+	  gcc_assert (ne->inline_failed);
+	}
     }
+  /* We only updated the call stmt; update pointer in cgraph edge.  */
   else if (old_stmt != new_stmt)
-    {
-      struct cgraph_edge *e = cgraph_edge (node, old_stmt);
+    cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
+}
 
-      if (e)
-	cgraph_set_call_stmt (e, new_stmt);
-    }
+/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
+   OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
+   of OLD_STMT before it was updated (updating can happen in place).  */
+
+void
+cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl,
				   gimple new_stmt)
+{
+  struct cgraph_node *orig = cgraph_node (cfun->decl);
+  struct cgraph_node *node;
+
+  cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
+  if (orig->clones)
+    for (node = orig->clones; node != orig;)
+      {
+	cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl,
						new_stmt);
+	if (node->clones)
+	  node = node->clones;
+	else if (node->next_sibling_clone)
+	  node = node->next_sibling_clone;
+	else
+	  {
+	    while (node != orig && !node->next_sibling_clone)
+	      node = node->clone_of;
+	    if (node != orig)
+	      node = node->next_sibling_clone;
+	  }
      }
 }
 
@@ -929,7 +1229,7 @@
   pop_cfun();
   gimple_set_body (node->decl, NULL);
   VEC_free (ipa_opt_pass, heap,
-	    DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply);
+	    node->ipa_transforms_to_apply);
   /* Struct function hangs a lot of data that would leak if we didn't
      removed all pointers to it.  */
   ggc_free (DECL_STRUCT_FUNCTION (node->decl));
@@ -943,6 +1243,44 @@
   DECL_INITIAL (node->decl) = error_mark_node;
 }
 
+/* Remove same body alias node.  */
+
+void
+cgraph_remove_same_body_alias (struct cgraph_node *node)
+{
+  void **slot;
+  int uid = node->uid;
+
+  gcc_assert (node->same_body_alias);
+  if (node->previous)
+    node->previous->next = node->next;
+  else
+    node->same_body->same_body = node->next;
+  if (node->next)
+    node->next->previous = node->previous;
+  node->next = NULL;
+  node->previous = NULL;
+  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
+  if (*slot == node)
+    htab_clear_slot (cgraph_hash, slot);
+  if (assembler_name_hash)
+    {
+      tree name = DECL_ASSEMBLER_NAME (node->decl);
+      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
+      if (slot && *slot == node)
+	htab_clear_slot (assembler_name_hash, slot);
+    }
+
+  /* Clear out the node to NULL all pointers and add the node to the free
+     list.  */
+  memset (node, 0, sizeof(*node));
+  node->uid = uid;
+  NEXT_FREE_NODE (node) = free_nodes;
+  free_nodes = node;
+}
+
 /* Remove the node from cgraph.  */
 
 void
@@ -956,6 +1294,8 @@
   cgraph_call_node_removal_hooks (node);
   cgraph_node_remove_callers (node);
   cgraph_node_remove_callees (node);
+  VEC_free (ipa_opt_pass, heap,
+	    node->ipa_transforms_to_apply);
 
   /* Incremental inlining access removed nodes stored in the postorder
     list.  */
@@ -982,29 +1322,139 @@
   slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
   if (*slot == node)
     {
-      if (node->next_clone)
-	{
-	  struct cgraph_node *new_node = node->next_clone;
-	  struct cgraph_node *n;
+      struct cgraph_node *next_inline_clone;
+
+      for (next_inline_clone = node->clones;
	   next_inline_clone && next_inline_clone->decl != node->decl;
	   next_inline_clone = next_inline_clone->next_sibling_clone)
+	;
+
+      /* If there is inline clone of the node being removed, we need
	 to put it into the position of removed node and reorganize all
	 other clones to be based on it.  */
+      if (next_inline_clone)
+	{
+	  struct cgraph_node *n;
+	  struct cgraph_node *new_clones;
+
+	  *slot = next_inline_clone;
+
+	  /* Unlink inline clone from the list of clones of removed node.  */
+	  if (next_inline_clone->next_sibling_clone)
+	    next_inline_clone->next_sibling_clone->prev_sibling_clone
+	      = next_inline_clone->prev_sibling_clone;
+	  if (next_inline_clone->prev_sibling_clone)
+	    {
+	      gcc_assert (node->clones != next_inline_clone);
+	      next_inline_clone->prev_sibling_clone->next_sibling_clone
		= next_inline_clone->next_sibling_clone;
+	    }
+	  else
+	    {
+	      gcc_assert (node->clones == next_inline_clone);
+	      node->clones = next_inline_clone->next_sibling_clone;
+	    }
+
+	  new_clones = node->clones;
+	  node->clones = NULL;
 
-	  /* Make the next clone be the master clone */
-	  for (n = new_node; n; n = n->next_clone)
-	    n->master_clone = new_node;
+	  /* Copy clone info.  */
+	  next_inline_clone->clone = node->clone;
+
+	  /* Now place it into clone tree at the same level as NODE.  */
+	  next_inline_clone->clone_of = node->clone_of;
+	  next_inline_clone->prev_sibling_clone = NULL;
+	  next_inline_clone->next_sibling_clone = NULL;
+	  if (node->clone_of)
+	    {
+	      if (node->clone_of->clones)
		node->clone_of->clones->prev_sibling_clone = next_inline_clone;
+	      next_inline_clone->next_sibling_clone = node->clone_of->clones;
+	      node->clone_of->clones = next_inline_clone;
+	    }
 
-	  *slot = new_node;
-	  node->next_clone->prev_clone = NULL;
-	}
+	  /* Merge the clone list.  */
+	  if (new_clones)
+	    {
+	      if (!next_inline_clone->clones)
		next_inline_clone->clones = new_clones;
+	      else
+		{
+		  n = next_inline_clone->clones;
+		  while (n->next_sibling_clone)
+		    n = n->next_sibling_clone;
+		  n->next_sibling_clone = new_clones;
+		  new_clones->prev_sibling_clone = n;
+		}
+	    }
+
+	  /* Update clone_of pointers.  */
+	  n = new_clones;
+	  while (n)
+	    {
+	      n->clone_of = next_inline_clone;
+	      n = n->next_sibling_clone;
+	    }
+	}
       else
	{
	  htab_clear_slot (cgraph_hash, slot);
	  kill_body = true;
	}
+
     }
-  else
+  if (node->prev_sibling_clone)
+    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
+  else if (node->clone_of)
+    node->clone_of->clones = node->next_sibling_clone;
+  if (node->next_sibling_clone)
+    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
+  if (node->clones)
     {
-      node->prev_clone->next_clone = node->next_clone;
-      if (node->next_clone)
-	node->next_clone->prev_clone = node->prev_clone;
+      struct cgraph_node *n, *next;
+
+      if (node->clone_of)
+	{
+	  for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
+	    n->clone_of = node->clone_of;
+	  n->clone_of = node->clone_of;
+	  n->next_sibling_clone = node->clone_of->clones;
+	  if (node->clone_of->clones)
+	    node->clone_of->clones->prev_sibling_clone = n;
+	  node->clone_of->clones = node->clones;
+	}
+      else
+	{
+	  /* We are removing node with clones.  This makes clones inconsistent,
	     but assume they will be removed subsequently and just keep clone
	     tree intact.  This can happen in unreachable function removal since
	     we remove unreachable functions in random order, not by bottom-up
	     walk of clone trees.  */
+	  for (n = node->clones; n; n = next)
+	    {
+	      next = n->next_sibling_clone;
+	      n->next_sibling_clone = NULL;
+	      n->prev_sibling_clone = NULL;
+	      n->clone_of = NULL;
+	    }
+	}
+    }
+
+  while (node->same_body)
+    cgraph_remove_same_body_alias (node->same_body);
+
+  if (node->same_comdat_group)
+    {
+      struct cgraph_node *prev;
+      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
+	;
+      if (node->same_comdat_group == prev)
+	prev->same_comdat_group = NULL;
+      else
+	prev->same_comdat_group = node->same_comdat_group;
+      node->same_comdat_group = NULL;
     }
 
   /* While all the clones are removed after being proceeded, the function
@@ -1014,7 +1464,7 @@
   if (!kill_body && *slot)
     {
       struct cgraph_node *n = (struct cgraph_node *) *slot;
-      if (!n->next_clone && !n->global.inlined_to
+      if (!n->clones && !n->clone_of && !n->global.inlined_to
	  && (cgraph_global_info_ready
	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
	kill_body = true;
@@ -1048,6 +1498,21 @@
   free_nodes = node;
 }
 
+/* Remove the node from cgraph together with all inline clones inlined
+   into it.  */
+
+void
+cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
+{
+  struct cgraph_edge *e, *next;
+  for (e = node->callees; e; e = next)
+    {
+      next = e->next_callee;
+      if (!e->inline_failed)
+	cgraph_remove_node_and_inline_clones (e->callee);
+    }
+  cgraph_remove_node (node);
+}
+
 /* Notify finalize_compilation_unit that given node is reachable.  */
 
 void
@@ -1071,9 +1536,19 @@
 cgraph_mark_needed_node (struct cgraph_node *node)
 {
   node->needed = 1;
+  gcc_assert (!node->global.inlined_to);
   cgraph_mark_reachable_node (node);
 }
 
+/* Likewise indicate that a node has its address taken.  */
+
+void
+cgraph_mark_address_taken_node (struct cgraph_node *node)
+{
+  node->address_taken = 1;
+  cgraph_mark_needed_node (node);
+}
+
 /* Return local info for the compiled function.  */
 
 struct cgraph_local_info *
@@ -1113,6 +1588,24 @@
   return &node->rtl;
 }
 
+/* Return a string describing the failure REASON.  */
+
+const char*
+cgraph_inline_failed_string (cgraph_inline_failed_t reason)
+{
+#undef DEFCIFCODE
+#define DEFCIFCODE(code, string)	string,
+
+  static const char *cif_string_table[CIF_N_REASONS] = {
+#include "cif-code.def"
+  };
+
+  /* Signedness of an enum type is implementation defined, so cast it
+     to unsigned before testing.  */
+  gcc_assert ((unsigned) reason < CIF_N_REASONS);
+  return cif_string_table[reason];
+}
+
 /* Return name of the node used in debug output.  */
 const char *
 cgraph_node_name (struct cgraph_node *node)
@@ -1131,24 +1624,35 @@
 dump_cgraph_node (FILE *f, struct cgraph_node *node)
 {
   struct cgraph_edge *edge;
-  fprintf (f, "%s/%i(%i):", cgraph_node_name (node), node->uid, node->pid);
+  fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
	   node->pid);
+  dump_addr (f, " @", (void *)node);
   if (node->global.inlined_to)
     fprintf (f, " (inline copy in %s/%i)",
	     cgraph_node_name (node->global.inlined_to),
	     node->global.inlined_to->uid);
+  if (node->clone_of)
+    fprintf (f, " (clone of %s/%i)",
	     cgraph_node_name (node->clone_of),
	     node->clone_of->uid);
   if (cgraph_function_flags_ready)
     fprintf (f, " availability:%s",
	     cgraph_availability_names [cgraph_function_body_availability (node)]);
-  if (node->master_clone && node->master_clone->uid != node->uid)
-    fprintf (f, "(%i)", node->master_clone->uid);
   if (node->count)
     fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
	     (HOST_WIDEST_INT)node->count);
-  if (node->local.inline_summary.self_insns)
-    fprintf (f, " %i insns", node->local.inline_summary.self_insns);
-  if (node->global.insns && node->global.insns
-      != node->local.inline_summary.self_insns)
-    fprintf (f, " (%i after inlining)", node->global.insns);
+  if (node->local.inline_summary.self_time)
+    fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
	     node->local.inline_summary.time_inlining_benefit);
+  if (node->global.time && node->global.time
+      != node->local.inline_summary.self_time)
+    fprintf (f, " (%i after inlining)", node->global.time);
+  if (node->local.inline_summary.self_size)
+    fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
	     node->local.inline_summary.size_inlining_benefit);
+  if (node->global.size && node->global.size
+      != node->local.inline_summary.self_size)
+    fprintf (f, " (%i after inlining)", node->global.size);
   if (node->local.inline_summary.estimated_self_stack_size)
     fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
   if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
@@ -1157,12 +1661,14 @@
     fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
   if (node->needed)
     fprintf (f, " needed");
+  if (node->address_taken)
+    fprintf (f, " address_taken");
   else if (node->reachable)
     fprintf (f, " reachable");
   if (gimple_has_body_p (node->decl))
     fprintf (f, " body");
-  if (node->output)
-    fprintf (f, " output");
+  if (node->process)
+    fprintf (f, " process");
   if (node->local.local)
     fprintf (f, " local");
   if (node->local.externally_visible)
@@ -1193,6 +1699,8 @@
	fprintf(f, "(inlined) ");
       if (edge->indirect_call)
	fprintf(f, "(indirect) ");
+      if (edge->can_throw_external)
+	fprintf(f, "(can throw external) ");
     }
 
   fprintf (f, "\n  calls: ");
@@ -1212,8 +1720,31 @@
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
       if (edge->loop_nest)
	fprintf (f, "(nested in %i loops) ", edge->loop_nest);
+      if (edge->can_throw_external)
+	fprintf(f, "(can throw external) ");
     }
   fprintf (f, "\n");
+
+  if (node->same_body)
+    {
+      struct cgraph_node *n;
+      fprintf (f, "  aliases & thunks:");
+      for (n = node->same_body; n; n = n->next)
+	{
+	  fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
+	  if (n->thunk.thunk_p)
+	    {
+	      fprintf (f, " (thunk of %s fixed offset %i virtual value %i has "
		       "virtual offset %i",
		       lang_hooks.decl_printable_name (n->thunk.alias, 2),
		       (int)n->thunk.fixed_offset,
		       (int)n->thunk.virtual_value,
		       (int)n->thunk.virtual_offset_p);
+	      fprintf (f, ")");
+	    }
+	}
+      fprintf (f, "\n");
+    }
 }
 
@@ -1300,13 +1831,17 @@
 /* Create clone of E in the node N represented by CALL_EXPR the callgraph.  */
 
 struct cgraph_edge *
 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
-		   gimple call_stmt, gcov_type count_scale, int freq_scale,
-		   int loop_nest, bool update_original)
+		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
+		   int freq_scale, int loop_nest, bool update_original)
 {
   struct cgraph_edge *new_edge;
   gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
-  gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
+  gcov_type freq;
 
+  /* We do not want to ignore loop nest after frequency drops to 0.  */
+  if (!freq_scale)
+    freq_scale = 1;
+  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
   if (freq > CGRAPH_FREQ_MAX)
     freq = CGRAPH_FREQ_MAX;
   new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
@@ -1314,6 +1849,7 @@
 
   new_edge->inline_failed = e->inline_failed;
   new_edge->indirect_call = e->indirect_call;
+  new_edge->lto_stmt_uid = stmt_uid;
   if (update_original)
     {
       e->count -= new_edge->count;
@@ -1332,11 +1868,13 @@
    by node.  */
 struct cgraph_node *
 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
-		   int loop_nest, bool update_original)
+		   int loop_nest, bool update_original,
+		   VEC(cgraph_edge_p,heap) *redirect_callers)
 {
   struct cgraph_node *new_node = cgraph_create_node ();
   struct cgraph_edge *e;
   gcov_type count_scale;
+  unsigned i;
 
   new_node->decl = n->decl;
   new_node->origin = n->origin;
@@ -1347,10 +1885,11 @@
     }
   new_node->analyzed = n->analyzed;
   new_node->local = n->local;
+  new_node->local.externally_visible = false;
   new_node->global = n->global;
   new_node->rtl = n->rtl;
-  new_node->master_clone = n->master_clone;
   new_node->count = count;
+  new_node->clone = n->clone;
   if (n->count)
     {
       if (new_node->count > n->count)
@@ -1367,40 +1906,145 @@
       n->count = 0;
     }
 
-  for (e = n->callees;e; e=e->next_callee)
-    cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,
		       update_original);
+  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+    {
+      /* Redirect calls to the old version node to point to its new
	 version.  */
+      cgraph_redirect_edge_callee (e, new_node);
+    }
+
+
+  for (e = n->callees;e; e=e->next_callee)
+    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);
 
-  new_node->next_clone = n->next_clone;
-  new_node->prev_clone = n;
-  n->next_clone = new_node;
-  if (new_node->next_clone)
-    new_node->next_clone->prev_clone = new_node;
+  new_node->next_sibling_clone = n->clones;
+  if (n->clones)
+    n->clones->prev_sibling_clone = new_node;
+  n->clones = new_node;
+  new_node->clone_of = n;
 
   cgraph_call_node_duplication_hooks (n, new_node);
   return new_node;
 }
 
-/* Return true if N is an master_clone, (see cgraph_master_clone).  */
+/* Create a new name for omp child function.  Returns an identifier.  */
+
+static GTY(()) unsigned int clone_fn_id_num;
+
+static tree
+clone_function_name (tree decl)
+{
+  tree name = DECL_ASSEMBLER_NAME (decl);
+  size_t len = IDENTIFIER_LENGTH (name);
+  char *tmp_name, *prefix;
 
-bool
-cgraph_is_master_clone (struct cgraph_node *n)
-{
-  return (n == cgraph_master_clone (n));
+  prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
+  memcpy (prefix, IDENTIFIER_POINTER (name), len);
+  strcpy (prefix + len, "_clone");
+#ifndef NO_DOT_IN_LABEL
+  prefix[len] = '.';
+#elif !defined NO_DOLLAR_IN_LABEL
+  prefix[len] = '$';
+#endif
+  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
+  return get_identifier (tmp_name);
 }
 
+/* Create callgraph node clone with new declaration.  The actual body will
+   be copied later at compilation stage.
+
+   TODO: after merging in ipa-sra use function call notes instead of
+   args_to_skip bitmap interface.  */
 struct cgraph_node *
-cgraph_master_clone (struct cgraph_node *n)
+cgraph_create_virtual_clone (struct cgraph_node *old_node,
			     VEC(cgraph_edge_p,heap) *redirect_callers,
			     VEC(ipa_replace_map_p,gc) *tree_map,
			     bitmap args_to_skip)
 {
-  enum availability avail = cgraph_function_body_availability (n);
+  tree old_decl = old_node->decl;
+  struct cgraph_node *new_node = NULL;
+  tree new_decl;
+  struct cgraph_node key, **slot;
+
+  gcc_assert (tree_versionable_function_p (old_decl));
+
+  /* Make a new FUNCTION_DECL tree node */
+  if (!args_to_skip)
+    new_decl = copy_node (old_decl);
+  else
+    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
+  DECL_STRUCT_FUNCTION (new_decl) = NULL;
+
+  /* Generate a new name for the new version.  */
+  DECL_NAME (new_decl) = clone_function_name (old_decl);
+  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
+  SET_DECL_RTL (new_decl, NULL);
 
-  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
-    return NULL;
+  new_node = cgraph_clone_node (old_node, old_node->count,
				CGRAPH_FREQ_BASE, 0, false,
				redirect_callers);
+  new_node->decl = new_decl;
+  /* Update the properties.
+     Make clone visible only within this translation unit.  Make sure
+     that is not weak also.
+     ??? We cannot use COMDAT linkage because there is no
+     ABI support for this.  */
+  DECL_EXTERNAL (new_node->decl) = 0;
+  DECL_COMDAT_GROUP (new_node->decl) = 0;
+  TREE_PUBLIC (new_node->decl) = 0;
+  DECL_COMDAT (new_node->decl) = 0;
+  DECL_WEAK (new_node->decl) = 0;
+  new_node->clone.tree_map = tree_map;
+  new_node->clone.args_to_skip = args_to_skip;
+  if (!args_to_skip)
+    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
+  else if (old_node->clone.combined_args_to_skip)
+    {
+      int newi = 0, oldi = 0;
+      tree arg;
+      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
+      struct cgraph_node *orig_node;
+      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
+	;
+      for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
+	{
+	  if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
+	    {
+	      bitmap_set_bit (new_args_to_skip, oldi);
+	      continue;
+	    }
+	  if (bitmap_bit_p (args_to_skip, newi))
+	    bitmap_set_bit (new_args_to_skip, oldi);
+	  newi++;
+	}
+      new_node->clone.combined_args_to_skip = new_args_to_skip;
+    }
+  else
+    new_node->clone.combined_args_to_skip = args_to_skip;
+  new_node->local.externally_visible = 0;
+  new_node->local.local = 1;
+  new_node->lowered = true;
+  new_node->reachable = true;
 
-  if (!n->master_clone)
-    n->master_clone = cgraph_node (n->decl);
+  key.decl = new_decl;
+  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+  gcc_assert (!*slot);
+  *slot = new_node;
+  if (assembler_name_hash)
+    {
+      void **aslot;
+      tree name = DECL_ASSEMBLER_NAME (new_decl);
 
-  return n->master_clone;
+      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
					INSERT);
+      gcc_assert (!*aslot);
+      *aslot = new_node;
+    }
+
+  return new_node;
 }
 
 /* NODE is no longer nested function; update cgraph accordingly.  */
@@ -1429,6 +2073,11 @@
     avail = AVAIL_LOCAL;
   else if (!node->local.externally_visible)
     avail = AVAIL_AVAILABLE;
+  /* Inline functions are safe to be analyzed even if their symbol can
+     be overwritten at runtime.  It is not meaningful to enforce any sane
+     behaviour on replacing inline function by different body.  */
+  else if (DECL_DECLARED_INLINE_P (node->decl))
+    avail = AVAIL_AVAILABLE;
 
   /* If the function can be overwritten, return OVERWRITABLE.  Take
      care at least of two notable extensions - the COMDAT functions
@@ -1438,15 +2087,9 @@
      ??? Does the C++ one definition rule allow us to always return
      AVAIL_AVAILABLE here?  That would be good reason to preserve this
-     hook Similarly deal with extern inline functions - this is again
-     necessary to get C++ shared functions having keyed templates
-     right and in the C extension documentation we probably should
-     document the requirement of both versions of function (extern
-     inline and offline) having same side effect characteristics as
-     good optimization is what this optimization is about.  */
+     bit.  */
 
-  else if (!(*targetm.binds_local_p) (node->decl)
-	   && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
+  else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
     avail = AVAIL_OVERWRITABLE;
   else avail = AVAIL_AVAILABLE;
@@ -1460,7 +2103,7 @@
    GIMPLE.
 
    The function is assumed to be reachable and have address taken (so no
-   API breaking optimizations are performed on it).
+   API breaking optimizations are performed on it).
 
   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */
@@ -1527,6 +2170,91 @@
       current_function_decl = NULL;
       break;
     }
+
+  /* Set a personality if required and we already passed EH lowering.  */
+  if (lowered
+      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
+    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
+}
+
+/* Return true if NODE can be made local for API change.
+   Extern inline functions and C++ COMDAT functions can be made local
+   at the expense of possible code size growth if function is used in multiple
+   compilation units.  */
+bool
+cgraph_node_can_be_local_p (struct cgraph_node *node)
+{
+  return (!node->needed
	  && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
	      || !node->local.externally_visible));
+}
+
+/* Bring NODE local.  */
+void
+cgraph_make_node_local (struct cgraph_node *node)
+{
+  gcc_assert (cgraph_node_can_be_local_p (node));
+  if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
+    {
+      DECL_COMDAT (node->decl) = 0;
+      DECL_COMDAT_GROUP (node->decl) = 0;
+      TREE_PUBLIC (node->decl) = 0;
+      DECL_WEAK (node->decl) = 0;
+      DECL_EXTERNAL (node->decl) = 0;
+      node->local.externally_visible = false;
+      node->local.local = true;
+      gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
+    }
+}
+
+/* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
+   if any to NOTHROW.  */
+
+void
+cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
+{
+  struct cgraph_node *alias;
+  TREE_NOTHROW (node->decl) = nothrow;
+  for (alias = node->same_body; alias; alias = alias->next)
+    TREE_NOTHROW (alias->decl) = nothrow;
+}
+
+/* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
+   if any to READONLY.  */
+
+void
+cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
+{
+  struct cgraph_node *alias;
+  TREE_READONLY (node->decl) = readonly;
+  for (alias = node->same_body; alias; alias = alias->next)
+    TREE_READONLY (alias->decl) = readonly;
+}
+
+/* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
+   if any to PURE.  */
+
+void
+cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
+{
+  struct cgraph_node *alias;
+  DECL_PURE_P (node->decl) = pure;
+  for (alias = node->same_body; alias; alias = alias->next)
+    DECL_PURE_P (alias->decl) = pure;
+}
+
+/* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
+   same_body aliases of NODE if any to LOOPING_CONST_OR_PURE.  */
+
+void
+cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
				       bool looping_const_or_pure)
+{
+  struct cgraph_node *alias;
+  DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
+  for (alias = node->same_body; alias; alias = alias->next)
+    DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
 }
 
 #include "gt-cgraph.h"
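
Editor's note: three of the functions added above (cgraph_set_call_stmt_including_clones, cgraph_create_edge_including_clones and cgraph_update_edges_for_call_stmt) repeat the same stackless pre-order traversal of the new clone tree. The following is a minimal, self-contained sketch of that traversal, not part of the patch; clone_node and walk_clone_tree are hypothetical stand-ins for the real struct cgraph_node, keeping only the three linkage fields the walk actually uses.

#include <stdio.h>

struct clone_node
{
  const char *name;
  struct clone_node *clones;             /* first child clone */
  struct clone_node *next_sibling_clone; /* next sibling in parent's list */
  struct clone_node *clone_of;           /* parent in the clone tree */
};

/* Visit every clone below ORIG without recursion or an explicit stack:
   descend into child clones first, then advance to the next sibling,
   and otherwise climb back up via clone_of until an unvisited sibling
   is found or ORIG is reached again.  */
static void
walk_clone_tree (struct clone_node *orig)
{
  struct clone_node *node = orig->clones;

  if (!node)
    return;
  while (node != orig)
    {
      printf ("visiting %s\n", node->name); /* per-clone work goes here */
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != orig && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != orig)
            node = node->next_sibling_clone;
        }
    }
}

int
main (void)
{
  /* orig has clones a and b; a has clone a1.  */
  struct clone_node a1   = { "a1",   NULL, NULL, NULL };
  struct clone_node b    = { "b",    NULL, NULL, NULL };
  struct clone_node a    = { "a",    &a1,  &b,   NULL };
  struct clone_node orig = { "orig", &a,   NULL, NULL };

  a.clone_of = &orig;
  b.clone_of = &orig;
  a1.clone_of = &a;

  walk_clone_tree (&orig); /* prints a, a1, b */
  return 0;
}

Climbing through clone_of instead of keeping a worklist means the walk needs no allocation, which matters since it runs inside the callgraph-update paths invoked on every statement change.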