Move thunks out of cgraph_node

this patch moves thunk_info out of cgraph_node into a symbol summary.
I also moved it to a separate header file since cgraph.h became really too
fat.  I plan to continue with similar breakup in order to clean up interfaces
and reduce WPA memory footprint (symbol table now consumes more memory than
trees)

gcc/ChangeLog:

2020-10-23  Jan Hubicka  <hubicka@ucw.cz>

	* Makefile.in: Add symtab-thunks.o
	(GTFILES): Add symtab-thunks.h and symtab-thunks.cc; remove cgraphunit.c
	* cgraph.c: Include symtab-thunks.h.
	(cgraph_node::create_thunk): Update
	(symbol_table::create_edge): Update
	(cgraph_node::dump): Update
	(cgraph_node::call_for_symbol_thunks_and_aliases): Update
	(set_nothrow_flag_1): Update
	(set_malloc_flag_1): Update
	(set_const_flag_1): Update
	(collect_callers_of_node_1): Update
	(clone_of_p): Update
	(cgraph_node::verify_node): Update
	(cgraph_node::function_symbol): Update
	(cgraph_c_finalize): Call thunk_info::release.
	(cgraph_node::has_thunk_p): Update
	(cgraph_node::former_thunk_p): Move here from cgraph.h; reimplement.
	* cgraph.h (struct cgraph_thunk_info): Rename to symtab-thunks.h.
	(cgraph_node): Remove thunk field; add thunk bitfield.
	(cgraph_node::expand_thunk): Move to symtab-thunks.h
	(symtab_thunks_cc_finalize): Declare.
	(cgraph_node::has_gimple_body_p): Update.
	(cgraph_node::former_thunk_p): Update.
	* cgraphclones.c: Include symtab-thunks.h.
	(duplicate_thunk_for_node): Update.
	(cgraph_edge::redirect_callee_duplicating_thunks): Update.
	(cgraph_node::expand_all_artificial_thunks): Update.
	(cgraph_node::create_edge_including_clones): Update.
	* cgraphunit.c: Include symtab-thunks.h.
	(vtable_entry_type): Move to symtab-thunks.c.
	(cgraph_node::analyze): Update.
	(analyze_functions): Update.
	(mark_functions_to_output): Update.
	(thunk_adjust): Move to symtab-thunks.c
	(cgraph_node::expand_thunk): Move to symtab-thunks.c
	(cgraph_node::assemble_thunks_and_aliases): Update.
	(output_in_order): Update.
	(cgraphunit_c_finalize): Do not clear vtable_entry_type.
	(cgraph_node::create_wrapper): Update.
	* gengtype.c (open_base_files): Add symtab-thunks.h
	* ipa-comdats.c (propagate_comdat_group): Update.
	(ipa_comdats): Update.
	* ipa-cp.c (determine_versionability): Update.
	(gather_caller_stats): Update.
	(count_callers): Update
	(set_single_call_flag): Update
	(initialize_node_lattices): Update
	(call_passes_through_thunk_p): Update
	(call_passes_through_thunk): Update
	(propagate_constants_across_call): Update
	(find_more_scalar_values_for_callers_subset): Update
	(has_undead_caller_from_outside_scc_p): Update
	* ipa-fnsummary.c (evaluate_properties_for_edge): Update.
	(compute_fn_summary): Update.
	(inline_analyze_function): Update.
	* ipa-icf.c: Include symtab-thunks.h.
	(sem_function::equals_wpa): Update.
	(redirect_all_callers): Update.
	(sem_function::init): Update.
	(sem_function::parse): Update.
	* ipa-inline-transform.c: Include symtab-thunks.h.
	(inline_call): Update.
	(save_inline_function_body): Update.
	(preserve_function_body_p): Update.
	* ipa-inline.c (inline_small_functions): Update.
	* ipa-polymorphic-call.c: Include alloc-pool.h, symbol-summary.h,
	symtab-thunks.h
	(ipa_polymorphic_call_context::ipa_polymorphic_call_context): Update.
	* ipa-pure-const.c: Include symtab-thunks.h.
	(analyze_function): Update.
	* ipa-sra.c (check_for_caller_issues): Update.
	* ipa-utils.c (ipa_reverse_postorder): Update.
	(ipa_merge_profiles): Update.
	* ipa-visibility.c (non_local_p): Update.
	(cgraph_node::local_p): Update.
	(function_and_variable_visibility): Update.
	* ipa.c (symbol_table::remove_unreachable_nodes): Update.
	* lto-cgraph.c: Include alloc-pool.h, symbol-summary.h and
	symtab-thunks.h
	(lto_output_edge): Update.
	(lto_output_node): Update.
	(compute_ltrans_boundary): Update.
	(output_symtab): Update.
	(verify_node_partition): Update.
	(input_overwrite_node): Update.
	(input_node): Update.
	* lto-streamer-in.c (fixup_call_stmt_edges): Update.
	* symtab-thunks.cc: New file.
	* symtab-thunks.h: New file.
	* toplev.c (toplev::finalize): Call symtab_thunks_cc_finalize.
	* trans-mem.c (ipa_tm_mayenterirr_function): Update.
	(ipa_tm_execute): Update.
	* tree-inline.c (expand_call_inline): Update.
	* tree-nested.c (create_nesting_tree): Update.
	(convert_all_function_calls): Update.
	(gimplify_all_functions): Update.
	* tree-profile.c (tree_profiling): Update.
	* tree-ssa-structalias.c (associate_varinfo_to_alias): Update.
	* tree.c (free_lang_data_in_decl): Update.
	* value-prof.c (init_node_map): Update.

gcc/c-family/ChangeLog:

2020-10-23  Jan Hubicka  <hubicka@ucw.cz>

	* c-common.c (c_common_finalize_early_debug): Update for new thunk api.

gcc/d/ChangeLog:

2020-10-23  Jan Hubicka  <hubicka@ucw.cz>

	* decl.cc (finish_thunk): Update for new thunk api.

gcc/lto/ChangeLog:

2020-10-23  Jan Hubicka  <hubicka@ucw.cz>

	* lto-partition.c (add_symbol_to_partition_1): Update for new thunk
	api.
This commit is contained in:
Jan Hubicka 2020-10-23 21:44:23 +02:00
parent 83f83ddfe0
commit 67f3791f7d
33 changed files with 1045 additions and 761 deletions

View File

@ -1297,6 +1297,7 @@ OBJS = \
cfgloopmanip.o \
cfgrtl.o \
symtab.o \
symtab-thunks.o \
cgraph.o \
cgraphbuild.o \
cgraphunit.o \
@ -2591,6 +2592,7 @@ GTFILES = $(CPPLIB_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
$(srcdir)/function-abi.h \
$(srcdir)/output.h $(srcdir)/cfgloop.h $(srcdir)/cfg.h $(srcdir)/profile-count.h \
$(srcdir)/cselib.h $(srcdir)/basic-block.h $(srcdir)/ipa-ref.h $(srcdir)/cgraph.h \
$(srcdir)/symtab-thunks.h $(srcdir)/symtab-thunks.cc \
$(srcdir)/reload.h $(srcdir)/caller-save.c $(srcdir)/symtab.c \
$(srcdir)/alias.c $(srcdir)/bitmap.c $(srcdir)/cselib.c $(srcdir)/cgraph.c \
$(srcdir)/ipa-prop.c $(srcdir)/ipa-cp.c $(srcdir)/ipa-utils.h \
@ -2629,7 +2631,7 @@ GTFILES = $(CPPLIB_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
$(srcdir)/omp-offload.c \
$(srcdir)/omp-general.c \
$(srcdir)/omp-low.c \
$(srcdir)/targhooks.c $(out_file) $(srcdir)/passes.c $(srcdir)/cgraphunit.c \
$(srcdir)/targhooks.c $(out_file) $(srcdir)/passes.c \
$(srcdir)/cgraphclones.c \
$(srcdir)/tree-phinodes.c \
$(srcdir)/tree-ssa-alias.h \

View File

@ -9134,7 +9134,7 @@ c_common_finalize_early_debug (void)
functions that are still reachable at this point. */
struct cgraph_node *cnode;
FOR_EACH_FUNCTION (cnode)
if (!cnode->alias && !cnode->thunk.thunk_p
if (!cnode->alias && !cnode->thunk
&& (cnode->has_gimple_body_p () || !DECL_IS_BUILTIN (cnode->decl)))
(*debug_hooks->early_global_decl) (cnode->decl);
}

View File

@ -65,6 +65,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-into-ssa.h"
#include "ipa-inline.h"
#include "tree-nested.h"
#include "symtab-thunks.h"
/* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
#include "tree-pass.h"
@ -629,15 +630,17 @@ cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
? virtual_value == wi::to_wide (virtual_offset)
: virtual_value == 0);
node->thunk.fixed_offset = fixed_offset;
node->thunk.virtual_value = virtual_value;
node->thunk.indirect_offset = indirect_offset;
node->thunk.alias = real_alias;
node->thunk.this_adjusting = this_adjusting;
node->thunk.virtual_offset_p = virtual_offset != NULL;
node->thunk.thunk_p = true;
node->thunk = true;
node->definition = true;
thunk_info *i = thunk_info::get_create (node);
i->fixed_offset = fixed_offset;
i->virtual_value = virtual_value;
i->indirect_offset = indirect_offset;
i->alias = real_alias;
i->this_adjusting = this_adjusting;
i->virtual_offset_p = virtual_offset != NULL;
return node;
}
@ -910,7 +913,7 @@ symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
= decl_maybe_in_construction_p (NULL, NULL, call_stmt,
caller->decl);
else
edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
edge->in_polymorphic_cdtor = caller->thunk;
if (callee)
caller->calls_declare_variant_alt |= callee->declare_variant_alt;
@ -2173,37 +2176,17 @@ cgraph_node::dump (FILE *f)
fprintf (f, "\n");
if (thunk.thunk_p)
if (thunk)
{
fprintf (f, " Thunk");
if (thunk.alias)
fprintf (f, " of %s (asm:%s)",
lang_hooks.decl_printable_name (thunk.alias, 2),
IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
"has virtual offset %i\n",
(int)thunk.fixed_offset,
(int)thunk.virtual_value,
(int)thunk.indirect_offset,
(int)thunk.virtual_offset_p);
thunk_info::get (this)->dump (f);
}
else if (former_thunk_p ())
fprintf (f, " Former thunk fixed offset %i virtual value %i "
"indirect_offset %i has virtual offset %i\n",
(int)thunk.fixed_offset,
(int)thunk.virtual_value,
(int)thunk.indirect_offset,
(int)thunk.virtual_offset_p);
if (alias && thunk.alias
&& DECL_P (thunk.alias))
{
fprintf (f, " Alias of %s",
lang_hooks.decl_printable_name (thunk.alias, 2));
if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
fprintf (f, " (asm:%s)",
IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
fprintf (f, "\n");
fprintf (f, " Former thunk ");
thunk_info::get (this)->dump (f);
}
else gcc_checking_assert (!thunk_info::get (this));
fprintf (f, " Called by: ");
@ -2444,11 +2427,11 @@ cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
if (avail <= AVAIL_INTERPOSABLE)
return false;
for (e = callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p
if (e->caller->thunk
&& (include_overwritable
|| e->caller->get_availability () > AVAIL_INTERPOSABLE)
&& !(exclude_virtual_thunks
&& e->caller->thunk.virtual_offset_p))
&& thunk_info::get (e->caller)->virtual_offset_p))
if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
include_overwritable,
exclude_virtual_thunks))
@ -2522,7 +2505,7 @@ set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
set_nothrow_flag_1 (alias, nothrow, non_call, changed);
}
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p
if (e->caller->thunk
&& (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
}
@ -2571,7 +2554,7 @@ set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
}
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p
if (e->caller->thunk
&& (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
set_malloc_flag_1 (e->caller, malloc_p, changed);
}
@ -2690,13 +2673,13 @@ set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
set_const_flag_1 (alias, set_const, looping, changed);
}
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p
if (e->caller->thunk
&& (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
{
/* Virtual thunks access virtual offset in the vtable, so they can
only be pure, never const. */
if (set_const
&& (e->caller->thunk.virtual_offset_p
&& (thunk_info::get (e->caller)->virtual_offset_p
|| !node->binds_to_current_def_p (e->caller)))
*changed |= e->caller->set_pure_flag (true, looping);
else
@ -3040,7 +3023,7 @@ collect_callers_of_node_1 (cgraph_node *node, void *data)
if (avail > AVAIL_INTERPOSABLE)
for (cs = node->callers; cs != NULL; cs = cs->next_caller)
if (!cs->indirect_inlining_edge
&& !cs->caller->thunk.thunk_p)
&& !cs->caller->thunk)
redirect_callers->safe_push (cs);
return false;
}
@ -3071,7 +3054,7 @@ clone_of_p (cgraph_node *node, cgraph_node *node2)
|| node2->former_clone_of == node->decl)
return true;
if (!node->thunk.thunk_p && !node->former_thunk_p ())
if (!node->thunk && !node->former_thunk_p ())
{
while (node2 && node->decl != node2->decl)
node2 = node2->clone_of;
@ -3081,9 +3064,9 @@ clone_of_p (cgraph_node *node, cgraph_node *node2)
/* There are no virtual clones of thunks so check former_clone_of or if we
might have skipped thunks because this adjustments are no longer
necessary. */
while (node->thunk.thunk_p || node->former_thunk_p ())
while (node->thunk || node->former_thunk_p ())
{
if (!node->thunk.this_adjusting)
if (!thunk_info::get (node)->this_adjusting)
return false;
/* In case of instrumented expanded thunks, which can have multiple calls
in them, we do not know how to continue and just have to be
@ -3647,7 +3630,7 @@ cgraph_node::verify_node (void)
}
}
if (analyzed && thunk.thunk_p)
if (analyzed && thunk)
{
if (!callees)
{
@ -3831,7 +3814,7 @@ cgraph_node::function_symbol (enum availability *availability,
{
cgraph_node *node = ultimate_alias_target (availability, ref);
while (node->thunk.thunk_p)
while (node->thunk)
{
enum availability a;
@ -3858,7 +3841,7 @@ cgraph_node::function_or_virtual_thunk_symbol
{
cgraph_node *node = ultimate_alias_target (availability, ref);
while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
while (node->thunk && !thunk_info::get (node)->virtual_offset_p)
{
enum availability a;
@ -4007,6 +3990,7 @@ void
cgraph_c_finalize (void)
{
nested_function_info::release ();
thunk_info::release ();
symtab = NULL;
x_cgraph_nodes_queue = NULL;
@ -4042,7 +4026,7 @@ bool
cgraph_node::has_thunk_p (cgraph_node *node, void *)
{
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p)
if (e->caller->thunk)
return true;
return false;
}
@ -4178,6 +4162,21 @@ cgraph_c_tests ()
} // namespace selftest
/* Return true if this node represents a former, i.e. an expanded, thunk.
A former thunk is no longer flagged as a thunk (the THUNK bit was cleared
when it was expanded to gimple), but its thunk_info summary entry is kept
so the original adjustment parameters remain available. */
bool
cgraph_node::former_thunk_p (void)
{
/* A live thunk is not a *former* thunk.  */
if (thunk)
return false;
/* No summary entry at all: this node was never a thunk.  */
thunk_info *i = thunk_info::get (this);
if (!i)
return false;
/* A surviving entry must describe a non-trivial adjustment; a trivial
(all-zero) entry would have been removed at expansion time.  */
gcc_checking_assert (i->fixed_offset || i->virtual_offset_p
|| i->indirect_offset);
return true;
}
#endif /* CHECKING_P */
#include "gt-cgraph.h"

View File

@ -681,52 +681,6 @@ extern const char * const cgraph_availability_names[];
extern const char * const ld_plugin_symbol_resolution_names[];
extern const char * const tls_model_names[];
/* Sub-structure of cgraph_node. Holds information about thunk, used only for
same body aliases.
Thunks are basically wrappers around methods which are introduced in case
of multiple inheritance in order to adjust the value of the "this" pointer
or of the returned value.
In the case of this-adjusting thunks, each back-end can override the
can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
(with a tail call for instance) directly as assembly. For the default hook
or for the case where the can_output_mi_thunk hooks return false, the thunk
is gimplified and lowered using the regular machinery. */
struct GTY(()) cgraph_thunk_info {
/* Offset used to adjust "this".  Presumably in bytes — confirm against the
front end that creates the thunk.  */
HOST_WIDE_INT fixed_offset;
/* Offset in the virtual table to get the offset to adjust "this". Valid iff
VIRTUAL_OFFSET_P is true. */
HOST_WIDE_INT virtual_value;
/* Offset from "this" to get the offset to adjust "this". Zero means: this
offset is to be ignored. */
HOST_WIDE_INT indirect_offset;
/* Thunk target, i.e. the method that this thunk wraps. Depending on the
TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
tree alias;
/* Nonzero for a "this" adjusting thunk and zero for a result adjusting
thunk. */
bool this_adjusting;
/* If true, this thunk is what we call a virtual thunk. In this case:
* for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
done, add to the result the offset found in the vtable at:
vptr + VIRTUAL_VALUE
* for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
the virtual one. */
bool virtual_offset_p;
/* Set to true when alias node (the cgraph_node to which this struct belong)
is a thunk. Access to any other fields is invalid if this is false. */
bool thunk_p;
};
/* Represent which DECL tree (or reference to such tree)
will be replaced by another tree while versioning. */
struct GTY(()) ipa_replace_map
@ -925,10 +879,11 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
next_sibling_clone (NULL), prev_sibling_clone (NULL), clones (NULL),
clone_of (NULL), call_site_hash (NULL), former_clone_of (NULL),
simdclone (NULL), simd_clones (NULL), ipa_transforms_to_apply (vNULL),
inlined_to (NULL), rtl (NULL), clone (), thunk (),
inlined_to (NULL), rtl (NULL), clone (),
count (profile_count::uninitialized ()),
count_materialization_scale (REG_BR_PROB_BASE), profile_id (0),
unit_id (0), tp_first_run (0), used_as_abstract_origin (false),
unit_id (0), tp_first_run (0), thunk (false),
used_as_abstract_origin (false),
lowered (false), process (false), frequency (NODE_FREQUENCY_NORMAL),
only_called_at_startup (false), only_called_at_exit (false),
tm_clone (false), dispatcher_function (false), calls_comdat_local (false),
@ -1078,7 +1033,7 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
/* Add thunk alias into callgraph. The alias declaration is ALIAS and it
aliases DECL with an adjustments made into the first parameter.
See comments in struct cgraph_thunk_info for detail on the parameters. */
See comments in struct symtab-thunks.h for detail on the parameters. */
cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
HOST_WIDE_INT fixed_offset,
HOST_WIDE_INT virtual_value,
@ -1099,13 +1054,6 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
cgraph_node *ultimate_alias_target (availability *availability = NULL,
symtab_node *ref = NULL);
/* Expand thunk NODE to gimple if possible.
When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
no assembler is produced.
When OUTPUT_ASM_THUNK is true, also produce assembler for
thunks that are not lowered. */
bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
/* Call expand_thunk on all callers that are thunks and analyze those
nodes that were expanded. */
void expand_all_artificial_thunks ();
@ -1324,7 +1272,7 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
inline bool has_gimple_body_p (void);
/* Return true if this node represents a former, i.e. an expanded, thunk. */
inline bool former_thunk_p (void);
bool former_thunk_p (void);
/* Check if function calls comdat local. This is used to recompute
calls_comdat_local flag after function transformations. */
@ -1462,7 +1410,6 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
struct cgraph_rtl_info *rtl;
cgraph_clone_info clone;
cgraph_thunk_info thunk;
/* Expected number of executions: calculated in profile.c. */
profile_count count;
@ -1476,6 +1423,8 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
/* Time profiler: first run of function. */
int tp_first_run;
/* True when symbol is a thunk. */
unsigned thunk : 1;
/* Set when decl is an abstract function pointed to by the
ABSTRACT_DECL_ORIGIN of a reachable function. */
unsigned used_as_abstract_origin : 1;
@ -2241,6 +2190,10 @@ struct asmname_hasher : ggc_ptr_hash <symtab_node>
static bool equal (symtab_node *n, const_tree t);
};
struct thunk_info;
template <class T> class function_summary;
typedef function_summary <thunk_info *> thunk_summary;
class GTY((tag ("SYMTAB"))) symbol_table
{
public:
@ -2257,6 +2210,7 @@ public:
function_flags_ready (false), cpp_implicit_aliases_done (false),
section_hash (NULL), assembler_name_hash (NULL), init_priority_hash (NULL),
dump_file (NULL), ipa_clones_dump_file (NULL), cloned_nodes (),
m_thunks (NULL),
m_first_edge_removal_hook (NULL), m_first_cgraph_removal_hook (NULL),
m_first_edge_duplicated_hook (NULL), m_first_cgraph_duplicated_hook (NULL),
m_first_cgraph_insertion_hook (NULL), m_first_varpool_insertion_hook (NULL),
@ -2538,6 +2492,9 @@ public:
hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
/* Thunk annotations. */
thunk_summary *m_thunks;
private:
/* Allocate a cgraph_edge structure and fill it with data according to the
parameters of which only CALLEE can be NULL (when creating an indirect
@ -2612,6 +2569,9 @@ cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
void cgraphunit_c_finalize (void);
int tp_first_run_node_cmp (const void *pa, const void *pb);
/* In symtab-thunks.cc */
void symtab_thunks_cc_finalize (void);
/* Initialize datastructures so DECL is a function in lowered gimple form.
IN_SSA is true if the gimple is in SSA. */
basic_block init_lowered_empty_function (tree, bool, profile_count);
@ -3082,18 +3042,7 @@ symbol_table::next_function_with_gimple_body (cgraph_node *node)
inline bool
cgraph_node::has_gimple_body_p (void)
{
return definition && !thunk.thunk_p && !alias;
}
/* Return true if this node represents a former, i.e. an expanded, thunk. */
inline bool
cgraph_node::former_thunk_p (void)
{
return (!thunk.thunk_p
&& (thunk.fixed_offset
|| thunk.virtual_offset_p
|| thunk.indirect_offset));
return definition && !thunk && !alias;
}
/* Walk all functions with body defined. */

View File

@ -85,6 +85,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"
/* Create clone of edge in the node N represented by CALL_EXPR
the callgraph. */
@ -183,28 +184,28 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
cgraph_node *new_thunk, *thunk_of;
thunk_of = thunk->callees->callee->ultimate_alias_target ();
if (thunk_of->thunk.thunk_p)
if (thunk_of->thunk)
node = duplicate_thunk_for_node (thunk_of, node);
if (!DECL_ARGUMENTS (thunk->decl))
thunk->get_untransformed_body ();
thunk_info *i = thunk_info::get (thunk);
cgraph_edge *cs;
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p
&& cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
&& cs->caller->thunk.virtual_value == thunk->thunk.virtual_value
&& cs->caller->thunk.indirect_offset == thunk->thunk.indirect_offset
&& cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
&& cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p)
return cs->caller;
if (cs->caller->thunk)
{
thunk_info *i2 = thunk_info::get (cs->caller);
if (*i2 == *i)
return cs->caller;
}
tree new_decl;
if (node->clone.param_adjustments)
{
/* We do not need to duplicate this_adjusting thunks if we have removed
this. */
if (thunk->thunk.this_adjusting
if (i->this_adjusting
&& !node->clone.param_adjustments->first_param_intact_p ())
return node;
@ -256,7 +257,7 @@ void
cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
{
cgraph_node *orig_to = callee->ultimate_alias_target ();
if (orig_to->thunk.thunk_p)
if (orig_to->thunk)
n = duplicate_thunk_for_node (orig_to, n);
redirect_callee (n);
@ -270,14 +271,14 @@ cgraph_node::expand_all_artificial_thunks ()
{
cgraph_edge *e;
for (e = callers; e;)
if (e->caller->thunk.thunk_p)
if (e->caller->thunk)
{
cgraph_node *thunk = e->caller;
e = e->next_caller;
if (thunk->expand_thunk (false, false))
if (expand_thunk (thunk, false, false))
{
thunk->thunk.thunk_p = false;
thunk->thunk = false;
thunk->analyze ();
ipa_analyze_node (thunk);
inline_analyze_function (thunk);
@ -812,7 +813,7 @@ cgraph_node::create_edge_including_clones (cgraph_node *callee,
if (node)
while (node != this)
/* Thunk clones do not get updated while copying inline function body. */
if (!node->thunk.thunk_p)
if (!node->thunk)
{
cgraph_edge *edge = node->get_edge (old_stmt);

View File

@ -207,6 +207,7 @@ along with GCC; see the file COPYING3. If not see
#include "attribs.h"
#include "ipa-inline.h"
#include "omp-offload.h"
#include "symtab-thunks.h"
/* Queue of cgraph nodes scheduled to be added into cgraph. This is a
secondary queue used during optimization to accommodate passes that
@ -217,9 +218,6 @@ static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);
/* Used for vtable lookup in thunk adjusting. */
static GTY (()) tree vtable_entry_type;
/* Return true if this symbol is a function from the C frontend specified
directly in RTL form (with "__RTL"). */
@ -619,9 +617,10 @@ cgraph_node::analyze (void)
location_t saved_loc = input_location;
input_location = DECL_SOURCE_LOCATION (decl);
if (thunk.thunk_p)
if (thunk)
{
cgraph_node *t = cgraph_node::get (thunk.alias);
thunk_info *info = thunk_info::get (this);
cgraph_node *t = cgraph_node::get (info->alias);
create_edge (t, NULL, t->count);
callees->can_throw_external = !TREE_NOTHROW (t->decl);
@ -635,8 +634,8 @@ cgraph_node::analyze (void)
if (!t->analyzed && t->definition)
t->analyze ();
}
bool ret = expand_thunk (false, false);
thunk.alias = NULL;
bool ret = expand_thunk (this, false, false);
thunk_info::get (this)->alias = NULL;
if (!ret)
return;
}
@ -1222,7 +1221,7 @@ analyze_functions (bool first_time)
See gcc.c-torture/compile/20011119-1.c */
if (!DECL_STRUCT_FUNCTION (decl)
&& !cnode->alias
&& !cnode->thunk.thunk_p
&& !cnode->thunk
&& !cnode->dispatcher_function)
{
cnode->reset ();
@ -1377,10 +1376,10 @@ analyze_functions (bool first_time)
if (cnode->definition && !gimple_has_body_p (decl)
&& !cnode->alias
&& !cnode->thunk.thunk_p)
&& !cnode->thunk)
cnode->reset ();
gcc_assert (!cnode->definition || cnode->thunk.thunk_p
gcc_assert (!cnode->definition || cnode->thunk
|| cnode->alias
|| gimple_has_body_p (decl)
|| cnode->native_rtl_p ());
@ -1611,7 +1610,7 @@ mark_functions_to_output (void)
always inlined, as well as those that are reachable from
outside the current compilation unit. */
if (node->analyzed
&& !node->thunk.thunk_p
&& !node->thunk
&& !node->alias
&& !node->inlined_to
&& !TREE_ASM_WRITTEN (decl)
@ -1624,7 +1623,7 @@ mark_functions_to_output (void)
for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if (!next->thunk.thunk_p && !next->alias
if (!next->thunk && !next->alias
&& !next->comdat_local_p ())
next->process = 1;
}
@ -1733,487 +1732,6 @@ init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
return bb;
}
/* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
it is non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and zero
for a result adjusting thunk.
BSI is the iterator after which the generated GIMPLE statements are
inserted.  Returns a fresh SSA-like temporary holding the adjusted
pointer value.  */
tree
thunk_adjust (gimple_stmt_iterator * bsi,
tree ptr, bool this_adjusting,
HOST_WIDE_INT fixed_offset, tree virtual_offset,
HOST_WIDE_INT indirect_offset)
{
gassign *stmt;
tree ret;
/* For a this-adjusting thunk the constant adjustment is applied first,
before any vtable-based lookups below.  */
if (this_adjusting
&& fixed_offset != 0)
{
stmt = gimple_build_assign
(ptr, fold_build_pointer_plus_hwi_loc (input_location,
ptr,
fixed_offset));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
}
/* Lazily build the global vtable-entry pointer type the first time any
vtable or indirect lookup is needed.  */
if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
{
tree vfunc_type = make_node (FUNCTION_TYPE);
TREE_TYPE (vfunc_type) = integer_type_node;
TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
layout_type (vfunc_type);
vtable_entry_type = build_pointer_type (vfunc_type);
}
/* If there's a virtual offset, look up that value in the vtable and
adjust the pointer again. */
if (virtual_offset)
{
tree vtabletmp;
tree vtabletmp2;
tree vtabletmp3;
vtabletmp =
create_tmp_reg (build_pointer_type
(build_pointer_type (vtable_entry_type)), "vptr");
/* The vptr is always at offset zero in the object. */
stmt = gimple_build_assign (vtabletmp,
build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
ptr));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Form the vtable address. */
vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
"vtableaddr");
stmt = gimple_build_assign (vtabletmp2,
build_simple_mem_ref (vtabletmp));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Find the entry with the vcall offset. */
stmt = gimple_build_assign (vtabletmp2,
fold_build_pointer_plus_loc (input_location,
vtabletmp2,
virtual_offset));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Get the offset itself. */
vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
"vcalloffset");
stmt = gimple_build_assign (vtabletmp3,
build_simple_mem_ref (vtabletmp2));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Adjust the `this' pointer. */
ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
GSI_CONTINUE_LINKING);
}
/* Likewise for an offset that is stored in the object that contains the
vtable. */
if (indirect_offset != 0)
{
tree offset_ptr, offset_tree;
/* Get the address of the offset. */
offset_ptr
= create_tmp_reg (build_pointer_type
(build_pointer_type (vtable_entry_type)),
"offset_ptr");
stmt = gimple_build_assign (offset_ptr,
build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
ptr));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
stmt = gimple_build_assign
(offset_ptr,
fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
indirect_offset));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Get the offset itself. */
offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
"offset");
stmt = gimple_build_assign (offset_tree,
build_simple_mem_ref (offset_ptr));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Adjust the `this' pointer. */
ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
GSI_CONTINUE_LINKING);
}
/* For result-adjusting thunks the constant adjustment comes last, after
the virtual/indirect ones above.  */
if (!this_adjusting
&& fixed_offset != 0)
/* Adjust the pointer by the constant. */
{
tree ptrtmp;
if (VAR_P (ptr))
ptrtmp = ptr;
else
{
ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
stmt = gimple_build_assign (ptrtmp, ptr);
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
}
ptr = fold_build_pointer_plus_hwi_loc (input_location,
ptrtmp, fixed_offset);
}
/* Emit the statement and gimplify the adjustment expression. */
ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
stmt = gimple_build_assign (ret, ptr);
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
return ret;
}
/* Expand thunk NODE to gimple if possible.
When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
no assembler is produced.
When OUTPUT_ASM_THUNK is true, also produce assembler for
thunks that are not lowered. */
bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
bool this_adjusting = thunk.this_adjusting;
HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
HOST_WIDE_INT virtual_value = thunk.virtual_value;
HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
tree virtual_offset = NULL;
tree alias = callees->callee->decl;
tree thunk_fndecl = decl;
tree a;
if (!force_gimple_thunk
&& this_adjusting
&& indirect_offset == 0
&& !DECL_EXTERNAL (alias)
&& !DECL_STATIC_CHAIN (alias)
&& targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
virtual_value, alias))
{
tree fn_block;
tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
if (!output_asm_thunks)
{
analyzed = true;
return false;
}
if (in_lto_p)
get_untransformed_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
/* Ensure thunks are emitted in their correct sections. */
resolve_unique_section (thunk_fndecl, 0,
flag_function_sections);
DECL_RESULT (thunk_fndecl)
= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
RESULT_DECL, 0, restype);
DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
/* The back end expects DECL_INITIAL to contain a BLOCK, so we
create one. */
fn_block = make_node (BLOCK);
BLOCK_VARS (fn_block) = a;
DECL_INITIAL (thunk_fndecl) = fn_block;
BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
allocate_struct_function (thunk_fndecl, false);
init_function_start (thunk_fndecl);
cfun->is_thunk = 1;
insn_locations_init ();
set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
prologue_location = curr_insn_location ();
targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
fixed_offset, virtual_value, alias);
insn_locations_finalize ();
init_insn_lengths ();
free_after_compilation (cfun);
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
thunk.thunk_p = false;
analyzed = false;
}
else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
{
error ("generic thunk code fails for method %qD which uses %<...%>",
thunk_fndecl);
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
analyzed = true;
return false;
}
else
{
tree restype;
basic_block bb, then_bb, else_bb, return_bb;
gimple_stmt_iterator bsi;
int nargs = 0;
tree arg;
int i;
tree resdecl;
tree restmp = NULL;
gcall *call;
greturn *ret;
bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
/* We may be called from expand_thunk that releases body except for
DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
if (in_lto_p && !force_gimple_thunk)
get_untransformed_body ();
/* We need to force DECL_IGNORED_P when the thunk is created
after early debug was run. */
if (force_gimple_thunk)
DECL_IGNORED_P (thunk_fndecl) = 1;
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
/* Ensure thunks are emitted in their correct sections. */
resolve_unique_section (thunk_fndecl, 0,
flag_function_sections);
bitmap_obstack_initialize (NULL);
if (thunk.virtual_offset_p)
virtual_offset = size_int (virtual_value);
/* Build the return declaration for the function. */
restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
{
resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
DECL_ARTIFICIAL (resdecl) = 1;
DECL_IGNORED_P (resdecl) = 1;
DECL_CONTEXT (resdecl) = thunk_fndecl;
DECL_RESULT (thunk_fndecl) = resdecl;
}
else
resdecl = DECL_RESULT (thunk_fndecl);
profile_count cfg_count = count;
if (!cfg_count.initialized_p ())
cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
bb = then_bb = else_bb = return_bb
= init_lowered_empty_function (thunk_fndecl, true, cfg_count);
bsi = gsi_start_bb (bb);
/* Build call to the function being thunked. */
if (!VOID_TYPE_P (restype)
&& (!alias_is_noreturn
|| TREE_ADDRESSABLE (restype)
|| TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
{
if (DECL_BY_REFERENCE (resdecl))
{
restmp = gimple_fold_indirect_ref (resdecl);
if (!restmp)
restmp = build2 (MEM_REF,
TREE_TYPE (TREE_TYPE (resdecl)),
resdecl,
build_int_cst (TREE_TYPE (resdecl), 0));
}
else if (!is_gimple_reg_type (restype))
{
if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
{
restmp = resdecl;
if (VAR_P (restmp))
{
add_local_decl (cfun, restmp);
BLOCK_VARS (DECL_INITIAL (current_function_decl))
= restmp;
}
}
else
restmp = create_tmp_var (restype, "retval");
}
else
restmp = create_tmp_reg (restype, "retval");
}
for (arg = a; arg; arg = DECL_CHAIN (arg))
nargs++;
auto_vec<tree> vargs (nargs);
i = 0;
arg = a;
if (this_adjusting)
{
vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
virtual_offset, indirect_offset));
arg = DECL_CHAIN (a);
i = 1;
}
if (nargs)
for (; i < nargs; i++, arg = DECL_CHAIN (arg))
{
tree tmp = arg;
DECL_NOT_GIMPLE_REG_P (arg) = 0;
if (!is_gimple_val (arg))
{
tmp = create_tmp_reg (TYPE_MAIN_VARIANT
(TREE_TYPE (arg)), "arg");
gimple *stmt = gimple_build_assign (tmp, arg);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
}
vargs.quick_push (tmp);
}
call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
callees->call_stmt = call;
gimple_call_set_from_thunk (call, true);
if (DECL_STATIC_CHAIN (alias))
{
tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
tree type = TREE_TYPE (p);
tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
PARM_DECL, create_tmp_var_name ("CHAIN"),
type);
DECL_ARTIFICIAL (decl) = 1;
DECL_IGNORED_P (decl) = 1;
TREE_USED (decl) = 1;
DECL_CONTEXT (decl) = thunk_fndecl;
DECL_ARG_TYPE (decl) = type;
TREE_READONLY (decl) = 1;
struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
sf->static_chain_decl = decl;
gimple_call_set_chain (call, decl);
}
/* Return slot optimization is always possible and in fact required to
return values with DECL_BY_REFERENCE. */
if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
&& (!is_gimple_reg_type (TREE_TYPE (resdecl))
|| DECL_BY_REFERENCE (resdecl)))
gimple_call_set_return_slot_opt (call, true);
if (restmp)
{
gimple_call_set_lhs (call, restmp);
gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
TREE_TYPE (TREE_TYPE (alias))));
}
gsi_insert_after (&bsi, call, GSI_NEW_STMT);
if (!alias_is_noreturn)
{
if (restmp && !this_adjusting
&& (fixed_offset || virtual_offset))
{
tree true_label = NULL_TREE;
if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
{
gimple *stmt;
edge e;
/* If the return type is a pointer, we need to
protect against NULL. We know there will be an
adjustment, because that's why we're emitting a
thunk. */
then_bb = create_basic_block (NULL, bb);
then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
return_bb = create_basic_block (NULL, then_bb);
return_bb->count = cfg_count;
else_bb = create_basic_block (NULL, else_bb);
else_bb->count = cfg_count.apply_scale (1, 16);
add_bb_to_loop (then_bb, bb->loop_father);
add_bb_to_loop (return_bb, bb->loop_father);
add_bb_to_loop (else_bb, bb->loop_father);
remove_edge (single_succ_edge (bb));
true_label = gimple_block_label (then_bb);
stmt = gimple_build_cond (NE_EXPR, restmp,
build_zero_cst (TREE_TYPE (restmp)),
NULL_TREE, NULL_TREE);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
e->probability = profile_probability::guessed_always ()
.apply_scale (1, 16);
e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
e->probability = profile_probability::guessed_always ()
.apply_scale (1, 16);
make_single_succ_edge (return_bb,
EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
e->probability = profile_probability::always ();
bsi = gsi_last_bb (then_bb);
}
restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
fixed_offset, virtual_offset,
indirect_offset);
if (true_label)
{
gimple *stmt;
bsi = gsi_last_bb (else_bb);
stmt = gimple_build_assign (restmp,
build_zero_cst (TREE_TYPE (restmp)));
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
bsi = gsi_last_bb (return_bb);
}
}
else
{
gimple_call_set_tail (call, true);
cfun->tail_call_marked = true;
}
/* Build return value. */
if (!DECL_BY_REFERENCE (resdecl))
ret = gimple_build_return (restmp);
else
ret = gimple_build_return (resdecl);
gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
}
else
{
gimple_call_set_tail (call, true);
cfun->tail_call_marked = true;
remove_edge (single_succ_edge (bb));
}
cfun->gimple_df->in_ssa_p = true;
update_max_bb_count ();
profile_status_for_fn (cfun)
= cfg_count.initialized_p () && cfg_count.ipa_p ()
? PROFILE_READ : PROFILE_GUESSED;
/* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
TREE_ASM_WRITTEN (thunk_fndecl) = false;
delete_unreachable_blocks ();
update_ssa (TODO_update_ssa);
checking_verify_flow_info ();
free_dominance_info (CDI_DOMINATORS);
/* Since we want to emit the thunk, we explicitly mark its name as
referenced. */
thunk.thunk_p = false;
lowered = true;
bitmap_obstack_release (NULL);
}
current_function_decl = NULL;
set_cfun (NULL);
return true;
}
/* Assemble thunks and aliases associated to node. */
void
@ -2223,13 +1741,13 @@ cgraph_node::assemble_thunks_and_aliases (void)
ipa_ref *ref;
for (e = callers; e;)
if (e->caller->thunk.thunk_p
if (e->caller->thunk
&& !e->caller->inlined_to)
{
cgraph_node *thunk = e->caller;
e = e->next_caller;
thunk->expand_thunk (true, false);
expand_thunk (thunk, true, false);
thunk->assemble_thunks_and_aliases ();
}
else
@ -2594,7 +2112,7 @@ output_in_order (void)
cgraph_order_sort *node;
FOR_EACH_DEFINED_FUNCTION (cnode)
if (cnode->process && !cnode->thunk.thunk_p
if (cnode->process && !cnode->thunk
&& !cnode->alias && cnode->no_reorder)
nodes.safe_push (cgraph_order_sort (cnode));
@ -3034,7 +2552,6 @@ cgraphunit_c_finalize (void)
gcc_assert (cgraph_new_nodes.length () == 0);
cgraph_new_nodes.truncate (0);
vtable_entry_type = NULL;
queued_nodes = &symtab_terminator;
first_analyzed = NULL;
@ -3064,8 +2581,11 @@ cgraph_node::create_wrapper (cgraph_node *target)
/* Turn alias into thunk and expand it into GIMPLE representation. */
definition = true;
memset (&thunk, 0, sizeof (cgraph_thunk_info));
thunk.thunk_p = true;
/* Create empty thunk, but be sure we did not keep former thunk around.
In that case we would need to preserve the info. */
gcc_checking_assert (!thunk_info::get (this));
thunk_info::get_create (this);
thunk = true;
create_edge (target, NULL, count);
callees->can_throw_external = !TREE_NOTHROW (target->decl);
@ -3077,11 +2597,10 @@ cgraph_node::create_wrapper (cgraph_node *target)
arguments = TREE_CHAIN (arguments);
}
expand_thunk (false, true);
expand_thunk (this, false, true);
thunk_info::remove (this);
/* Inline summary set-up. */
analyze ();
inline_analyze_function (this);
}
#include "gt-cgraphunit.h"

View File

@ -54,6 +54,9 @@ along with GCC; see the file COPYING3. If not see
#include "debug.h"
#include "tree-pretty-print.h"
#include "tree-nested.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#include "d-tree.h"
@ -1702,7 +1705,7 @@ finish_thunk (tree thunk, tree function)
if (!stdarg_p (TREE_TYPE (thunk)))
{
thunk_node->create_edge (funcn, NULL, thunk_node->count);
thunk_node->expand_thunk (false, true);
expand_thunk (thunk_node, false, true);
}
/* Tell the back-end to not bother inlining the function, this is

View File

@ -1726,7 +1726,8 @@ open_base_files (void)
"except.h", "output.h", "cfgloop.h", "target.h", "lto-streamer.h",
"target-globals.h", "ipa-ref.h", "cgraph.h", "symbol-summary.h",
"ipa-prop.h", "ipa-fnsummary.h", "dwarf2out.h", "omp-general.h",
"omp-offload.h", "ipa-modref-tree.h", "ipa-modref.h", NULL
"omp-offload.h", "ipa-modref-tree.h", "ipa-modref.h", "symtab-thunks.h",
NULL
};
const char *const *ifp;
outf_p gtype_desc_c;

View File

@ -128,7 +128,7 @@ propagate_comdat_group (struct symtab_node *symbol,
if (cgraph_node * cn = dyn_cast <cgraph_node *> (symbol2))
{
/* Thunks cannot call across section boundary. */
if (cn->thunk.thunk_p)
if (cn->thunk)
newgroup = propagate_comdat_group (symbol2, newgroup, map);
/* If we see inline clone, its comdat group actually
corresponds to the comdat group of the function it
@ -344,7 +344,7 @@ ipa_comdats (void)
if (!symbol->get_comdat_group ()
&& !symbol->alias
&& (!(fun = dyn_cast <cgraph_node *> (symbol))
|| !fun->thunk.thunk_p)
|| !fun->thunk)
&& symbol->real_symbol_p ())
{
tree *val = map.get (symbol);

View File

@ -595,7 +595,7 @@ determine_versionability (struct cgraph_node *node,
/* There are a number of generic reasons functions cannot be versioned. We
also cannot remove parameters if there are type attributes such as fnspec
present. */
if (node->alias || node->thunk.thunk_p)
if (node->alias || node->thunk)
reason = "alias or thunk";
else if (!node->versionable)
reason = "not a tree_versionable_function";
@ -646,7 +646,7 @@ determine_versionability (struct cgraph_node *node,
reason = "external function which calls va_arg_pack_len";
}
if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
if (reason && dump_file && !node->alias && !node->thunk)
fprintf (dump_file, "Function %s is not versionable, reason: %s.\n",
node->dump_name (), reason);
@ -691,7 +691,7 @@ gather_caller_stats (struct cgraph_node *node, void *data)
struct cgraph_edge *cs;
for (cs = node->callers; cs; cs = cs->next_caller)
if (!cs->caller->thunk.thunk_p)
if (!cs->caller->thunk)
{
if (cs->count.ipa ().initialized_p ())
stats->count_sum += cs->count.ipa ();
@ -1155,7 +1155,7 @@ count_callers (cgraph_node *node, void *data)
for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
/* Local thunks can be handled transparently, but if the thunk cannot
be optimized out, count it as a real use. */
if (!cs->caller->thunk.thunk_p || !cs->caller->local)
if (!cs->caller->thunk || !cs->caller->local)
++*caller_count;
return false;
}
@ -1168,7 +1168,7 @@ set_single_call_flag (cgraph_node *node, void *)
{
cgraph_edge *cs = node->callers;
/* Local thunks can be handled transparently, skip them. */
while (cs && cs->caller->thunk.thunk_p && cs->caller->local)
while (cs && cs->caller->thunk && cs->caller->local)
cs = cs->next_caller;
if (cs && IPA_NODE_REF (cs->caller))
{
@ -1215,7 +1215,7 @@ initialize_node_lattices (struct cgraph_node *node)
}
if (dump_file && (dump_flags & TDF_DETAILS)
&& !node->alias && !node->thunk.thunk_p)
&& !node->alias && !node->thunk)
{
fprintf (dump_file, "Initializing lattices of %s\n",
node->dump_name ());
@ -1240,7 +1240,7 @@ initialize_node_lattices (struct cgraph_node *node)
node->clone.param_adjustments->get_surviving_params (&surviving_params);
if (dump_file && (dump_flags & TDF_DETAILS)
&& !node->alias && !node->thunk.thunk_p)
&& !node->alias && !node->thunk)
{
bool first = true;
for (int j = 0; j < ipa_get_param_count (info); j++)
@ -2806,12 +2806,12 @@ propagate_aggs_across_jump_function (struct cgraph_edge *cs,
non-thunk) destination, the call passes through a thunk. */
static bool
call_passes_through_thunk_p (cgraph_edge *cs)
call_passes_through_thunk (cgraph_edge *cs)
{
cgraph_node *alias_or_thunk = cs->callee;
while (alias_or_thunk->alias)
alias_or_thunk = alias_or_thunk->get_alias_target ();
return alias_or_thunk->thunk.thunk_p;
return alias_or_thunk->thunk;
}
/* Propagate constants from the caller to the callee of CS. INFO describes the
@ -2853,7 +2853,7 @@ propagate_constants_across_call (struct cgraph_edge *cs)
/* If this call goes through a thunk we must not propagate to the first (0th)
parameter. However, we might need to uncover a thunk from below a series
of aliases first. */
if (call_passes_through_thunk_p (cs))
if (call_passes_through_thunk (cs))
{
ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
0));
@ -4698,7 +4698,7 @@ find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
if (!IPA_EDGE_REF (cs)
|| i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
|| (i == 0
&& call_passes_through_thunk_p (cs)))
&& call_passes_through_thunk (cs)))
{
newval = NULL_TREE;
break;
@ -5661,7 +5661,7 @@ has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
struct cgraph_edge *cs;
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p
if (cs->caller->thunk
&& cs->caller->call_for_symbol_thunks_and_aliases
(has_undead_caller_from_outside_scc_p, NULL, true))
return true;

View File

@ -699,7 +699,7 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
}
}
else
gcc_assert (!count || callee->thunk.thunk_p);
gcc_assert (!count || callee->thunk);
}
else if (e->call_stmt && !e->call_stmt_cannot_inline_p && info->conds)
{
@ -3051,12 +3051,12 @@ compute_fn_summary (struct cgraph_node *node, bool early)
class ipa_size_summary *size_info = ipa_size_summaries->get_create (node);
/* Estimate the stack size for the function if we're optimizing. */
self_stack_size = optimize && !node->thunk.thunk_p
self_stack_size = optimize && !node->thunk
? estimated_stack_frame_size (node) : 0;
size_info->estimated_self_stack_size = self_stack_size;
info->estimated_stack_size = self_stack_size;
if (node->thunk.thunk_p)
if (node->thunk)
{
ipa_call_summary *es = ipa_call_summaries->get_create (node->callees);
predicate t = true;
@ -4221,7 +4221,7 @@ inline_analyze_function (struct cgraph_node *node)
if (dump_file)
fprintf (dump_file, "\nAnalyzing function: %s\n", node->dump_name ());
if (opt_for_fn (node->decl, optimize) && !node->thunk.thunk_p)
if (opt_for_fn (node->decl, optimize) && !node->thunk)
inline_indirect_intraprocedural_analysis (node);
compute_fn_summary (node, false);
if (!optimize)

View File

@ -84,6 +84,7 @@ along with GCC; see the file COPYING3. If not see
#include "stor-layout.h"
#include "dbgcnt.h"
#include "tree-vector-builder.h"
#include "symtab-thunks.h"
using namespace ipa_icf_gimple;
@ -530,22 +531,14 @@ sem_function::equals_wpa (sem_item *item,
m_compared_func = static_cast<sem_function *> (item);
if (cnode->thunk.thunk_p != cnode2->thunk.thunk_p)
return return_false_with_msg ("thunk_p mismatch");
if (cnode->thunk != cnode2->thunk)
return return_false_with_msg ("thunk mismatch");
if (cnode->former_thunk_p () != cnode2->former_thunk_p ())
return return_false_with_msg ("former_thunk_p mismatch");
if (cnode->thunk.thunk_p)
{
if (cnode->thunk.fixed_offset != cnode2->thunk.fixed_offset)
return return_false_with_msg ("thunk fixed_offset mismatch");
if (cnode->thunk.virtual_value != cnode2->thunk.virtual_value)
return return_false_with_msg ("thunk virtual_value mismatch");
if (cnode->thunk.indirect_offset != cnode2->thunk.indirect_offset)
return return_false_with_msg ("thunk indirect_offset mismatch");
if (cnode->thunk.this_adjusting != cnode2->thunk.this_adjusting)
return return_false_with_msg ("thunk this_adjusting mismatch");
if (cnode->thunk.virtual_offset_p != cnode2->thunk.virtual_offset_p)
return return_false_with_msg ("thunk virtual_offset_p mismatch");
}
if ((cnode->thunk || cnode->former_thunk_p ())
&& thunk_info::get (cnode) != thunk_info::get (cnode2))
return return_false_with_msg ("thunk_info mismatch");
/* Compare special function DECL attributes. */
if (DECL_FUNCTION_PERSONALITY (decl)
@ -968,7 +961,7 @@ redirect_all_callers (cgraph_node *n, cgraph_node *to)
/* Redirecting thunks to interposable symbols or symbols in other sections
may not be supported by target output code. Play safe for now and
punt on redirection. */
if (!e->caller->thunk.thunk_p)
if (!e->caller->thunk)
{
struct cgraph_edge *nexte = e->next_caller;
e->redirect_callee (to);
@ -1362,7 +1355,7 @@ sem_function::init (ipa_icf_gimple::func_checker *checker)
edge_count = n_edges_for_fn (func);
cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
if (!cnode->thunk.thunk_p)
if (!cnode->thunk)
{
cfg_checksum = coverage_compute_cfg_checksum (func);
@ -1407,12 +1400,7 @@ sem_function::init (ipa_icf_gimple::func_checker *checker)
else
{
cfg_checksum = 0;
inchash::hash hstate;
hstate.add_hwi (cnode->thunk.fixed_offset);
hstate.add_hwi (cnode->thunk.virtual_value);
hstate.add_flag (cnode->thunk.this_adjusting);
hstate.add_flag (cnode->thunk.virtual_offset_p);
gcode_hash = hstate.end ();
gcode_hash = thunk_info::get (cnode)->hash ();
}
m_checker = NULL;
@ -1494,7 +1482,7 @@ sem_function::parse (cgraph_node *node, bitmap_obstack *stack,
tree fndecl = node->decl;
function *func = DECL_STRUCT_FUNCTION (fndecl);
if (!func || (!node->has_gimple_body_p () && !node->thunk.thunk_p))
if (!func || (!node->has_gimple_body_p () && !node->thunk))
return NULL;
if (lookup_attribute_by_prefix ("omp ", DECL_ATTRIBUTES (node->decl)) != NULL)

View File

@ -50,6 +50,7 @@ along with GCC; see the file COPYING3. If not see
#include "ipa-utils.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "symtab-thunks.h"
int ncalls_inlined;
int nfunctions_inlined;
@ -353,14 +354,18 @@ inline_call (struct cgraph_edge *e, bool update_original,
to = e->caller;
if (to->inlined_to)
to = to->inlined_to;
if (to->thunk.thunk_p)
if (to->thunk)
{
struct cgraph_node *target = to->callees->callee;
thunk_expansion = true;
/* Remove all annotations, but keep thunk info. */
thunk_info info = *thunk_info::get (to);
symtab->call_cgraph_removal_hooks (to);
*thunk_info::get_create (to) = info;
if (in_lto_p)
to->get_untransformed_body ();
to->expand_thunk (false, true);
expand_thunk (to, false, true);
/* When thunk is instrumented we may have multiple callees. */
for (e = to->callees; e && e->callee != target; e = e->next_callee)
;
@ -564,9 +569,9 @@ save_inline_function_body (struct cgraph_node *node)
first_clone = node->clones;
/* Arrange first clone to not be thunk as those do not have bodies. */
if (first_clone->thunk.thunk_p)
if (first_clone->thunk)
{
while (first_clone->thunk.thunk_p)
while (first_clone->thunk)
first_clone = first_clone->next_sibling_clone;
first_clone->prev_sibling_clone->next_sibling_clone
= first_clone->next_sibling_clone;
@ -673,11 +678,11 @@ static bool
preserve_function_body_p (struct cgraph_node *node)
{
gcc_assert (symtab->global_info_ready);
gcc_assert (!node->alias && !node->thunk.thunk_p);
gcc_assert (!node->alias && !node->thunk);
/* Look if there is any non-thunk clone around. */
for (node = node->clones; node; node = node->next_sibling_clone)
if (!node->thunk.thunk_p)
if (!node->thunk)
return true;
return false;
}

View File

@ -1947,7 +1947,7 @@ inline_small_functions (void)
if (!node->inlined_to)
{
if (!node->alias && node->analyzed
&& (node->has_gimple_body_p () || node->thunk.thunk_p)
&& (node->has_gimple_body_p () || node->thunk)
&& opt_for_fn (node->decl, optimize))
{
class ipa_fn_summary *info = ipa_fn_summaries->get (node);

View File

@ -38,6 +38,9 @@ along with GCC; see the file COPYING3. If not see
#include "tree-dfa.h"
#include "gimple-pretty-print.h"
#include "tree-into-ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
/* Return true when TYPE contains a polymorphic type and thus is interesting
for devirtualization machinery. */
@ -1007,8 +1010,9 @@ ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
Because we walked all the way to the beginning of thunk, we now
see pointer &bar-thunk.fixed_offset and need to compensate
for it. */
if (node->thunk.fixed_offset)
offset -= node->thunk.fixed_offset * BITS_PER_UNIT;
thunk_info *info = thunk_info::get (node);
if (info && info->fixed_offset)
offset -= info->fixed_offset * BITS_PER_UNIT;
/* Dynamic casting has possibly upcasted the type
in the hierarchy. In this case outer type is less
@ -1021,7 +1025,7 @@ ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
/* If we compile thunk with virtual offset, the THIS pointer
is adjusted by unknown value. We can't thus use outer info
at all. */
|| node->thunk.virtual_offset_p)
|| (info && info->virtual_offset_p))
{
outer_type = NULL;
if (instance)
@ -1047,10 +1051,10 @@ ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
}
if (instance)
{
thunk_info *info = thunk_info::get (node);
/* If method is expanded thunk, we need to apply thunk offset
to instance pointer. */
if (node->thunk.virtual_offset_p
|| node->thunk.fixed_offset)
if (info && (info->virtual_offset_p || info->fixed_offset))
*instance = NULL;
else
*instance = base_pointer;

View File

@ -61,6 +61,7 @@ along with GCC; see the file COPYING3. If not see
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"
/* Lattice values for const and pure functions. Everything starts out
being const, then may drop to pure and then neither depending on
@ -1025,11 +1026,11 @@ analyze_function (struct cgraph_node *fn, bool ipa)
flags_from_decl_or_type (fn->decl),
fn->cannot_return_p ());
if (fn->thunk.thunk_p || fn->alias)
if (fn->thunk || fn->alias)
{
/* Thunk gets propagated through, so nothing interesting happens. */
gcc_assert (ipa);
if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
l->pure_const_state = IPA_NEITHER;
return l;
}

View File

@ -2935,7 +2935,7 @@ check_for_caller_issues (struct cgraph_node *node, void *data)
for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
{
if (cs->caller->thunk.thunk_p)
if (cs->caller->thunk)
{
issues->thunk = true;
/* TODO: We should be able to process at least some types of

View File

@ -297,7 +297,7 @@ ipa_reverse_postorder (struct cgraph_node **order)
&& (pass
|| (!node->address_taken
&& !node->inlined_to
&& !node->alias && !node->thunk.thunk_p
&& !node->alias && !node->thunk
&& !node->only_called_directly_p ())))
{
stack_size = 0;
@ -460,8 +460,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
}
/* First handle functions with no gimple body. */
if (dst->thunk.thunk_p || dst->alias
|| src->thunk.thunk_p || src->alias)
if (dst->thunk || dst->alias
|| src->thunk || src->alias)
{
scale_ipa_profile_for_fn (dst, orig_count);
return;

View File

@ -95,7 +95,7 @@ non_local_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
return !(node->only_called_directly_or_aliased_p ()
/* i386 would need update to output thunk with local calling
conventions. */
&& !node->thunk.thunk_p
&& !node->thunk
&& node->definition
&& !DECL_EXTERNAL (node->decl)
&& !lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl))
@ -112,7 +112,7 @@ cgraph_node::local_p (void)
{
cgraph_node *n = ultimate_alias_target ();
if (n->thunk.thunk_p)
if (n->thunk)
return n->callees->callee->local_p ();
return !n->call_for_symbol_thunks_and_aliases (non_local_p,
NULL, true);
@ -729,7 +729,7 @@ function_and_variable_visibility (bool whole_program)
&& !DECL_EXTERNAL (node->decl))
localize_node (whole_program, node);
if (node->thunk.thunk_p
if (node->thunk
&& TREE_PUBLIC (node->decl))
{
struct cgraph_node *decl_node = node;

View File

@ -475,7 +475,7 @@ symbol_table::remove_unreachable_nodes (FILE *file)
}
}
else if (cnode->thunk.thunk_p)
else if (cnode->thunk)
enqueue_node (cnode->callees->callee, &first, &reachable);
/* If any reachable function has simd clones, mark them as
@ -525,7 +525,7 @@ symbol_table::remove_unreachable_nodes (FILE *file)
/* We keep definitions of thunks and aliases in the boundary so
we can walk to the ultimate alias targets and function symbols
reliably. */
if (node->alias || node->thunk.thunk_p)
if (node->alias || node->thunk)
;
else if (!body_needed_for_clonning.contains (node->decl))
{
@ -537,7 +537,7 @@ symbol_table::remove_unreachable_nodes (FILE *file)
}
else if (!node->clone_of)
gcc_assert (in_lto_p || DECL_RESULT (node->decl));
if (node->definition && !node->alias && !node->thunk.thunk_p)
if (node->definition && !node->alias && !node->thunk)
{
if (file)
fprintf (file, " %s", node->dump_name ());
@ -547,7 +547,7 @@ symbol_table::remove_unreachable_nodes (FILE *file)
node->cpp_implicit_alias = false;
node->alias = false;
node->transparent_alias = false;
node->thunk.thunk_p = false;
node->thunk = false;
node->weakref = false;
/* After early inlining we drop always_inline attributes on
bodies of functions that are still referenced (have their

View File

@ -39,6 +39,9 @@ along with GCC; see the file COPYING3. If not see
#include "omp-offload.h"
#include "stringpool.h"
#include "attribs.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
/* True when asm nodes has been output. */
bool asm_nodes_output = false;
@ -261,7 +264,7 @@ lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
: gimple_uid (edge->call_stmt) + 1;
bp_pack_enum (&bp, cgraph_inline_failed_t,
CIF_N_REASONS, edge->inline_failed);
gcc_checking_assert (uid || edge->caller->thunk.thunk_p);
gcc_checking_assert (uid || edge->caller->thunk);
bp_pack_var_len_unsigned (&bp, uid);
bp_pack_value (&bp, edge->speculative_id, 16);
bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
@ -393,7 +396,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
if (node->analyzed && (!boundary_p || node->alias
|| (node->thunk.thunk_p && !node->inlined_to)))
|| (node->thunk && !node->inlined_to)))
tag = LTO_symtab_analyzed_node;
else
tag = LTO_symtab_unavail_node;
@ -533,10 +536,19 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
bp_pack_value (&bp, node->nonfreeing_fn, 1);
bp_pack_value (&bp, node->merged_comdat, 1);
bp_pack_value (&bp, node->merged_extern_inline, 1);
bp_pack_value (&bp, node->thunk.thunk_p, 1);
bp_pack_value (&bp, node->thunk, 1);
bp_pack_value (&bp, node->parallelized_function, 1);
bp_pack_value (&bp, node->declare_variant_alt, 1);
bp_pack_value (&bp, node->calls_declare_variant_alt, 1);
/* Always stream thunk info because we use it in
ipa_polymorphic_call_context::ipa_polymorphic_call_context
to properly interpret THIS pointers for thunks that have been converted
to Gimple. */
struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;
bp_pack_value (&bp, thunk != NULL, 1);
bp_pack_enum (&bp, ld_plugin_symbol_resolution,
LDPR_NUM_KNOWN,
/* When doing incremental link, we will get new resolution
@ -546,26 +558,15 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
streamer_write_bitpack (&bp);
streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
/* Stream thunk info always because we use it in
ipa_polymorphic_call_context::ipa_polymorphic_call_context
to properly interpret THIS pointers for thunks that has been converted
to Gimple. */
if (node->definition)
{
streamer_write_uhwi_stream
(ob->main_stream,
1 + (node->thunk.this_adjusting != 0) * 2
+ (node->thunk.virtual_offset_p != 0) * 4);
streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
}
streamer_write_hwi_stream (ob->main_stream, node->profile_id);
streamer_write_hwi_stream (ob->main_stream, node->unit_id);
if (DECL_STATIC_CONSTRUCTOR (node->decl))
streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
if (DECL_STATIC_DESTRUCTOR (node->decl))
streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
if (thunk)
thunk_info::get (node)->stream_out (ob);
}
/* Output the varpool NODE to OB.
@ -931,7 +932,7 @@ compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
if (node->alias && node->analyzed)
create_references (encoder, node);
if (cnode
&& cnode->thunk.thunk_p && !cnode->inlined_to)
&& cnode->thunk && !cnode->inlined_to)
add_node_to (encoder, cnode->callees->callee, false);
while (node->transparent_alias && node->analyzed)
{
@ -987,7 +988,7 @@ output_symtab (void)
{
node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
if (node
&& ((node->thunk.thunk_p && !node->inlined_to)
&& ((node->thunk && !node->inlined_to)
|| lto_symtab_encoder_in_partition_p (encoder, node)))
{
output_outgoing_cgraph_edges (node->callees, ob, encoder);
@ -1139,13 +1140,14 @@ verify_node_partition (symtab_node *node)
NODE or to replace the values in it, for instance because the first
time we saw it, the function body was not available but now it
is. BP is a bitpack with all the bitflags for NODE read from the
stream. */
stream. Initialize HAS_THUNK_INFO to indicate if thunk info should
be streamed in. */
static void
input_overwrite_node (struct lto_file_decl_data *file_data,
struct cgraph_node *node,
enum LTO_symtab_tags tag,
struct bitpack_d *bp)
struct bitpack_d *bp, bool *has_thunk_info)
{
node->aux = (void *) tag;
node->lto_file_data = file_data;
@ -1194,10 +1196,11 @@ input_overwrite_node (struct lto_file_decl_data *file_data,
node->nonfreeing_fn = bp_unpack_value (bp, 1);
node->merged_comdat = bp_unpack_value (bp, 1);
node->merged_extern_inline = bp_unpack_value (bp, 1);
node->thunk.thunk_p = bp_unpack_value (bp, 1);
node->thunk = bp_unpack_value (bp, 1);
node->parallelized_function = bp_unpack_value (bp, 1);
node->declare_variant_alt = bp_unpack_value (bp, 1);
node->calls_declare_variant_alt = bp_unpack_value (bp, 1);
*has_thunk_info = bp_unpack_value (bp, 1);
node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
LDPR_NUM_KNOWN);
node->split_part = bp_unpack_value (bp, 1);
@ -1235,6 +1238,7 @@ input_node (struct lto_file_decl_data *file_data,
const char *section;
order = streamer_read_hwi (ib) + file_data->order_base;
clone_ref = streamer_read_hwi (ib);
bool has_thunk_info;
fn_decl = lto_input_fn_decl_ref (ib, file_data);
@ -1294,7 +1298,7 @@ input_node (struct lto_file_decl_data *file_data,
bp = streamer_read_bitpack (ib);
input_overwrite_node (file_data, node, tag, &bp);
input_overwrite_node (file_data, node, tag, &bp, &has_thunk_info);
/* Store a reference for now, and fix up later to be a pointer. */
node->inlined_to = (cgraph_node *) (intptr_t) ref;
@ -1311,19 +1315,6 @@ input_node (struct lto_file_decl_data *file_data,
if (section)
node->set_section_for_node (section);
if (node->definition)
{
int type = streamer_read_uhwi (ib);
HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);
node->thunk.fixed_offset = fixed_offset;
node->thunk.virtual_value = virtual_value;
node->thunk.indirect_offset = indirect_offset;
node->thunk.this_adjusting = (type & 2);
node->thunk.virtual_offset_p = (type & 4);
}
if (node->alias && !node->analyzed && node->weakref)
node->alias_target = get_alias_symbol (node->decl);
node->profile_id = streamer_read_hwi (ib);
@ -1335,6 +1326,9 @@ input_node (struct lto_file_decl_data *file_data,
if (DECL_STATIC_DESTRUCTOR (node->decl))
node->set_fini_priority (streamer_read_hwi (ib));
if (has_thunk_info)
thunk_info::get_create (node)->stream_in (ib);
return node;
}

View File

@ -1232,12 +1232,12 @@ fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
orig = orig->clone_of;
fn = DECL_STRUCT_FUNCTION (orig->decl);
if (!orig->thunk.thunk_p)
if (!orig->thunk)
fixup_call_stmt_edges_1 (orig, stmts, fn);
if (orig->clones)
for (node = orig->clones; node != orig;)
{
if (!node->thunk.thunk_p)
if (!node->thunk)
fixup_call_stmt_edges_1 (node, stmts, fn);
if (node->clones)
node = node->clones;

View File

@ -181,7 +181,7 @@ add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
/* Add all thunks associated with the function. */
for (e = cnode->callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p && !e->caller->inlined_to)
if (e->caller->thunk && !e->caller->inlined_to)
add_symbol_to_partition_1 (part, e->caller);
}

639
gcc/symtab-thunks.cc Normal file
View File

@ -0,0 +1,639 @@
/* Support for thunks in symbol table.
Copyright (C) 2003-2020 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "target.h"
#include "rtl.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#include "lto-streamer.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "gimplify-me.h"
#include "varasm.h"
#include "output.h"
#include "cfg.h"
#include "cfghooks.h"
#include "gimple-ssa.h"
#include "gimple-fold.h"
#include "cfgloop.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "cfgcleanup.h"
#include "tree-pass.h"
#include "data-streamer.h"
#include "langhooks.h"
/* Used for vtable lookup in thunk adjusting. */
static GTY (()) tree vtable_entry_type;
namespace {

/* Function summary for thunk_infos.  This attaches a thunk_info record to
   cgraph nodes and keeps it alive across node duplication.  */
class GTY((user)) thunk_infos_t: public function_summary <thunk_info *>
{
public:
  thunk_infos_t (symbol_table *table, bool ggc):
    function_summary<thunk_info *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  virtual void duplicate (cgraph_node *node,
			  cgraph_node *node2,
			  thunk_info *data,
			  thunk_info *data2);
};

/* Duplication hook: copy the thunk description from the original node's
   summary entry (SRC) to the clone's entry (DST).  */
void
thunk_infos_t::duplicate (cgraph_node *, cgraph_node *,
			  thunk_info *src, thunk_info *dst)
{
  *dst = *src;
}

}  /* anon namespace  */
/* Return the thunk annotation of NODE, creating a fresh one (and, on first
   use, the underlying summary itself) when none exists yet.  */
thunk_info *
thunk_info::get_create (cgraph_node *node)
{
  if (!symtab->m_thunks)
    {
      /* Lazily allocate the GC-managed summary; the insertion hook is not
	 used for thunk data.  */
      thunk_infos_t *summaries
	= new (ggc_alloc_no_dtor <thunk_infos_t> ())
	    thunk_infos_t (symtab, true);
      summaries->disable_insertion_hook ();
      symtab->m_thunks = summaries;
    }
  return symtab->m_thunks->get_create (node);
}
/* Stream out THIS to OB.  The layout (flag word followed by the three
   offsets) must stay in sync with thunk_info::stream_in.  */
void
thunk_info::stream_out (lto_simple_output_block *ob)
{
  /* Bit 0 is a presence marker; bits 1 and 2 carry the two booleans.  */
  unsigned HOST_WIDE_INT flags = 1;
  if (this_adjusting)
    flags |= 2;
  if (virtual_offset_p)
    flags |= 4;
  streamer_write_uhwi_stream (ob->main_stream, flags);
  streamer_write_uhwi_stream (ob->main_stream, fixed_offset);
  streamer_write_uhwi_stream (ob->main_stream, virtual_value);
  streamer_write_uhwi_stream (ob->main_stream, indirect_offset);
}
/* Stream in THIS from IB.  Must mirror the layout written by
   thunk_info::stream_out: a flag word followed by the three offsets.  */
void
thunk_info::stream_in (class lto_input_block *ib)
{
  int type = streamer_read_uhwi (ib);
  fixed_offset = streamer_read_uhwi (ib);
  virtual_value = streamer_read_uhwi (ib);
  indirect_offset = streamer_read_uhwi (ib);
  /* Bit 0 of TYPE is the presence marker (always set on output); bits 1
     and 2 hold the two boolean flags.  */
  this_adjusting = (type & 2);
  virtual_offset_p = (type & 4);
}
/* Dump a human readable description of THIS to F; used by cgraph dumps.  */
void
thunk_info::dump (FILE *f)
{
  if (alias)
    {
      const char *name = lang_hooks.decl_printable_name (alias, 2);
      fprintf (f, " of %s (asm:%s)", name,
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (alias)));
    }
  fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
	   "has virtual offset %i\n",
	   (int)fixed_offset,
	   (int)virtual_value,
	   (int)indirect_offset,
	   (int)virtual_offset_p);
}
/* Hash THIS.  */
hashval_t
thunk_info::hash ()
{
  inchash::hash hstate;
  hstate.add_hwi (fixed_offset);
  hstate.add_hwi (virtual_value);
  hstate.add_flag (this_adjusting);
  hstate.add_flag (virtual_offset_p);
  /* NOTE(review): indirect_offset is not mixed in even though operator==
     compares it.  Objects equal under operator== still hash equal (only
     collision quality suffers) — confirm whether the omission is
     intentional.  */
  return hstate.end ();
}
/* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
   VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
   it is non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and zero
   for a result adjusting thunk.

   BSI is the iterator after which the adjustment statements are inserted;
   the adjusted pointer is returned as a fresh SSA-nameable temporary.  */
tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset,
	      HOST_WIDE_INT indirect_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk the constant adjustment is applied first,
     before any vtable lookup.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* Lazily build the type used for vtable entries; shared by all thunks
     through the file-static vtable_entry_type.  */
  if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
    {
      tree vfunc_type = make_node (FUNCTION_TYPE);
      TREE_TYPE (vfunc_type) = integer_type_node;
      TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
      layout_type (vfunc_type);

      vtable_entry_type = build_pointer_type (vfunc_type);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      vtabletmp = create_tmp_reg
	(build_pointer_type
	 (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* Likewise for an offset that is stored in the object that contains the
     vtable.  */
  if (indirect_offset != 0)
    {
      tree offset_ptr, offset_tree;

      /* Get the address of the offset.  */
      offset_ptr
	= create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)),
			  "offset_ptr");
      stmt = gimple_build_assign (offset_ptr,
				  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      stmt = gimple_build_assign
	(offset_ptr,
	 fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
					  indirect_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
				    "offset");
      stmt = gimple_build_assign (offset_tree,
				  build_simple_mem_ref (offset_ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk the constant adjustment comes last, after
     the virtual/indirect lookups above.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNK is true, also produce assembler for
   thunks that are not lowered.

   Returns true when the thunk was expanded (either emitted directly as
   target assembler or turned into a GIMPLE body); false when it was left
   as a thunk (asm output suppressed) or could not be expanded (variadic
   method error path).  */
bool
expand_thunk (cgraph_node *node, bool output_asm_thunks,
	      bool force_gimple_thunk)
{
  /* Cache the thunk description; NODE must have an associated
     thunk_info.  */
  thunk_info *info = thunk_info::get (node);
  bool this_adjusting = info->this_adjusting;
  HOST_WIDE_INT fixed_offset = info->fixed_offset;
  HOST_WIDE_INT virtual_value = info->virtual_value;
  HOST_WIDE_INT indirect_offset = info->indirect_offset;
  tree virtual_offset = NULL;
  /* The single callee edge of a thunk points at the wrapped function.  */
  tree alias = node->callees->callee->decl;
  tree thunk_fndecl = node->decl;
  tree a;

  /* First try the cheap path: let the target emit the whole thunk as
     assembler via the can_output_mi_thunk/output_mi_thunk hooks.  */
  if (!force_gimple_thunk
      && this_adjusting
      && indirect_offset == 0
      && !DECL_EXTERNAL (alias)
      && !DECL_STATIC_CHAIN (alias)
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      /* Caller only wanted analysis, not assembler output.  */
      if (!output_asm_thunks)
	{
	  node->analyzed = true;
	  return false;
	}

      if (in_lto_p)
	node->get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
			      flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
      allocate_struct_function (thunk_fndecl, false);
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The node is no longer a thunk once the assembler is written.  */
      node->thunk = false;
      node->analyzed = false;
    }
  else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
    {
      /* Variadic thunks cannot be lowered to GIMPLE (we cannot forward
	 the variable arguments).  */
      error ("generic thunk code fails for method %qD which uses %<...%>",
	     thunk_fndecl);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->analyzed = true;
      return false;
    }
  else
    {
      /* Generic path: synthesize a GIMPLE body that adjusts `this' (or
	 the result), calls ALIAS and returns.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gcall *call;
      greturn *ret;
      bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);

      /* We may be called from expand_thunk that releases body except for
	 DECL_ARGUMENTS.  In this case force_gimple_thunk is true.  */
      if (in_lto_p && !force_gimple_thunk)
	node->get_untransformed_body ();

      /* We need to force DECL_IGNORED_P when the thunk is created
	 after early debug was run.  */
      if (force_gimple_thunk)
	DECL_IGNORED_P (thunk_fndecl) = 1;

      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
			      flag_function_sections);

      bitmap_obstack_initialize (NULL);

      if (info->virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_CONTEXT (resdecl) = thunk_fndecl;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Fall back to a guessed local count when no profile is known.  */
      profile_count cfg_count = node->count;
      if (!cfg_count.initialized_p ())
	cfg_count = profile_count::from_gcov_type
		      (BB_FREQ_MAX).guessed_local ();

      bb = then_bb = else_bb = return_bb
	= init_lowered_empty_function (thunk_fndecl, true, cfg_count);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype)
	  && (!alias_is_noreturn
	      || TREE_ADDRESSABLE (restype)
	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (!restmp)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (resdecl)),
				 resdecl,
				 build_int_cst (TREE_TYPE (resdecl), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
		{
		  restmp = resdecl;

		  if (VAR_P (restmp))
		    {
		      add_local_decl (cfun, restmp);
		      BLOCK_VARS (DECL_INITIAL (current_function_decl))
			= restmp;
		    }
		}
	      else
		restmp = create_tmp_var (restype, "retval");
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      auto_vec<tree> vargs (nargs);
      i = 0;
      arg = a;
      /* The first argument is `this'; adjust it before forwarding.  */
      if (this_adjusting)
	{
	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					  virtual_offset, indirect_offset));
	  arg = DECL_CHAIN (a);
	  i = 1;
	}

      /* Forward the remaining arguments, copying non-GIMPLE values into
	 temporaries first.  */
      if (nargs)
	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    DECL_NOT_GIMPLE_REG_P (arg) = 0;

	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple *stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      node->callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);

      /* Pass the static chain of the wrapped function through a fresh
	 artificial PARM_DECL.  */
      if (DECL_STATIC_CHAIN (alias))
	{
	  tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
	  tree type = TREE_TYPE (p);
	  tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
				  PARM_DECL, create_tmp_var_name ("CHAIN"),
				  type);
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  TREE_USED (decl) = 1;
	  DECL_CONTEXT (decl) = thunk_fndecl;
	  DECL_ARG_TYPE (decl) = type;
	  TREE_READONLY (decl) = 1;

	  struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
	  sf->static_chain_decl = decl;

	  gimple_call_set_chain (call, decl);
	}

      /* Return slot optimization is always possible and in fact required to
	 return values with DECL_BY_REFERENCE.  */
      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
	      || DECL_BY_REFERENCE (resdecl)))
	gimple_call_set_return_slot_opt (call, true);

      if (restmp)
	{
	  gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!alias_is_noreturn)
	{
	  /* A result-adjusting thunk must adjust the returned pointer
	     after the call.  */
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple *stmt;
		  edge e;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, bb);
		  then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
		  return_bb = create_basic_block (NULL, then_bb);
		  return_bb->count = cfg_count;
		  else_bb = create_basic_block (NULL, else_bb);
		  else_bb->count = cfg_count.apply_scale (1, 16);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  e->probability = profile_probability::guessed_always ()
					.apply_scale (1, 16);
		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  e->probability = profile_probability::guessed_always ()
					.apply_scale (1, 16);
		  make_single_succ_edge (return_bb,
					 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  e->probability = profile_probability::always ();
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset,
				     indirect_offset);
	      if (true_label)
		{
		  gimple *stmt;
		  /* NULL result: skip the adjustment and return NULL.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst
					      (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    {
	      gimple_call_set_tail (call, true);
	      cfun->tail_call_marked = true;
	    }

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* The callee never returns; drop the fallthrough edge.  */
	  gimple_call_set_tail (call, true);
	  cfun->tail_call_marked = true;
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      update_max_bb_count ();
      profile_status_for_fn (cfun)
	= cfg_count.initialized_p () && cfg_count.ipa_p ()
	  ? PROFILE_READ : PROFILE_GUESSED;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
      checking_verify_flow_info ();
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk = false;
      node->lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
/* Reset the file's static state (the cached vtable entry type) so the
   compiler can be torn down and re-initialized; called from
   toplev::finalize.  */
void
symtab_thunks_cc_finalize (void)
{
  vtable_entry_type = NULL;
}
#include "gt-symtab-thunks.h"

166
gcc/symtab-thunks.h Normal file
View File

@ -0,0 +1,166 @@
/* Representation of thunks inside symbol table.
Copyright (C) 2003-2020 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#ifndef GCC_SYMTAB_THUNKS_H
#define GCC_SYMTAB_THUNKS_H
/* This symbol annotation holds information about thunk.
Thunks are basically wrappers around methods which are introduced in case
of multiple inheritance in order to adjust the value of the "this" pointer
or of the returned value.
In the case of this-adjusting thunks, each back-end can override the
can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
(with a tail call for instance) directly as assembly. For the default hook
or for the case where the can_output_mi_thunk hooks return false, the thunk
is gimplified and lowered using the regular machinery. */
struct GTY(()) thunk_info {
  /* Constructor.  */
  thunk_info ()
    : fixed_offset (0),
      virtual_value (0),
      indirect_offset (0),
      alias (NULL),
      this_adjusting (false),
      virtual_offset_p (false)
  {
  }
  /* Copy constructor.  */
  thunk_info (const thunk_info &t)
    : fixed_offset (t.fixed_offset),
      virtual_value (t.virtual_value),
      indirect_offset (t.indirect_offset),
      alias (t.alias),
      this_adjusting (t.this_adjusting),
      virtual_offset_p (t.virtual_offset_p)
  {
  }

  /* Compare for equality.  NOTE(review): ALIAS is intentionally not
     compared here — equality covers only the adjustment description;
     confirm this matches all callers' expectations.  */
  bool
  operator==(const thunk_info &other) const
  {
    return fixed_offset == other.fixed_offset
	   && virtual_value == other.virtual_value
	   && indirect_offset == other.indirect_offset
	   && this_adjusting == other.this_adjusting
	   && virtual_offset_p == other.virtual_offset_p;
  }
  bool
  operator!=(const thunk_info &other) const
  {
    return !(*this == other);
  }
  /* Copy operator.  ALIAS must be copied here too: the summary duplication
     hook copies thunk data with "*dst = *src", and omitting ALIAS (unlike
     the copy constructor above) would silently drop the thunk target when
     a node carrying thunk info is cloned.  */
  thunk_info &
  operator=(const thunk_info &other)
  {
    fixed_offset = other.fixed_offset;
    virtual_value = other.virtual_value;
    indirect_offset = other.indirect_offset;
    alias = other.alias;
    this_adjusting = other.this_adjusting;
    virtual_offset_p = other.virtual_offset_p;
    return *this;
  }

  /* Offset used to adjust "this".  */
  HOST_WIDE_INT fixed_offset;

  /* Offset in the virtual table to get the offset to adjust "this".  Valid
     iff VIRTUAL_OFFSET_P is true.  */
  HOST_WIDE_INT virtual_value;

  /* Offset from "this" to get the offset to adjust "this".  Zero means: this
     offset is to be ignored.  */
  HOST_WIDE_INT indirect_offset;

  /* Thunk target, i.e. the method that this thunk wraps.  Depending on the
     TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias.  */
  tree alias;

  /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
     thunk.  */
  bool this_adjusting;

  /* If true, this thunk is what we call a virtual thunk.  In this case:
     * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
       done, add to the result the offset found in the vtable at:
	 vptr + VIRTUAL_VALUE
     * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
       the virtual one.  */
  bool virtual_offset_p;

  /* Dump thunk_info.  */
  void dump (FILE *);

  /* Stream out thunk_info.  */
  void stream_out (class lto_simple_output_block *);

  /* Stream in thunk_info.  */
  void stream_in (class lto_input_block *);

  hashval_t hash ();

  /* Return thunk_info, if available.  */
  static thunk_info *get (cgraph_node *node);

  /* Return thunk_info possibly creating new one.  */
  static thunk_info *get_create (cgraph_node *node);

  /* Remove thunk_info.  */
  static void remove (cgraph_node *node);

  /* Release all thunk_infos.  */
  static void release (void);
};
bool expand_thunk (cgraph_node *, bool, bool);
/* Return the thunk annotation of NODE, or NULL when the summary has not
   been created or NODE has no entry.  */
inline thunk_info *
thunk_info::get (cgraph_node *node)
{
  return symtab->m_thunks ? symtab->m_thunks->get (node) : NULL;
}
/* Remove thunk_info association for NODE.
   NOTE(review): unlike thunk_info::get, this dereferences
   symtab->m_thunks without a NULL check — callers must only invoke it
   after the summary was created.  */
inline void
thunk_info::remove (cgraph_node *node)
{
  symtab->m_thunks->remove (node);
}
/* Free the whole thunk summary (all thunk_infos) and reset the pointer so
   a later get_create starts from scratch.  */
inline void
thunk_info::release ()
{
  /* "delete" of a null pointer is a no-op, so the previous explicit
     NULL guard was redundant.  */
  delete symtab->m_thunks;
  symtab->m_thunks = NULL;
}
#endif /* GCC_SYMTAB_THUNKS_H */

View File

@ -2507,6 +2507,7 @@ toplev::finalize (void)
cgraph_c_finalize ();
cgraphunit_c_finalize ();
symtab_thunks_cc_finalize ();
dwarf2out_c_finalize ();
gcse_c_finalize ();
ipa_cp_c_finalize ();

View File

@ -51,6 +51,9 @@
#include "tree-ssa-address.h"
#include "stringpool.h"
#include "attribs.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#define A_RUNINSTRUMENTEDCODE 0x0001
#define A_RUNUNINSTRUMENTEDCODE 0x0002
@ -4729,7 +4732,8 @@ ipa_tm_mayenterirr_function (struct cgraph_node *node)
result in one of the bits above being set so that we will not
have to recurse next time. */
if (node->alias)
return ipa_tm_mayenterirr_function (cgraph_node::get (node->thunk.alias));
return ipa_tm_mayenterirr_function
(cgraph_node::get (thunk_info::get (node)->alias));
/* What remains is unmarked local functions without items that force
the function to go irrevocable. */
@ -5475,7 +5479,7 @@ ipa_tm_execute (void)
we need not scan the callees now, as the base will do. */
if (node->alias)
{
node = cgraph_node::get (node->thunk.alias);
node = cgraph_node::get (thunk_info::get (node)->alias);
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
continue;

View File

@ -61,6 +61,9 @@ along with GCC; see the file COPYING3. If not see
#include "sreal.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-live.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
/* I'm not real happy about this, but we need to handle gimple and
non-gimple trees. */
@ -4792,13 +4795,14 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
/* If callee is thunk, all we need is to adjust the THIS pointer
and redirect to function being thunked. */
if (id->src_node->thunk.thunk_p)
if (id->src_node->thunk)
{
cgraph_edge *edge;
tree virtual_offset = NULL;
profile_count count = cg_edge->count;
tree op;
gimple_stmt_iterator iter = gsi_for_stmt (stmt);
thunk_info *info = thunk_info::get (id->src_node);
cgraph_edge::remove (cg_edge);
edge = id->src_node->callees->clone (id->dst_node, call_stmt,
@ -4807,16 +4811,16 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
profile_count::one (),
true);
edge->count = count;
if (id->src_node->thunk.virtual_offset_p)
virtual_offset = size_int (id->src_node->thunk.virtual_value);
if (info->virtual_offset_p)
virtual_offset = size_int (info->virtual_value);
op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
NULL);
gsi_insert_before (&iter, gimple_build_assign (op,
gimple_call_arg (stmt, 0)),
GSI_NEW_STMT);
gcc_assert (id->src_node->thunk.this_adjusting);
op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
virtual_offset, id->src_node->thunk.indirect_offset);
gcc_assert (info->this_adjusting);
op = thunk_adjust (&iter, op, 1, info->fixed_offset,
virtual_offset, info->indirect_offset);
gimple_call_set_arg (stmt, 0, op);
gimple_call_set_fndecl (stmt, edge->callee->decl);

View File

@ -45,6 +45,7 @@
#include "alloc-pool.h"
#include "tree-nested.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
/* Summary of nested functions. */
static function_summary <nested_function_info *>
@ -937,7 +938,7 @@ create_nesting_tree (struct cgraph_node *cgn)
info->mem_refs = new hash_set<tree *>;
info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
info->context = cgn->decl;
info->thunk_p = cgn->thunk.thunk_p;
info->thunk_p = cgn->thunk;
for (cgn = first_nested_function (cgn); cgn;
cgn = next_nested_function (cgn))
@ -3047,7 +3048,7 @@ convert_all_function_calls (struct nesting_info *root)
if (n->thunk_p)
{
tree decl = n->context;
tree alias = cgraph_node::get (decl)->thunk.alias;
tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
}
@ -3083,7 +3084,7 @@ convert_all_function_calls (struct nesting_info *root)
if (n->thunk_p)
{
tree decl = n->context;
tree alias = cgraph_node::get (decl)->thunk.alias;
tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
}
}
@ -3638,7 +3639,7 @@ gimplify_all_functions (struct cgraph_node *root)
gimplify_function_tree (root->decl);
for (iter = first_nested_function (root); iter;
iter = next_nested_function (iter))
if (!iter->thunk.thunk_p)
if (!iter->thunk)
gimplify_all_functions (iter);
}

View File

@ -55,6 +55,9 @@ along with GCC; see the file COPYING3. If not see
#include "langhooks.h"
#include "stor-layout.h"
#include "xregex.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
static GTY(()) tree gcov_type_node;
static GTY(()) tree tree_interval_profiler_fn;
@ -726,7 +729,7 @@ tree_profiling (void)
FOR_EACH_DEFINED_FUNCTION (node)
{
bool thunk = false;
if (!gimple_has_body_p (node->decl) && !node->thunk.thunk_p)
if (!gimple_has_body_p (node->decl) && !node->thunk)
continue;
/* Don't profile functions produced for builtin stuff. */
@ -747,7 +750,7 @@ tree_profiling (void)
if (!include_source_file_for_profile (file))
continue;
if (node->thunk.thunk_p)
if (node->thunk)
{
/* We cannot expand variadic thunks to Gimple. */
if (stdarg_p (TREE_TYPE (node->decl)))
@ -756,7 +759,7 @@ tree_profiling (void)
/* When generate profile, expand thunk to gimple so it can be
instrumented same way as other functions. */
if (profile_arc_flag)
node->expand_thunk (false, true);
expand_thunk (node, false, true);
/* Read cgraph profile but keep function as thunk at profile-use
time. */
else

View File

@ -7972,7 +7972,7 @@ static bool
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
{
if ((node->alias
|| (node->thunk.thunk_p
|| (node->thunk
&& ! node->inlined_to))
&& node->analyzed
&& !node->ifunc_resolver)

View File

@ -5777,7 +5777,7 @@ free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
DECL_INITIAL (decl) = error_mark_node;
}
}
if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
if (gimple_has_body_p (decl) || (node && node->thunk))
{
tree t;

View File

@ -1222,7 +1222,7 @@ init_node_map (bool local)
cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
FOR_EACH_DEFINED_FUNCTION (n)
if (n->has_gimple_body_p () || n->thunk.thunk_p)
if (n->has_gimple_body_p () || n->thunk)
{
cgraph_node **val;
dump_user_location_t loc