1 /* Basic IPA utilities for type inheritance graph construction and
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
84 build_type_inheritance_graph triggers a construction of the type inheritance
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
102 possible_polymorphic_call_targets returns, given the parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
110 #include "coretypes.h"
113 #include "print-tree.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
133 static bool odr_violation_reported
= false;
135 /* Dummy polymorphic call context. */
137 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
138 = {0, NULL
, false, true};
140 /* Pointer set of all call targets appearing in the cache. */
141 static pointer_set_t
*cached_polymorphic_call_targets
;
143 /* The node of type inheritance graph. For each type unique in
144 One Definition Rule (ODR) sense, we produce one node linking all
145 main variants of types equivalent to it, bases and derived types. */
147 struct GTY(()) odr_type_d
152 vec
<odr_type
> GTY((skip
)) bases
;
153 /* All derrived types with virtual methods seen in unit. */
154 vec
<odr_type
> GTY((skip
)) derived_types
;
156 /* All equivalent types, if more than one. */
157 vec
<tree
, va_gc
> *types
;
158 /* Set of all equivalent types, if NON-NULL. */
159 pointer_set_t
* GTY((skip
)) types_set
;
161 /* Unique ID indexing the type in odr_types array. */
163 /* Is it in anonymous namespace? */
164 bool anonymous_namespace
;
165 /* Do we know about all derivations of given type? */
166 bool all_derivations_known
;
170 /* Return true if BINFO corresponds to a type with virtual methods.
172 Every type has several BINFOs. One is the BINFO associated by the type
173 while other represents bases of derived types. The BINFOs representing
174 bases do not have BINFO_VTABLE pointer set when this is the single
175 inheritance (because vtables are shared). Look up the BINFO of type
176 and check presence of its vtable. */
179 polymorphic_type_binfo_p (tree binfo
)
181 /* See if BINFO's type has an virtual table associtated with it. */
182 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo
)));
185 /* Return TRUE if all derived types of T are known and thus
186 we may consider the walk of derived type complete.
188 This is typically true only for final anonymous namespace types and types
189 defined within functions (that may be COMDAT and thus shared across units,
190 but with the same set of derived types). */
193 type_all_derivations_known_p (tree t
)
195 if (TYPE_FINAL_P (t
))
199 if (type_in_anonymous_namespace_p (t
))
201 return (decl_function_context (TYPE_NAME (t
)) != NULL
);
204 /* Return TURE if type's constructors are all visible. */
207 type_all_ctors_visible_p (tree t
)
210 && cgraph_state
>= CGRAPH_STATE_CONSTRUCTION
211 /* We can not always use type_all_derivations_known_p.
212 For function local types we must assume case where
213 the function is COMDAT and shared in between units.
215 TODO: These cases are quite easy to get, but we need
216 to keep track of C++ privatizing via -Wno-weak
217 as well as the IPA privatizing. */
218 && type_in_anonymous_namespace_p (t
);
221 /* Return TRUE if type may have instance. */
224 type_possibly_instantiated_p (tree t
)
229 /* TODO: Add abstract types here. */
230 if (!type_all_ctors_visible_p (t
))
233 vtable
= BINFO_VTABLE (TYPE_BINFO (t
));
234 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
235 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
236 vnode
= varpool_get_node (vtable
);
237 return vnode
&& vnode
->definition
;
240 /* One Definition Rule hashtable helpers. */
244 typedef odr_type_d value_type
;
245 typedef union tree_node compare_type
;
246 static inline hashval_t
hash (const value_type
*);
247 static inline bool equal (const value_type
*, const compare_type
*);
248 static inline void remove (value_type
*);
251 /* Produce hash based on type name. */
254 hash_type_name (tree t
)
256 gcc_checking_assert (TYPE_MAIN_VARIANT (t
) == t
);
258 /* If not in LTO, all main variants are unique, so we can do
261 return htab_hash_pointer (t
);
263 /* Anonymous types are unique. */
264 if (type_in_anonymous_namespace_p (t
))
265 return htab_hash_pointer (t
);
267 /* For polymorphic types, we can simply hash the virtual table. */
268 if (TYPE_BINFO (t
) && BINFO_VTABLE (TYPE_BINFO (t
)))
270 tree v
= BINFO_VTABLE (TYPE_BINFO (t
));
273 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
275 hash
= TREE_INT_CST_LOW (TREE_OPERAND (v
, 1));
276 v
= TREE_OPERAND (TREE_OPERAND (v
, 0), 0);
279 v
= DECL_ASSEMBLER_NAME (v
);
280 hash
= iterative_hash_hashval_t (hash
, htab_hash_pointer (v
));
284 /* Rest is not implemented yet. */
288 /* Return the computed hashcode for ODR_TYPE. */
291 odr_hasher::hash (const value_type
*odr_type
)
293 return hash_type_name (odr_type
->type
);
296 /* Compare types T1 and T2 and return true if they are
300 odr_hasher::equal (const value_type
*t1
, const compare_type
*ct2
)
302 tree t2
= const_cast <tree
> (ct2
);
304 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2
) == ct2
);
309 return types_same_for_odr (t1
->type
, t2
);
312 /* Free ODR type V. */
315 odr_hasher::remove (value_type
*v
)
318 v
->derived_types
.release ();
320 pointer_set_destroy (v
->types_set
);
324 /* ODR type hash used to lookup ODR type based on tree type node. */
326 typedef hash_table
<odr_hasher
> odr_hash_type
;
327 static odr_hash_type odr_hash
;
329 /* ODR types are also stored into ODR_TYPE vector to allow consistent
330 walking. Bases appear before derived types. Vector is garbage collected
331 so we won't end up visiting empty types. */
333 static GTY(()) vec
<odr_type
, va_gc
> *odr_types_ptr
;
334 #define odr_types (*odr_types_ptr)
336 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
337 from VAL->type. This may happen in LTO where tree merging did not merge
338 all variants of the same type. It may or may not mean the ODR violation.
339 Add it to the list of duplicates and warn on some violations. */
342 add_type_duplicate (odr_type val
, tree type
)
345 val
->types_set
= pointer_set_create ();
347 /* See if this duplicate is new. */
348 if (!pointer_set_insert (val
->types_set
, type
))
351 bool base_mismatch
= false;
352 gcc_assert (in_lto_p
);
353 vec_safe_push (val
->types
, type
);
356 /* First we compare memory layout. */
357 if (!types_compatible_p (val
->type
, type
))
360 odr_violation_reported
= true;
361 if (BINFO_VTABLE (TYPE_BINFO (val
->type
))
362 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type
)), 0,
363 "type %qD violates one definition rule ",
365 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
366 "a type with the same name but different layout is "
367 "defined in another translation unit");
368 if (cgraph_dump_file
)
370 fprintf (cgraph_dump_file
, "ODR violation or merging or ODR type bug?\n");
372 print_node (cgraph_dump_file
, "", val
->type
, 0);
373 putc ('\n',cgraph_dump_file
);
374 print_node (cgraph_dump_file
, "", type
, 0);
375 putc ('\n',cgraph_dump_file
);
379 /* Next sanity check that bases are the same. If not, we will end
380 up producing wrong answers. */
381 for (j
= 0, i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
382 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type
), i
)))
384 odr_type base
= get_odr_type
386 (BINFO_BASE_BINFO (TYPE_BINFO (type
),
389 if (val
->bases
.length () <= j
|| val
->bases
[j
] != base
)
390 base_mismatch
= true;
396 odr_violation_reported
= true;
398 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type
)), 0,
399 "type %qD violates one definition rule ",
401 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
402 "a type with the same name but different bases is "
403 "defined in another translation unit");
404 if (cgraph_dump_file
)
406 fprintf (cgraph_dump_file
, "ODR bse violation or merging bug?\n");
408 print_node (cgraph_dump_file
, "", val
->type
, 0);
409 putc ('\n',cgraph_dump_file
);
410 print_node (cgraph_dump_file
, "", type
, 0);
411 putc ('\n',cgraph_dump_file
);
415 /* Regularize things a little. During LTO same types may come with
416 different BINFOs. Either because their virtual table was
417 not merged by tree merging and only later at decl merging or
418 because one type comes with external vtable, while other
419 with internal. We want to merge equivalent binfos to conserve
420 memory and streaming overhead.
422 The external vtables are more harmful: they contain references
423 to external declarations of methods that may be defined in the
424 merged LTO unit. For this reason we absolutely need to remove
425 them and replace by internal variants. Not doing so will lead
426 to incomplete answers from possible_polymorphic_call_targets. */
427 if (!flag_ltrans
&& merge
)
429 tree master_binfo
= TYPE_BINFO (val
->type
);
430 tree v1
= BINFO_VTABLE (master_binfo
);
431 tree v2
= BINFO_VTABLE (TYPE_BINFO (type
));
433 if (TREE_CODE (v1
) == POINTER_PLUS_EXPR
)
435 gcc_assert (TREE_CODE (v2
) == POINTER_PLUS_EXPR
436 && operand_equal_p (TREE_OPERAND (v1
, 1),
437 TREE_OPERAND (v2
, 1), 0));
438 v1
= TREE_OPERAND (TREE_OPERAND (v1
, 0), 0);
439 v2
= TREE_OPERAND (TREE_OPERAND (v2
, 0), 0);
441 gcc_assert (DECL_ASSEMBLER_NAME (v1
)
442 == DECL_ASSEMBLER_NAME (v2
));
444 if (DECL_EXTERNAL (v1
) && !DECL_EXTERNAL (v2
))
448 TYPE_BINFO (val
->type
) = TYPE_BINFO (type
);
449 for (i
= 0; i
< val
->types
->length (); i
++)
451 if (TYPE_BINFO ((*val
->types
)[i
])
453 TYPE_BINFO ((*val
->types
)[i
]) = TYPE_BINFO (type
);
457 TYPE_BINFO (type
) = master_binfo
;
462 /* Get ODR type hash entry for TYPE. If INSERT is true, create
463 possibly new entry. */
466 get_odr_type (tree type
, bool insert
)
472 type
= TYPE_MAIN_VARIANT (type
);
473 gcc_checking_assert (TYPE_MAIN_VARIANT (type
) == type
);
474 hash
= hash_type_name (type
);
475 slot
= odr_hash
.find_slot_with_hash (type
, hash
, insert
? INSERT
: NO_INSERT
);
479 /* See if we already have entry for type. */
484 /* With LTO we need to support multiple tree representation of
485 the same ODR type. */
486 if (val
->type
!= type
)
487 add_type_duplicate (val
, type
);
491 tree binfo
= TYPE_BINFO (type
);
494 val
= ggc_alloc_cleared_odr_type_d ();
497 val
->derived_types
= vNULL
;
498 val
->anonymous_namespace
= type_in_anonymous_namespace_p (type
);
499 val
->all_derivations_known
= type_all_derivations_known_p (type
);
501 for (i
= 0; i
< BINFO_N_BASE_BINFOS (binfo
); i
++)
502 /* For now record only polymorphic types. other are
503 pointless for devirtualization and we can not precisely
504 determine ODR equivalency of these during LTO. */
505 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo
, i
)))
507 odr_type base
= get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo
,
510 base
->derived_types
.safe_push (val
);
511 val
->bases
.safe_push (base
);
513 /* First record bases, then add into array so ids are increasing. */
515 val
->id
= odr_types
.length ();
516 vec_safe_push (odr_types_ptr
, val
);
521 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
522 recusive printing. */
525 dump_odr_type (FILE *f
, odr_type t
, int indent
=0)
528 fprintf (f
, "%*s type %i: ", indent
* 2, "", t
->id
);
529 print_generic_expr (f
, t
->type
, TDF_SLIM
);
530 fprintf (f
, "%s", t
->anonymous_namespace
? " (anonymous namespace)":"");
531 fprintf (f
, "%s\n", t
->all_derivations_known
? " (derivations known)":"");
532 if (TYPE_NAME (t
->type
))
534 fprintf (f
, "%*s defined at: %s:%i\n", indent
* 2, "",
535 DECL_SOURCE_FILE (TYPE_NAME (t
->type
)),
536 DECL_SOURCE_LINE (TYPE_NAME (t
->type
)));
538 if (t
->bases
.length ())
540 fprintf (f
, "%*s base odr type ids: ", indent
* 2, "");
541 for (i
= 0; i
< t
->bases
.length (); i
++)
542 fprintf (f
, " %i", t
->bases
[i
]->id
);
545 if (t
->derived_types
.length ())
547 fprintf (f
, "%*s derived types:\n", indent
* 2, "");
548 for (i
= 0; i
< t
->derived_types
.length (); i
++)
549 dump_odr_type (f
, t
->derived_types
[i
], indent
+ 1);
554 /* Dump the type inheritance graph. */
557 dump_type_inheritance_graph (FILE *f
)
562 fprintf (f
, "\n\nType inheritance graph:\n");
563 for (i
= 0; i
< odr_types
.length (); i
++)
565 if (odr_types
[i
]->bases
.length () == 0)
566 dump_odr_type (f
, odr_types
[i
]);
568 for (i
= 0; i
< odr_types
.length (); i
++)
570 if (odr_types
[i
]->types
&& odr_types
[i
]->types
->length ())
573 fprintf (f
, "Duplicate tree types for odr type %i\n", i
);
574 print_node (f
, "", odr_types
[i
]->type
, 0);
575 for (j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
578 fprintf (f
, "duplicate #%i\n", j
);
579 print_node (f
, "", (*odr_types
[i
]->types
)[j
], 0);
580 t
= (*odr_types
[i
]->types
)[j
];
581 while (TYPE_P (t
) && TYPE_CONTEXT (t
))
583 t
= TYPE_CONTEXT (t
);
584 print_node (f
, "", t
, 0);
592 /* Given method type T, return type of class it belongs to.
593 Lookup this pointer and get its type. */
596 method_class_type (tree t
)
598 tree first_parm_type
= TREE_VALUE (TYPE_ARG_TYPES (t
));
599 gcc_assert (TREE_CODE (t
) == METHOD_TYPE
);
601 return TREE_TYPE (first_parm_type
);
604 /* Initialize IPA devirt and build inheritance tree graph. */
607 build_type_inheritance_graph (void)
609 struct symtab_node
*n
;
610 FILE *inheritance_dump_file
;
613 if (odr_hash
.is_created ())
615 timevar_push (TV_IPA_INHERITANCE
);
616 inheritance_dump_file
= dump_begin (TDI_inheritance
, &flags
);
617 odr_hash
.create (23);
619 /* We reconstruct the graph starting of types of all methods seen in the
622 if (is_a
<cgraph_node
*> (n
)
623 && DECL_VIRTUAL_P (n
->decl
)
624 && symtab_real_symbol_p (n
))
625 get_odr_type (method_class_type (TREE_TYPE (n
->decl
)), true);
627 /* Look also for virtual tables of types that do not define any methods.
629 We need it in a case where class B has virtual base of class A
630 re-defining its virtual method and there is class C with no virtual
631 methods with B as virtual base.
633 Here we output B's virtual method in two variant - for non-virtual
634 and virtual inheritance. B's virtual table has non-virtual version,
635 while C's has virtual.
637 For this reason we need to know about C in order to include both
638 variants of B. More correctly, record_target_from_binfo should
639 add both variants of the method when walking B, but we have no
640 link in between them.
642 We rely on fact that either the method is exported and thus we
643 assume it is called externally or C is in anonymous namespace and
644 thus we will see the vtable. */
646 else if (is_a
<varpool_node
*> (n
)
647 && DECL_VIRTUAL_P (n
->decl
)
648 && TREE_CODE (DECL_CONTEXT (n
->decl
)) == RECORD_TYPE
649 && TYPE_BINFO (DECL_CONTEXT (n
->decl
))
650 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n
->decl
))))
651 get_odr_type (DECL_CONTEXT (n
->decl
), true);
652 if (inheritance_dump_file
)
654 dump_type_inheritance_graph (inheritance_dump_file
);
655 dump_end (TDI_inheritance
, inheritance_dump_file
);
657 timevar_pop (TV_IPA_INHERITANCE
);
660 /* Return true if N has reference from live virtual table
661 (and thus can be a destination of polymorphic call).
662 Be conservatively correct when callgraph is not built or
663 if the method may be referred externally. */
666 referenced_from_vtable_p (struct cgraph_node
*node
)
672 if (node
->externally_visible
673 || node
->used_from_other_partition
)
676 /* Keep this test constant time.
677 It is unlikely this can happen except for the case where speculative
678 devirtualization introduced many speculative edges to this node.
679 In this case the target is very likely alive anyway. */
680 if (node
->ref_list
.referring
.length () > 100)
683 /* We need references built. */
684 if (cgraph_state
<= CGRAPH_STATE_CONSTRUCTION
)
687 for (i
= 0; ipa_ref_list_referring_iterate (&node
->ref_list
,
690 if ((ref
->use
== IPA_REF_ALIAS
691 && referenced_from_vtable_p (cgraph (ref
->referring
)))
692 || (ref
->use
== IPA_REF_ADDR
693 && TREE_CODE (ref
->referring
->decl
) == VAR_DECL
694 && DECL_VIRTUAL_P (ref
->referring
->decl
)))
702 /* If TARGET has associated node, record it in the NODES array.
703 CAN_REFER specify if program can refer to the target directly.
704 if TARGET is unknown (NULL) or it can not be inserted (for example because
705 its body was already removed and there is no way to refer to it), clear
709 maybe_record_node (vec
<cgraph_node
*> &nodes
,
710 tree target
, pointer_set_t
*inserted
,
714 struct cgraph_node
*target_node
;
716 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
717 list of targets; the runtime effect of calling them is undefined.
718 Only "real" virtual methods should be accounted. */
719 if (target
&& TREE_CODE (TREE_TYPE (target
)) != METHOD_TYPE
)
724 /* The only case when method of anonymous namespace becomes unreferable
725 is when we completely optimized it out. */
728 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target
)))
736 target_node
= cgraph_get_node (target
);
738 /* Method can only be called by polymorphic call if any
739 of vtables refering to it are alive.
741 While this holds for non-anonymous functions, too, there are
742 cases where we want to keep them in the list; for example
743 inline functions with -fno-weak are static, but we still
744 may devirtualize them when instance comes from other unit.
745 The same holds for LTO.
747 Currently we ignore these functions in speculative devirtualization.
748 ??? Maybe it would make sense to be more aggressive for LTO even
751 && type_in_anonymous_namespace_p (DECL_CONTEXT (target
))
753 || !referenced_from_vtable_p (target_node
)))
755 /* See if TARGET is useful function we can deal with. */
756 else if (target_node
!= NULL
757 && (TREE_PUBLIC (target
)
758 || DECL_EXTERNAL (target
)
759 || target_node
->definition
)
760 && symtab_real_symbol_p (target_node
))
762 gcc_assert (!target_node
->global
.inlined_to
);
763 gcc_assert (symtab_real_symbol_p (target_node
));
764 if (!pointer_set_insert (inserted
, target
))
766 pointer_set_insert (cached_polymorphic_call_targets
,
768 nodes
.safe_push (target_node
);
772 && (!type_in_anonymous_namespace_p
773 (DECL_CONTEXT (target
))
778 /* See if BINFO's type match OUTER_TYPE. If so, lookup
779 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
780 method in vtable and insert method to NODES array
781 or BASES_TO_CONSIDER if this array is non-NULL.
782 Otherwise recurse to base BINFOs.
783 This match what get_binfo_at_offset does, but with offset
786 TYPE_BINFOS is a stack of BINFOS of types with defined
787 virtual table seen on way from class type to BINFO.
789 MATCHED_VTABLES tracks virtual tables we already did lookup
790 for virtual function in. INSERTED tracks nodes we already
793 ANONYMOUS is true if BINFO is part of anonymous namespace.
795 Clear COMPLETEP when we hit unreferable target.
799 record_target_from_binfo (vec
<cgraph_node
*> &nodes
,
800 vec
<tree
> *bases_to_consider
,
803 vec
<tree
> &type_binfos
,
804 HOST_WIDE_INT otr_token
,
806 HOST_WIDE_INT offset
,
807 pointer_set_t
*inserted
,
808 pointer_set_t
*matched_vtables
,
812 tree type
= BINFO_TYPE (binfo
);
817 if (BINFO_VTABLE (binfo
))
818 type_binfos
.safe_push (binfo
);
819 if (types_same_for_odr (type
, outer_type
))
822 tree type_binfo
= NULL
;
824 /* Lookup BINFO with virtual table. For normal types it is always last
826 for (i
= type_binfos
.length () - 1; i
>= 0; i
--)
827 if (BINFO_OFFSET (type_binfos
[i
]) == BINFO_OFFSET (binfo
))
829 type_binfo
= type_binfos
[i
];
832 if (BINFO_VTABLE (binfo
))
834 /* If this is duplicated BINFO for base shared by virtual inheritance,
835 we may not have its associated vtable. This is not a problem, since
836 we will walk it on the other path. */
839 tree inner_binfo
= get_binfo_at_offset (type_binfo
,
843 gcc_assert (odr_violation_reported
);
846 /* For types in anonymous namespace first check if the respective vtable
847 is alive. If not, we know the type can't be called. */
848 if (!flag_ltrans
&& anonymous
)
850 tree vtable
= BINFO_VTABLE (inner_binfo
);
853 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
854 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
855 vnode
= varpool_get_node (vtable
);
856 if (!vnode
|| !vnode
->definition
)
859 gcc_assert (inner_binfo
);
860 if (bases_to_consider
861 ? !pointer_set_contains (matched_vtables
, BINFO_VTABLE (inner_binfo
))
862 : !pointer_set_insert (matched_vtables
, BINFO_VTABLE (inner_binfo
)))
865 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
868 if (!bases_to_consider
)
869 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
870 /* Destructors are never called via construction vtables. */
871 else if (!target
|| !DECL_CXX_DESTRUCTOR_P (target
))
872 bases_to_consider
->safe_push (target
);
878 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
879 /* Walking bases that have no virtual method is pointless excercise. */
880 if (polymorphic_type_binfo_p (base_binfo
))
881 record_target_from_binfo (nodes
, bases_to_consider
, base_binfo
, otr_type
,
883 otr_token
, outer_type
, offset
, inserted
,
884 matched_vtables
, anonymous
, completep
);
885 if (BINFO_VTABLE (binfo
))
889 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
890 of TYPE, insert them to NODES, recurse into derived nodes.
891 INSERTED is used to avoid duplicate insertions of methods into NODES.
892 MATCHED_VTABLES are used to avoid duplicate walking vtables.
893 Clear COMPLETEP if unreferable target is found.
895 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
896 all cases where BASE_SKIPPED is true (because the base is abstract
900 possible_polymorphic_call_targets_1 (vec
<cgraph_node
*> &nodes
,
901 pointer_set_t
*inserted
,
902 pointer_set_t
*matched_vtables
,
905 HOST_WIDE_INT otr_token
,
907 HOST_WIDE_INT offset
,
909 vec
<tree
> &bases_to_consider
,
910 bool consider_construction
)
912 tree binfo
= TYPE_BINFO (type
->type
);
914 vec
<tree
> type_binfos
= vNULL
;
915 bool possibly_instantiated
= type_possibly_instantiated_p (type
->type
);
917 /* We may need to consider types w/o instances because of possible derived
918 types using their methods either directly or via construction vtables.
919 We are safe to skip them when all derivations are known, since we will
921 This is done by recording them to BASES_TO_CONSIDER array. */
922 if (possibly_instantiated
|| consider_construction
)
924 record_target_from_binfo (nodes
,
925 (!possibly_instantiated
926 && type_all_derivations_known_p (type
->type
))
927 ? &bases_to_consider
: NULL
,
928 binfo
, otr_type
, type_binfos
, otr_token
,
930 inserted
, matched_vtables
,
931 type
->anonymous_namespace
, completep
);
933 type_binfos
.release ();
934 for (i
= 0; i
< type
->derived_types
.length (); i
++)
935 possible_polymorphic_call_targets_1 (nodes
, inserted
,
938 type
->derived_types
[i
],
939 otr_token
, outer_type
, offset
, completep
,
940 bases_to_consider
, consider_construction
);
943 /* Cache of queries for polymorphic call targets.
945 Enumerating all call targets may get expensive when there are many
946 polymorphic calls in the program, so we memoize all the previous
947 queries and avoid duplicated work. */
949 struct polymorphic_call_target_d
951 HOST_WIDE_INT otr_token
;
952 ipa_polymorphic_call_context context
;
954 vec
<cgraph_node
*> targets
;
955 int nonconstruction_targets
;
959 /* Polymorphic call target cache helpers. */
961 struct polymorphic_call_target_hasher
963 typedef polymorphic_call_target_d value_type
;
964 typedef polymorphic_call_target_d compare_type
;
965 static inline hashval_t
hash (const value_type
*);
966 static inline bool equal (const value_type
*, const compare_type
*);
967 static inline void remove (value_type
*);
970 /* Return the computed hashcode for ODR_QUERY. */
973 polymorphic_call_target_hasher::hash (const value_type
*odr_query
)
977 hash
= iterative_hash_host_wide_int
978 (odr_query
->otr_token
,
979 odr_query
->type
->id
);
980 hash
= iterative_hash_hashval_t (TYPE_UID (odr_query
->context
.outer_type
),
982 hash
= iterative_hash_host_wide_int (odr_query
->context
.offset
, hash
);
983 return iterative_hash_hashval_t
984 (((int)odr_query
->context
.maybe_in_construction
<< 1)
985 | (int)odr_query
->context
.maybe_derived_type
, hash
);
988 /* Compare cache entries T1 and T2. */
991 polymorphic_call_target_hasher::equal (const value_type
*t1
,
992 const compare_type
*t2
)
994 return (t1
->type
== t2
->type
&& t1
->otr_token
== t2
->otr_token
995 && t1
->context
.offset
== t2
->context
.offset
996 && t1
->context
.outer_type
== t2
->context
.outer_type
997 && t1
->context
.maybe_in_construction
998 == t2
->context
.maybe_in_construction
999 && t1
->context
.maybe_derived_type
== t2
->context
.maybe_derived_type
);
1002 /* Remove entry in polymorphic call target cache hash. */
1005 polymorphic_call_target_hasher::remove (value_type
*v
)
1007 v
->targets
.release ();
1011 /* Polymorphic call target query cache. */
1013 typedef hash_table
<polymorphic_call_target_hasher
>
1014 polymorphic_call_target_hash_type
;
1015 static polymorphic_call_target_hash_type polymorphic_call_target_hash
;
1017 /* Destroy polymorphic call target query cache. */
1020 free_polymorphic_call_targets_hash ()
1022 if (cached_polymorphic_call_targets
)
1024 polymorphic_call_target_hash
.dispose ();
1025 pointer_set_destroy (cached_polymorphic_call_targets
);
1026 cached_polymorphic_call_targets
= NULL
;
1030 /* When virtual function is removed, we may need to flush the cache. */
1033 devirt_node_removal_hook (struct cgraph_node
*n
, void *d ATTRIBUTE_UNUSED
)
1035 if (cached_polymorphic_call_targets
1036 && pointer_set_contains (cached_polymorphic_call_targets
, n
))
1037 free_polymorphic_call_targets_hash ();
1040 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1041 is contained at CONTEXT->OFFSET. Walk the memory representation of
1042 CONTEXT->OUTER_TYPE and find the outermost class type that match
1043 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1046 For example when CONTEXT represents type
1052 and we look for type at offset sizeof(int), we end up with B and offset 0.
1053 If the same is produced by multiple inheritance, we end up with A and offset
1056 If we can not find corresponding class, give up by setting
1057 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1058 Return true when lookup was sucesful. */
1061 get_class_context (ipa_polymorphic_call_context
*context
,
1064 tree type
= context
->outer_type
;
1065 HOST_WIDE_INT offset
= context
->offset
;
1067 /* Find the sub-object the constant actually refers to and mark whether it is
1068 an artificial one (as opposed to a user-defined one). */
1071 HOST_WIDE_INT pos
, size
;
1074 /* On a match, just return what we found. */
1075 if (TREE_CODE (type
) == TREE_CODE (expected_type
)
1076 && types_same_for_odr (type
, expected_type
))
1078 /* Type can not contain itself on an non-zero offset. In that case
1082 gcc_assert (offset
== 0);
1086 /* Walk fields and find corresponding on at OFFSET. */
1087 if (TREE_CODE (type
) == RECORD_TYPE
)
1089 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1091 if (TREE_CODE (fld
) != FIELD_DECL
)
1094 pos
= int_bit_position (fld
);
1095 size
= tree_to_uhwi (DECL_SIZE (fld
));
1096 if (pos
<= offset
&& (pos
+ size
) > offset
)
1103 type
= TREE_TYPE (fld
);
1105 /* DECL_ARTIFICIAL represents a basetype. */
1106 if (!DECL_ARTIFICIAL (fld
))
1108 context
->outer_type
= type
;
1109 context
->offset
= offset
;
1110 /* As soon as we se an field containing the type,
1111 we know we are not looking for derivations. */
1112 context
->maybe_derived_type
= false;
1115 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1117 tree subtype
= TREE_TYPE (type
);
1119 /* Give up if we don't know array size. */
1120 if (!tree_fits_shwi_p (TYPE_SIZE (subtype
))
1121 || !tree_to_shwi (TYPE_SIZE (subtype
)) <= 0)
1123 offset
= offset
% tree_to_shwi (TYPE_SIZE (subtype
));
1125 context
->outer_type
= type
;
1126 context
->offset
= offset
;
1127 context
->maybe_derived_type
= false;
1129 /* Give up on anything else. */
1134 /* If we failed to find subtype we look for, give up and fall back to the
1135 most generic query. */
1137 context
->outer_type
= expected_type
;
1138 context
->offset
= 0;
1139 context
->maybe_derived_type
= true;
1143 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1146 contains_type_p (tree outer_type
, HOST_WIDE_INT offset
,
1149 ipa_polymorphic_call_context context
= {offset
, outer_type
,
1151 return get_class_context (&context
, otr_type
);
1154 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
/* NOTE(review): the extraction lost this function's declarations, braces and
   return statements; the surviving fragments below are kept byte-identical
   and only comments are added.  */
1157 subbinfo_with_vtable_at_offset (tree binfo
, unsigned HOST_WIDE_INT offset
,
/* V is the vtable pointer expression recorded on BINFO, if any.  */
1160 tree v
= BINFO_VTABLE (binfo
);
1163 unsigned HOST_WIDE_INT this_offset
;
/* Decompose V into the vtable VAR_DECL and its offset within it.  */
1167 if (!vtable_pointer_value_to_vtable (v
, &v
, &this_offset
))
/* BINFO itself is the answer when both the offset and the vtable decl
   (compared via assembler name) agree.  */
1170 if (offset
== this_offset
1171 && DECL_ASSEMBLER_NAME (v
) == DECL_ASSEMBLER_NAME (vtable
))
/* Otherwise search recursively in every polymorphic base.  */
1175 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
1176 if (polymorphic_type_binfo_p (base_binfo
))
1178 base_binfo
= subbinfo_with_vtable_at_offset (base_binfo
, offset
, vtable
);
1185 /* T is known constant value of virtual table pointer.
1186 Store virtual table to V and its offset to OFFSET.
1187 Return false if T does not look like virtual table reference. */
1190 vtable_pointer_value_to_vtable (tree t
, tree
*v
, unsigned HOST_WIDE_INT
*offset
)
1192 /* We expect &MEM[(void *)&virtual_table + 16B].
1193 We obtain object's BINFO from the context of the virtual table.
1194 This one contains pointer to virtual table represented via
1195 POINTER_PLUS_EXPR. Verify that this pointer match to what
1196 we propagated through.
1198 In the case of virtual inheritance, the virtual tables may
1199 be nested, i.e. the offset may be different from 16 and we may
1200 need to dive into the type representation. */
1201 if (TREE_CODE (t
) == ADDR_EXPR
1202 && TREE_CODE (TREE_OPERAND (t
, 0)) == MEM_REF
1203 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == ADDR_EXPR
1204 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 1)) == INTEGER_CST
1205 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0))
1207 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1208 (TREE_OPERAND (t
, 0), 0), 0)))
1210 *v
= TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0);
1211 *offset
= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t
, 0), 1));
1215 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1216 We need to handle it when T comes from static variable initializer or
1218 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1220 *offset
= tree_to_uhwi (TREE_OPERAND (t
, 1));
1221 t
= TREE_OPERAND (t
, 0);
1226 if (TREE_CODE (t
) != ADDR_EXPR
)
1228 *v
= TREE_OPERAND (t
, 0);
1232 /* T is known constant value of virtual table pointer. Return BINFO of the
1236 vtable_pointer_value_to_binfo (tree t
)
1239 unsigned HOST_WIDE_INT offset
;
1241 if (!vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
1244 /* FIXME: for stores of construction vtables we return NULL,
1245 because we do not have BINFO for those. Eventually we should fix
1246 our representation to allow this case to be handled, too.
1247 In the case we see store of BINFO we however may assume
1248 that standard folding will be ale to cope with it. */
1249 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
1253 /* Produce polymorphic call context for call method of instance
1254 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1257 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context
*context
,
1258 tree base
, HOST_WIDE_INT offset
)
1260 gcc_assert (DECL_P (base
));
1262 context
->outer_type
= TREE_TYPE (base
);
1263 context
->offset
= offset
;
1264 /* Make very conservative assumption that all objects
1265 may be in construction.
1266 TODO: ipa-prop already contains code to tell better.
1268 context
->maybe_in_construction
= true;
1269 context
->maybe_derived_type
= false;
1272 /* CST is an invariant (address of decl), try to get meaningful
1273 polymorphic call context for polymorphic call of method
1274 if instance of OTR_TYPE that is located at OFFSET of this invariant.
1275 Return FALSE if nothing meaningful can be found. */
1278 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context
*context
,
1281 HOST_WIDE_INT offset
)
1283 HOST_WIDE_INT offset2
, size
, max_size
;
1286 if (TREE_CODE (cst
) != ADDR_EXPR
)
1289 cst
= TREE_OPERAND (cst
, 0);
1290 base
= get_ref_base_and_extent (cst
, &offset2
, &size
, &max_size
);
1291 if (!DECL_P (base
) || max_size
== -1 || max_size
!= size
)
1294 /* Only type inconsistent programs can have otr_type that is
1295 not part of outer type. */
1296 if (!contains_type_p (TREE_TYPE (base
), offset
, otr_type
))
1299 get_polymorphic_call_info_for_decl (context
, base
, offset
);
1303 /* Given REF call in FNDECL, determine class of the polymorphic
1304 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1305 Return pointer to object described by the context */
1308 get_polymorphic_call_info (tree fndecl
,
1311 HOST_WIDE_INT
*otr_token
,
1312 ipa_polymorphic_call_context
*context
)
1315 *otr_type
= obj_type_ref_class (ref
);
1316 *otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref
));
1318 /* Set up basic info in case we find nothing interesting in the analysis. */
1319 context
->outer_type
= *otr_type
;
1320 context
->offset
= 0;
1321 base_pointer
= OBJ_TYPE_REF_OBJECT (ref
);
1322 context
->maybe_derived_type
= true;
1323 context
->maybe_in_construction
= true;
1325 /* Walk SSA for outer object. */
1328 if (TREE_CODE (base_pointer
) == SSA_NAME
1329 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1330 && SSA_NAME_DEF_STMT (base_pointer
)
1331 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer
)))
1333 base_pointer
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer
));
1334 STRIP_NOPS (base_pointer
);
1336 else if (TREE_CODE (base_pointer
) == ADDR_EXPR
)
1338 HOST_WIDE_INT size
, max_size
;
1339 HOST_WIDE_INT offset2
;
1340 tree base
= get_ref_base_and_extent (TREE_OPERAND (base_pointer
, 0),
1341 &offset2
, &size
, &max_size
);
1343 /* If this is a varying address, punt. */
1344 if ((TREE_CODE (base
) == MEM_REF
|| DECL_P (base
))
1346 && max_size
== size
)
1348 /* We found dereference of a pointer. Type of the pointer
1349 and MEM_REF is meaningless, but we can look futher. */
1350 if (TREE_CODE (base
) == MEM_REF
)
1352 base_pointer
= TREE_OPERAND (base
, 0);
1354 += offset2
+ mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1355 context
->outer_type
= NULL
;
1357 /* We found base object. In this case the outer_type
1359 else if (DECL_P (base
))
1361 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base
)));
1363 /* Only type inconsistent programs can have otr_type that is
1364 not part of outer type. */
1365 if (!contains_type_p (TREE_TYPE (base
),
1366 context
->offset
+ offset2
, *otr_type
))
1368 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1369 code sequences; we arrange the calls to be builtin_unreachable
1371 *otr_token
= INT_MAX
;
1372 return base_pointer
;
1374 get_polymorphic_call_info_for_decl (context
, base
,
1375 context
->offset
+ offset2
);
1384 else if (TREE_CODE (base_pointer
) == POINTER_PLUS_EXPR
1385 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer
, 1)))
1387 context
->offset
+= tree_to_shwi (TREE_OPERAND (base_pointer
, 1))
1389 base_pointer
= TREE_OPERAND (base_pointer
, 0);
1396 /* Try to determine type of the outer object. */
1397 if (TREE_CODE (base_pointer
) == SSA_NAME
1398 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1399 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
)
1401 /* See if parameter is THIS pointer of a method. */
1402 if (TREE_CODE (TREE_TYPE (fndecl
)) == METHOD_TYPE
1403 && SSA_NAME_VAR (base_pointer
) == DECL_ARGUMENTS (fndecl
))
1405 context
->outer_type
= TREE_TYPE (TREE_TYPE (base_pointer
));
1406 gcc_assert (TREE_CODE (context
->outer_type
) == RECORD_TYPE
);
1408 /* Dynamic casting has possibly upcasted the type
1409 in the hiearchy. In this case outer type is less
1410 informative than inner type and we should forget
1412 if (!contains_type_p (context
->outer_type
, context
->offset
,
1415 context
->outer_type
= NULL
;
1416 return base_pointer
;
1419 /* If the function is constructor or destructor, then
1420 the type is possibly in construction, but we know
1421 it is not derived type. */
1422 if (DECL_CXX_CONSTRUCTOR_P (fndecl
)
1423 || DECL_CXX_DESTRUCTOR_P (fndecl
))
1425 context
->maybe_in_construction
= true;
1426 context
->maybe_derived_type
= false;
1430 context
->maybe_derived_type
= true;
1431 context
->maybe_in_construction
= false;
1433 return base_pointer
;
1435 /* Non-PODs passed by value are really passed by invisible
1436 reference. In this case we also know the type of the
1438 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer
)))
1440 context
->outer_type
= TREE_TYPE (TREE_TYPE (base_pointer
));
1441 gcc_assert (!POINTER_TYPE_P (context
->outer_type
));
1442 /* Only type inconsistent programs can have otr_type that is
1443 not part of outer type. */
1444 if (!contains_type_p (context
->outer_type
, context
->offset
,
1447 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1448 code sequences; we arrange the calls to be builtin_unreachable
1450 *otr_token
= INT_MAX
;
1451 return base_pointer
;
1453 context
->maybe_derived_type
= false;
1454 context
->maybe_in_construction
= false;
1455 return base_pointer
;
1458 /* TODO: There are multiple ways to derive a type. For instance
1459 if BASE_POINTER is passed to an constructor call prior our refernece.
1460 We do not make this type of flow sensitive analysis yet. */
1461 return base_pointer
;
1464 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1465 Lookup their respective virtual methods for OTR_TOKEN and OTR_TYPE
1466 and insert them to NODES.
1468 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1471 record_targets_from_bases (tree otr_type
,
1472 HOST_WIDE_INT otr_token
,
1474 HOST_WIDE_INT offset
,
1475 vec
<cgraph_node
*> &nodes
,
1476 pointer_set_t
*inserted
,
1477 pointer_set_t
*matched_vtables
,
1482 HOST_WIDE_INT pos
, size
;
1486 if (types_same_for_odr (outer_type
, otr_type
))
1489 for (fld
= TYPE_FIELDS (outer_type
); fld
; fld
= DECL_CHAIN (fld
))
1491 if (TREE_CODE (fld
) != FIELD_DECL
)
1494 pos
= int_bit_position (fld
);
1495 size
= tree_to_shwi (DECL_SIZE (fld
));
1496 if (pos
<= offset
&& (pos
+ size
) > offset
1497 /* Do not get confused by zero sized bases. */
1498 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld
))))
1501 /* Within a class type we should always find correcponding fields. */
1502 gcc_assert (fld
&& TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
);
1504 /* Nonbasetypes should have been stripped by outer_class_type. */
1505 gcc_assert (DECL_ARTIFICIAL (fld
));
1507 outer_type
= TREE_TYPE (fld
);
1510 base_binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
),
1514 gcc_assert (odr_violation_reported
);
1517 gcc_assert (base_binfo
);
1518 if (!pointer_set_insert (matched_vtables
, BINFO_VTABLE (base_binfo
)))
1521 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
1524 if (!target
|| ! DECL_CXX_DESTRUCTOR_P (target
))
1525 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
1526 pointer_set_insert (matched_vtables
, BINFO_VTABLE (base_binfo
));
1531 /* When virtual table is removed, we may need to flush the cache. */
1534 devirt_variable_node_removal_hook (varpool_node
*n
,
1535 void *d ATTRIBUTE_UNUSED
)
1537 if (cached_polymorphic_call_targets
1538 && DECL_VIRTUAL_P (n
->decl
)
1539 && type_in_anonymous_namespace_p (DECL_CONTEXT (n
->decl
)))
1540 free_polymorphic_call_targets_hash ();
1543 /* Return vector containing possible targets of polymorphic call of type
1544 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1545 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
1546 OTR_TYPE and include their virtual method. This is useful for types
1547 possibly in construction or destruction where the virtual table may
1548 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1549 us to walk the inheritance graph for all derivations.
1551 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1552 undefined and should be redirected to unreachable.
1554 If COMPLETEP is non-NULL, store true if the list is complete.
1555 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1556 in the target cache. If user needs to visit every target list
1557 just once, it can memoize them.
1559 NONCONSTRUCTION_TARGETS specify number of targets with assumption that
1560 the type is not in the construction. Those targets appear first in the
1563 Returned vector is placed into cache. It is NOT caller's responsibility
1564 to free it. The vector can be freed on cgraph_remove_node call if
1565 the particular node is a virtual function present in the cache. */
1568 possible_polymorphic_call_targets (tree otr_type
,
1569 HOST_WIDE_INT otr_token
,
1570 ipa_polymorphic_call_context context
,
1573 int *nonconstruction_targetsp
)
1575 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
1576 pointer_set_t
*inserted
;
1577 pointer_set_t
*matched_vtables
;
1578 vec
<cgraph_node
*> nodes
= vNULL
;
1579 vec
<tree
> bases_to_consider
= vNULL
;
1580 odr_type type
, outer_type
;
1581 polymorphic_call_target_d key
;
1582 polymorphic_call_target_d
**slot
;
1587 bool skipped
= false;
1589 /* If ODR is not initialized, return empty incomplete list. */
1590 if (!odr_hash
.is_created ())
1594 if (nonconstruction_targetsp
)
1595 *nonconstruction_targetsp
= 0;
1599 /* If we hit type inconsistency, just return empty list of targets. */
1600 if (otr_token
== INT_MAX
)
1604 if (nonconstruction_targetsp
)
1605 *nonconstruction_targetsp
= 0;
1609 type
= get_odr_type (otr_type
, true);
1611 /* Lookup the outer class type we want to walk. */
1612 if (context
.outer_type
1613 && !get_class_context (&context
, otr_type
))
1617 if (nonconstruction_targetsp
)
1618 *nonconstruction_targetsp
= 0;
1622 /* We canonicalize our query, so we do not need extra hashtable entries. */
1624 /* Without outer type, we have no use for offset. Just do the
1625 basic search from innter type */
1626 if (!context
.outer_type
)
1628 context
.outer_type
= otr_type
;
1631 /* We need to update our hiearchy if the type does not exist. */
1632 outer_type
= get_odr_type (context
.outer_type
, true);
1633 /* If the type is complete, there are no derivations. */
1634 if (TYPE_FINAL_P (outer_type
->type
))
1635 context
.maybe_derived_type
= false;
1637 /* Initialize query cache. */
1638 if (!cached_polymorphic_call_targets
)
1640 cached_polymorphic_call_targets
= pointer_set_create ();
1641 polymorphic_call_target_hash
.create (23);
1642 if (!node_removal_hook_holder
)
1644 node_removal_hook_holder
=
1645 cgraph_add_node_removal_hook (&devirt_node_removal_hook
, NULL
);
1646 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook
,
1651 /* Lookup cached answer. */
1653 key
.otr_token
= otr_token
;
1654 key
.context
= context
;
1655 slot
= polymorphic_call_target_hash
.find_slot (&key
, INSERT
);
1657 *cache_token
= (void *)*slot
;
1661 *completep
= (*slot
)->complete
;
1662 if (nonconstruction_targetsp
)
1663 *nonconstruction_targetsp
= (*slot
)->nonconstruction_targets
;
1664 return (*slot
)->targets
;
1669 /* Do actual search. */
1670 timevar_push (TV_IPA_VIRTUAL_CALL
);
1671 *slot
= XCNEW (polymorphic_call_target_d
);
1673 *cache_token
= (void *)*slot
;
1674 (*slot
)->type
= type
;
1675 (*slot
)->otr_token
= otr_token
;
1676 (*slot
)->context
= context
;
1678 inserted
= pointer_set_create ();
1679 matched_vtables
= pointer_set_create ();
1681 /* First see virtual method of type itself. */
1682 binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
->type
),
1683 context
.offset
, otr_type
);
1685 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
1689 gcc_assert (odr_violation_reported
);
1693 /* Destructors are never called through construction virtual tables,
1694 because the type is always known. */
1695 if (target
&& DECL_CXX_DESTRUCTOR_P (target
))
1696 context
.maybe_in_construction
= false;
1700 /* In the case we get complete method, we don't need
1701 to walk derivations. */
1702 if (DECL_FINAL_P (target
))
1703 context
.maybe_derived_type
= false;
1706 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
1707 if (type_possibly_instantiated_p (outer_type
->type
))
1708 maybe_record_node (nodes
, target
, inserted
, can_refer
, &complete
);
1712 gcc_assert (in_lto_p
|| context
.maybe_derived_type
);
1715 pointer_set_insert (matched_vtables
, BINFO_VTABLE (binfo
));
1717 /* Next walk recursively all derived types. */
1718 if (context
.maybe_derived_type
)
1720 /* For anonymous namespace types we can attempt to build full type.
1721 All derivations must be in this unit (unless we see partial unit). */
1722 if (!type
->all_derivations_known
)
1724 for (i
= 0; i
< outer_type
->derived_types
.length(); i
++)
1725 possible_polymorphic_call_targets_1 (nodes
, inserted
,
1728 outer_type
->derived_types
[i
],
1729 otr_token
, outer_type
->type
,
1730 context
.offset
, &complete
,
1732 context
.maybe_in_construction
);
1735 /* Finally walk bases, if asked to. */
1736 (*slot
)->nonconstruction_targets
= nodes
.length();
1738 /* Destructors are never called through construction virtual tables,
1739 because the type is always known. One of entries may be cxa_pure_virtual
1740 so look to at least two of them. */
1741 if (context
.maybe_in_construction
)
1742 for (i
=0 ; i
< MIN (nodes
.length (), 2); i
++)
1743 if (DECL_CXX_DESTRUCTOR_P (nodes
[i
]->decl
))
1744 context
.maybe_in_construction
= false;
1745 if (context
.maybe_in_construction
)
1747 if (type
!= outer_type
1749 || (context
.maybe_derived_type
1750 && !type_all_derivations_known_p (outer_type
->type
))))
1751 record_targets_from_bases (otr_type
, otr_token
, outer_type
->type
,
1752 context
.offset
, nodes
, inserted
,
1753 matched_vtables
, &complete
);
1755 maybe_record_node (nodes
, target
, inserted
, can_refer
, &complete
);
1756 for (i
= 0; i
< bases_to_consider
.length(); i
++)
1757 maybe_record_node (nodes
, bases_to_consider
[i
], inserted
, can_refer
, &complete
);
1759 bases_to_consider
.release();
1761 (*slot
)->targets
= nodes
;
1762 (*slot
)->complete
= complete
;
1764 *completep
= complete
;
1765 if (nonconstruction_targetsp
)
1766 *nonconstruction_targetsp
= (*slot
)->nonconstruction_targets
;
1768 pointer_set_destroy (inserted
);
1769 pointer_set_destroy (matched_vtables
);
1770 timevar_pop (TV_IPA_VIRTUAL_CALL
);
1774 /* Dump all possible targets of a polymorphic call. */
1777 dump_possible_polymorphic_call_targets (FILE *f
,
1779 HOST_WIDE_INT otr_token
,
1780 const ipa_polymorphic_call_context
&ctx
)
1782 vec
<cgraph_node
*> targets
;
1784 odr_type type
= get_odr_type (otr_type
, false);
1786 int nonconstruction
;
1790 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
1792 &final
, NULL
, &nonconstruction
);
1793 fprintf (f
, " Targets of polymorphic call of type %i:", type
->id
);
1794 print_generic_expr (f
, type
->type
, TDF_SLIM
);
1795 fprintf (f
, " token %i\n", (int)otr_token
);
1796 if (ctx
.outer_type
|| ctx
.offset
)
1798 fprintf (f
, " Contained in type:");
1799 print_generic_expr (f
, ctx
.outer_type
, TDF_SLIM
);
1800 fprintf (f
, " at offset "HOST_WIDE_INT_PRINT_DEC
"\n",
1804 fprintf (f
, " %s%s%s\n ",
1805 final
? "This is a complete list." :
1806 "This is partial list; extra targets may be defined in other units.",
1807 ctx
.maybe_in_construction
? " (base types included)" : "",
1808 ctx
.maybe_derived_type
? " (derived types included)" : "");
1809 for (i
= 0; i
< targets
.length (); i
++)
1812 if (i
== (unsigned)nonconstruction
)
1813 fprintf (f
, "\n If the type is in construction,"
1814 " then additional tarets are:\n"
1817 name
= cplus_demangle_v3 (targets
[i
]->asm_name (), 0);
1818 fprintf (f
, " %s/%i", name
? name
: targets
[i
]->name (), targets
[i
]->order
);
1821 if (!targets
[i
]->definition
)
1822 fprintf (f
, " (no definition%s)",
1823 DECL_DECLARED_INLINE_P (targets
[i
]->decl
)
1826 fprintf (f
, "\n\n");
1830 /* Return true if N can be possibly target of a polymorphic call of
1831 OTR_TYPE/OTR_TOKEN. */
1834 possible_polymorphic_call_target_p (tree otr_type
,
1835 HOST_WIDE_INT otr_token
,
1836 const ipa_polymorphic_call_context
&ctx
,
1837 struct cgraph_node
*n
)
1839 vec
<cgraph_node
*> targets
;
1841 enum built_in_function fcode
;
1844 if (TREE_CODE (TREE_TYPE (n
->decl
)) == FUNCTION_TYPE
1845 && ((fcode
= DECL_FUNCTION_CODE (n
->decl
))
1846 == BUILT_IN_UNREACHABLE
1847 || fcode
== BUILT_IN_TRAP
))
1850 if (!odr_hash
.is_created ())
1852 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
, ctx
, &final
);
1853 for (i
= 0; i
< targets
.length (); i
++)
1854 if (symtab_semantically_equivalent_p (n
, targets
[i
]))
1857 /* At a moment we allow middle end to dig out new external declarations
1858 as a targets of polymorphic calls. */
1859 if (!final
&& !n
->definition
)
1865 /* After callgraph construction new external nodes may appear.
1866 Add them into the graph. */
1869 update_type_inheritance_graph (void)
1871 struct cgraph_node
*n
;
1873 if (!odr_hash
.is_created ())
1875 free_polymorphic_call_targets_hash ();
1876 timevar_push (TV_IPA_INHERITANCE
);
1877 /* We reconstruct the graph starting from types of all methods seen in the
1879 FOR_EACH_FUNCTION (n
)
1880 if (DECL_VIRTUAL_P (n
->decl
)
1882 && symtab_real_symbol_p (n
))
1883 get_odr_type (method_class_type (TREE_TYPE (n
->decl
)), true);
1884 timevar_pop (TV_IPA_INHERITANCE
);
1888 /* Return true if N looks like likely target of a polymorphic call.
1889 Rule out cxa_pure_virtual, noreturns, function declared cold and
1890 other obvious cases. */
1893 likely_target_p (struct cgraph_node
*n
)
1896 /* cxa_pure_virtual and similar things are not likely. */
1897 if (TREE_CODE (TREE_TYPE (n
->decl
)) != METHOD_TYPE
)
1899 flags
= flags_from_decl_or_type (n
->decl
);
1900 if (flags
& ECF_NORETURN
)
1902 if (lookup_attribute ("cold",
1903 DECL_ATTRIBUTES (n
->decl
)))
1905 if (n
->frequency
< NODE_FREQUENCY_NORMAL
)
1907 /* If there are no virtual tables refering the target alive,
1908 the only way the target can be called is an instance comming from other
1909 compilation unit; speculative devirtualization is build around an
1910 assumption that won't happen. */
1911 if (!referenced_from_vtable_p (n
))
1916 /* The ipa-devirt pass.
1917 When polymorphic call has only one likely target in the unit,
1918 turn it into speculative call. */
1923 struct cgraph_node
*n
;
1924 struct pointer_set_t
*bad_call_targets
= pointer_set_create ();
1925 struct cgraph_edge
*e
;
1927 int npolymorphic
= 0, nspeculated
= 0, nconverted
= 0, ncold
= 0;
1928 int nmultiple
= 0, noverwritable
= 0, ndevirtualized
= 0, nnotdefined
= 0;
1929 int nwrong
= 0, nok
= 0, nexternal
= 0, nartificial
= 0;
1931 FOR_EACH_DEFINED_FUNCTION (n
)
1933 bool update
= false;
1934 if (dump_file
&& n
->indirect_calls
)
1935 fprintf (dump_file
, "\n\nProcesing function %s/%i\n",
1936 n
->name (), n
->order
);
1937 for (e
= n
->indirect_calls
; e
; e
= e
->next_callee
)
1938 if (e
->indirect_info
->polymorphic
)
1940 struct cgraph_node
*likely_target
= NULL
;
1943 int nonconstruction_targets
;
1944 vec
<cgraph_node
*>targets
1945 = possible_polymorphic_call_targets
1946 (e
, &final
, &cache_token
, &nonconstruction_targets
);
1950 dump_possible_polymorphic_call_targets
1955 if (!cgraph_maybe_hot_edge_p (e
))
1958 fprintf (dump_file
, "Call is cold\n\n");
1965 fprintf (dump_file
, "Call is aready speculated\n\n");
1968 /* When dumping see if we agree with speculation. */
1972 if (pointer_set_contains (bad_call_targets
,
1976 fprintf (dump_file
, "Target list is known to be useless\n\n");
1980 for (i
= 0; i
< targets
.length (); i
++)
1981 if (likely_target_p (targets
[i
]))
1985 if (i
< (unsigned) nonconstruction_targets
)
1987 likely_target
= NULL
;
1989 fprintf (dump_file
, "More than one likely target\n\n");
1994 likely_target
= targets
[i
];
1998 pointer_set_insert (bad_call_targets
, cache_token
);
2001 /* This is reached only when dumping; check if we agree or disagree
2002 with the speculation. */
2005 struct cgraph_edge
*e2
;
2006 struct ipa_ref
*ref
;
2007 cgraph_speculative_call_info (e
, e2
, e
, ref
);
2008 if (cgraph_function_or_thunk_node (e2
->callee
, NULL
)
2009 == cgraph_function_or_thunk_node (likely_target
, NULL
))
2011 fprintf (dump_file
, "We agree with speculation\n\n");
2016 fprintf (dump_file
, "We disagree with speculation\n\n");
2021 if (!likely_target
->definition
)
2024 fprintf (dump_file
, "Target is not an definition\n\n");
2028 /* Do not introduce new references to external symbols. While we
2029 can handle these just well, it is common for programs to
2030 incorrectly with headers defining methods they are linked
2032 if (DECL_EXTERNAL (likely_target
->decl
))
2035 fprintf (dump_file
, "Target is external\n\n");
2039 /* Don't use an implicitly-declared destructor (c++/58678). */
2040 struct cgraph_node
*non_thunk_target
2041 = cgraph_function_node (likely_target
);
2042 if (DECL_ARTIFICIAL (non_thunk_target
->decl
)
2043 && DECL_COMDAT (non_thunk_target
->decl
))
2046 fprintf (dump_file
, "Target is artificial\n\n");
2050 if (cgraph_function_body_availability (likely_target
)
2051 <= AVAIL_OVERWRITABLE
2052 && symtab_can_be_discarded (likely_target
))
2055 fprintf (dump_file
, "Target is overwritable\n\n");
2063 "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
2064 n
->name (), n
->order
,
2065 likely_target
->name (),
2066 likely_target
->order
);
2067 if (!symtab_can_be_discarded (likely_target
))
2070 alias
= cgraph (symtab_nonoverwritable_alias
2073 likely_target
= alias
;
2077 cgraph_turn_edge_to_speculative
2078 (e
, likely_target
, e
->count
* 8 / 10, e
->frequency
* 8 / 10);
2082 inline_update_overall_summary (n
);
2084 pointer_set_destroy (bad_call_targets
);
2088 "%i polymorphic calls, %i devirtualized,"
2089 " %i speculatively devirtualized, %i cold\n"
2090 "%i have multiple targets, %i overwritable,"
2091 " %i already speculated (%i agree, %i disagree),"
2092 " %i external, %i not defined, %i artificial\n",
2093 npolymorphic
, ndevirtualized
, nconverted
, ncold
,
2094 nmultiple
, noverwritable
, nspeculated
, nok
, nwrong
,
2095 nexternal
, nnotdefined
, nartificial
);
2096 return ndevirtualized
? TODO_remove_functions
: 0;
2101 const pass_data pass_data_ipa_devirt
=
2103 IPA_PASS
, /* type */
2104 "devirt", /* name */
2105 OPTGROUP_NONE
, /* optinfo_flags */
2106 true, /* has_execute */
2107 TV_IPA_DEVIRT
, /* tv_id */
2108 0, /* properties_required */
2109 0, /* properties_provided */
2110 0, /* properties_destroyed */
2111 0, /* todo_flags_start */
2112 ( TODO_dump_symtab
), /* todo_flags_finish */
2115 class pass_ipa_devirt
: public ipa_opt_pass_d
2118 pass_ipa_devirt (gcc::context
*ctxt
)
2119 : ipa_opt_pass_d (pass_data_ipa_devirt
, ctxt
,
2120 NULL
, /* generate_summary */
2121 NULL
, /* write_summary */
2122 NULL
, /* read_summary */
2123 NULL
, /* write_optimization_summary */
2124 NULL
, /* read_optimization_summary */
2125 NULL
, /* stmt_fixup */
2126 0, /* function_transform_todo_flags_start */
2127 NULL
, /* function_transform */
2128 NULL
) /* variable_transform */
2131 /* opt_pass methods: */
2132 virtual bool gate (function
*)
2134 return (flag_devirtualize
2135 && flag_devirtualize_speculatively
2139 virtual unsigned int execute (function
*) { return ipa_devirt (); }
2141 }; // class pass_ipa_devirt
2146 make_pass_ipa_devirt (gcc::context
*ctxt
)
2148 return new pass_ipa_devirt (ctxt
);
2151 #include "gt-ipa-devirt.h"