/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Brief vocabulary:
     ODR = One Definition Rule
        In short, the ODR states that:
        1 In any translation unit, a template, type, function, or object can
          have no more than one definition.  Some of these can have any number
          of declarations.  A definition provides an instance.
        2 In the entire program, an object or non-inline function cannot have
          more than one definition; if an object or function is used, it must
          have exactly one definition.  You can declare an object or function
          that is never used, in which case you don't have to provide
          a definition.  In no event can there be more than one definition.
        3 Some things, like types, templates, and extern inline functions, can
          be defined in more than one translation unit.  For a given entity,
          each definition must be the same.  Non-extern objects and functions
          in different translation units are different entities, even if their
          names and types are the same.
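
     As a minimal, hypothetical illustration of the kind of ODR violation
     this file tries to diagnose, consider two translation units defining
     a class with the same name but different layout:

       // a.cc                               // b.cc
       struct S { virtual int f (); };       struct S { virtual long f (); };

     Linking a.o and b.o violates rule 3 above; during LTO the duplicate
     handling in add_type_duplicate below warns about such mismatches.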
     OBJ_TYPE_REF
       This is the Gimple representation of type information of a polymorphic call.
       It contains two parameters:
         otr_type is the type of the class whose method is called.
         otr_token is the index into the virtual table where the address is taken.
     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not the BINFOs associated
       with the base types.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from the
       virtual table of the base type.  Also BINFO_OFFSET specifies the
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of the base binfos (which differs from BINFO_VTABLE of
       the binfo associated to the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns the proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of the virtual method in the virtual table associated
       with the type defining it.  The token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual method.
     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.
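
     As an illustration (a hypothetical snippet, assuming the usual vtable
     layout where the slot order follows declaration order), given

       struct A { virtual void f (); virtual void g (); };
       void call (A *a) { a->g (); }

     the call a->g () is represented by an OBJ_TYPE_REF whose otr_type is A
     and whose otr_token is 1, i.e. the index of g in A's virtual table
     (f occupying index 0).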
   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by the ODR rule.
       (The multiple type nodes appear only with link-time optimization.)

       Edges are represented by odr_type->bases and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.

   possible_polymorphic_call_targets returns, given the parameters found in
   an indirect polymorphic edge, all possible polymorphic call targets of
   the call.

   pass_ipa_devirt performs simple speculative devirtualization.  */
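
/* As a small, hypothetical example of the inheritance graph described
   above, for

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };
     struct C : B { virtual void f (); };

   with the three methods seen in the unit we get odr_type vertices for
   A, B and C; A's derived_types vector contains B, B's contains C, and
   conversely C->bases contains B and B->bases contains A.  */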
#include "coretypes.h"
#include "print-tree.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};
/* Pointer set of all call targets appearing in the cache.  */

static pointer_set_t *cached_polymorphic_call_targets;
/* The node of the type inheritance graph.  For each type unique in the
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type of the ODR equivalence class.  */
  tree type;
  /* All polymorphic bases of the type.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in the unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
};
/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the type
   itself, while the others represent bases of derived types.  The BINFOs
   representing bases do not have BINFO_VTABLE pointer set when this is the
   single inheritance (because vtables are shared).  Look up the BINFO of the
   type and check presence of its vtable.  */

static bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}
/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
        {
          hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
          v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
        }

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
  return types_same_for_odr (t1->type, t2);
}
/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    pointer_set_destroy (v->types_set);
  ggc_free (v);
}
/* ODR type hash used to lookup ODR type based on tree type node.  */

typedef hash_table <odr_hasher> odr_hash_type;
static odr_hash_type odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static void
add_type_duplicate (odr_type val, tree type)
{
  if (!val->types_set)
    val->types_set = pointer_set_create ();

  /* See if this duplicate is new.  */
  if (!pointer_set_insert (val->types_set, type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i, j;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!types_compatible_p (val->type, type))
        {
          merge = false;
          if (BINFO_VTABLE (TYPE_BINFO (val->type))
              && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
                             "type %qD violates one definition rule",
                             type))
            inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
                    "a type with the same name but different layout is "
                    "defined in another translation unit");
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file,
                       "ODR violation or merging or ODR type bug?\n");
              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n', cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n', cgraph_dump_file);
            }
        }

      /* Next sanity check that bases are the same.  If not, we will end
         up producing wrong answers.  */
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
          {
            odr_type base
              = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                            i)),
                              true);
            if (val->bases.length () <= j || val->bases[j] != base)
              base_mismatch = true;
            j++;
          }
      if (base_mismatch)
        {
          merge = false;
          if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
                          "type %qD violates one definition rule",
                          type))
            inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
                    "a type with the same name but different bases is "
                    "defined in another translation unit");
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file,
                       "ODR base violation or merging bug?\n");
              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n', cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n', cgraph_dump_file);
            }
        }

      /* Regularize things a little.  During LTO the same types may come with
         different BINFOs.  Either because their virtual table was
         not merged by tree merging and only later at decl merging or
         because one type comes with an external vtable, while the other
         with an internal one.  We want to merge equivalent binfos to conserve
         memory and streaming overhead.

         The external vtables are more harmful: they contain references
         to external declarations of methods that may be defined in the
         merged LTO unit.  For this reason we absolutely need to remove
         them and replace by internal variants.  Not doing so will lead
         to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge)
        {
          tree master_binfo = TYPE_BINFO (val->type);
          tree v1 = BINFO_VTABLE (master_binfo);
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

          if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
            {
              gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
                          && operand_equal_p (TREE_OPERAND (v1, 1),
                                              TREE_OPERAND (v2, 1), 0));
              v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
              v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
            }
          gcc_assert (DECL_ASSEMBLER_NAME (v1)
                      == DECL_ASSEMBLER_NAME (v2));

          if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
            {
              unsigned int i;

              TYPE_BINFO (val->type) = TYPE_BINFO (type);
              for (i = 0; i < val->types->length (); i++)
                if (TYPE_BINFO ((*val->types)[i])
                    == master_binfo)
                  TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
            }
          else
            TYPE_BINFO (type) = master_binfo;
        }
    }
}
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   a possibly new entry.  */

static odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;

  type = TYPE_MAIN_VARIANT (type);
  gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
  hash = hash_type_name (type);
  slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representations of
         the same ODR type.  */
      if (val->type != type)
        add_type_duplicate (val, type);
    }
  else
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      val = ggc_alloc_cleared_odr_type_d ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
        /* For now record only polymorphic types.  Others are
           pointless for devirtualization and we can not precisely
           determine ODR equivalency of these during LTO.  */
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
          {
            odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
                                                                        i)),
                                          true);
            base->derived_types.safe_push (val);
            val->bases.safe_push (base);
          }

      /* First record bases, then add into array so ids are increasing.  */
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
/* Dump ODR type T and all its derived types.  INDENT specifies indentation for
   recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;

  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
               DECL_SOURCE_FILE (TYPE_NAME (t->type)),
               DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
        fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
        dump_odr_type (f, t->derived_types[i], indent + 1);
    }
}
/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;

  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->bases.length () == 0)
        dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->types && odr_types[i]->types->length ())
        {
          unsigned int j;

          fprintf (f, "Duplicate tree types for odr type %i\n", i);
          print_node (f, "", odr_types[i]->type, 0);
          for (j = 0; j < odr_types[i]->types->length (); j++)
            {
              tree t;

              fprintf (f, "duplicate #%i\n", j);
              print_node (f, "", (*odr_types[i]->types)[j], 0);
              t = (*odr_types[i]->types)[j];
              while (TYPE_P (t) && TYPE_CONTEXT (t))
                {
                  t = TYPE_CONTEXT (t);
                  print_node (f, "", t, 0);
                }
            }
        }
    }
}
/* Given method type T, return the type of the class it belongs to.
   Look up the `this' pointer and get its type.  */

tree
method_class_type (tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
/* Initialize IPA devirt and build the type inheritance graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash.is_created ())
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);

  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node> (n)
        && DECL_VIRTUAL_P (n->decl)
        && symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has a virtual base of class A
       re-defining its virtual method and there is a class C with no virtual
       methods with B as a virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has the non-virtual
       version, while C's has the virtual one.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally, or C is in an anonymous namespace and
       thus we will see the vtable.  */
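
    /* A hypothetical sketch of the situation described above:

         struct A { virtual void f (); };
         struct B : virtual A { virtual void f (); };
         struct C : virtual B { };

       B::f is emitted in two variants (non-virtual and virtual
       inheritance); C's virtual table refers to the latter, so C must be
       recorded even though it declares no virtual methods of its own.  */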
    else if (is_a <varpool_node> (n)
             && DECL_VIRTUAL_P (n->decl)
             && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
             && TYPE_BINFO (DECL_CONTEXT (n->decl))
             && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (DECL_CONTEXT (n->decl), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
/* If TARGET has an associated node, record it in the NODES array.
   If TARGET can not be inserted (for example because its body was
   already removed and there is no way to refer to it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
                   tree target, pointer_set_t *inserted,
                   bool *completep)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (!target
      /* Those are used to mark impossible scenarios.  */
      || (fcode = DECL_FUNCTION_CODE (target))
          == BUILT_IN_UNREACHABLE
      || fcode == BUILT_IN_TRAP)
    return;

  target_node = cgraph_get_node (target);

  if (target_node != NULL
      && (TREE_PUBLIC (target)
          || target_node->definition)
      && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target))
        {
          pointer_set_insert (cached_polymorphic_call_targets,
                              target_node);
          nodes.safe_push (target_node);
        }
    }
  else if (completep
           && !type_in_anonymous_namespace_p
                 (method_class_type (TREE_TYPE (target))))
    *completep = false;
}
/* See if BINFO's type matches OUTER_TYPE.  If so, look up the
   BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in the vtable and insert it into the NODES array.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset

   TYPE_BINFOS is a stack of BINFOs of types with a defined
   virtual table seen on the way from the class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for a virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of an anonymous namespace.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
                          tree binfo,
                          tree otr_type,
                          vec <tree> &type_binfos,
                          HOST_WIDE_INT otr_token,
                          tree outer_type,
                          HOST_WIDE_INT offset,
                          pointer_set_t *inserted,
                          pointer_set_t *matched_vtables,
                          bool anonymous)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      tree type_binfo = NULL;

      /* Lookup BINFO with virtual table.  For normal types it is always last
         base binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
        if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
          {
            type_binfo = type_binfos[i];
            break;
          }
      if (BINFO_VTABLE (binfo))
        type_binfos.pop ();
      /* If this is duplicated BINFO for a base shared by virtual inheritance,
         we may not have its associated vtable.  This is not a problem, since
         we will walk it on the other path.  */
      if (!type_binfo)
        {
          gcc_assert (BINFO_VIRTUAL_P (binfo));
          return;
        }
      tree inner_binfo = get_binfo_at_offset (type_binfo,
                                              offset, otr_type);
      /* For types in anonymous namespace first check if the respective vtable
         is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
        {
          tree vtable = BINFO_VTABLE (inner_binfo);
          varpool_node *vnode;

          if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
            vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
          vnode = varpool_get_node (vtable);
          if (!vnode || !vnode->definition)
            return;
        }
      gcc_assert (inner_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
        {
          tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
          maybe_record_node (nodes, target, inserted, NULL);
        }
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, base_binfo, otr_type,
                                type_binfos,
                                otr_token, outer_type, offset, inserted,
                                matched_vtables, anonymous);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking of vtables.  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
                                     pointer_set_t *inserted,
                                     pointer_set_t *matched_vtables,
                                     tree otr_type,
                                     odr_type type,
                                     HOST_WIDE_INT otr_token,
                                     tree outer_type,
                                     HOST_WIDE_INT offset)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;

  record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
                            outer_type, offset,
                            inserted, matched_vtables,
                            type->anonymous_namespace);
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
                                         matched_vtables,
                                         otr_type,
                                         type->derived_types[i],
                                         otr_token, outer_type, offset);
}
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  bool final;
};
/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  hash = iterative_hash_host_wide_int
          (odr_query->otr_token,
           odr_query->type->id);
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
                                   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  return iterative_hash_hashval_t
         (((int)odr_query->context.maybe_in_construction << 1)
          | (int)odr_query->context.maybe_derived_type, hash);
}
/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
                                       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
          && t1->context.offset == t2->context.offset
          && t1->context.outer_type == t2->context.outer_type
          && t1->context.maybe_in_construction
              == t2->context.maybe_in_construction
          && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
}
/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}
/* Polymorphic call target query cache.  */

typedef hash_table <polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type polymorphic_call_target_hash;
/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      polymorphic_call_target_hash.dispose ();
      pointer_set_destroy (cached_polymorphic_call_targets);
      cached_polymorphic_call_targets = NULL;
    }
}
/* When a virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && pointer_set_contains (cached_polymorphic_call_targets, n))
    free_polymorphic_call_targets_hash ();
}
/* CONTEXT->OUTER_TYPE is a type of memory object where an object of
   EXPECTED_TYPE is contained at CONTEXT->OFFSET.  Walk the memory
   representation of CONTEXT->OUTER_TYPE and find the outermost class type
   that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update
   CONTEXT to represent it.

   For example, when CONTEXT represents a class A that contains an int
   followed by a field of class B, and we look for the type at offset
   sizeof(int), we end up with B and offset 0.  If the same layout is
   produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find the corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when lookup was successful.  */
static bool
get_class_context (ipa_polymorphic_call_context *context,
                   tree expected_type)
{
  tree type = context->outer_type;
  HOST_WIDE_INT offset = context->offset;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* On a match, just return what we found.  */
      if (TREE_CODE (type) == TREE_CODE (expected_type)
          && types_same_for_odr (type, expected_type))
        {
          /* Type can not contain itself on a non-zero offset.  In that case
             just give up.  */
          gcc_assert (offset == 0);
          return true;
        }

      /* Walk fields and find the corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
        {
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              size = tree_to_uhwi (DECL_SIZE (fld));
              if (pos <= offset && (pos + size) > offset)
                break;
            }
          if (!fld)
            goto give_up;

          type = TREE_TYPE (fld);
          offset -= pos;
          /* DECL_ARTIFICIAL represents a basetype.  */
          if (!DECL_ARTIFICIAL (fld))
            {
              context->outer_type = type;
              context->offset = offset;
              /* As soon as we see a field containing the type,
                 we know we are not looking for derivations.  */
              context->maybe_derived_type = false;
            }
        }
      else if (TREE_CODE (type) == ARRAY_TYPE)
        {
          tree subtype = TREE_TYPE (type);

          /* Give up if we don't know the array element size.  */
          if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
              || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
            goto give_up;
          offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
          type = subtype;
          context->outer_type = type;
          context->offset = offset;
          context->maybe_derived_type = false;
        }
      /* Give up on anything else.  */
      else
        goto give_up;
    }

give_up:
  /* If we failed to find the subtype we look for, give up and fall back to
     the most generic query.  */
  context->outer_type = expected_type;
  context->offset = 0;
  context->maybe_derived_type = true;
  return false;
}
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
                 tree otr_type)
{
  ipa_polymorphic_call_context context = {offset, outer_type,
                                          false, true};
  return get_class_context (&context, otr_type);
}
/* Given REF call in FNDECL, determine the class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return a pointer to the object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
                           tree ref,
                           tree *otr_type,
                           HOST_WIDE_INT *otr_token,
                           ipa_polymorphic_call_context *context)
{
  tree base_pointer;

  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->outer_type = *otr_type;
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = false;

  /* Walk SSA for outer object.  */
  do
    {
      if (TREE_CODE (base_pointer) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
          && SSA_NAME_DEF_STMT (base_pointer)
          && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
        {
          base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
          STRIP_NOPS (base_pointer);
        }
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
        {
          HOST_WIDE_INT size, max_size;
          HOST_WIDE_INT offset2;
          tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
                                               &offset2, &size, &max_size);

          /* If this is a varying address, punt.  */
          if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
              && max_size != -1
              && max_size == size)
            {
              /* We found dereference of a pointer.  Type of the pointer
                 and MEM_REF is meaningless, but we can look further.  */
              if (TREE_CODE (base) == MEM_REF)
                {
                  base_pointer = TREE_OPERAND (base, 0);
                  context->offset
                    += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
                  context->outer_type = NULL;
                }
              /* We found the base object.  In this case the outer_type
                 is known.  */
              else if (DECL_P (base))
                {
                  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

                  /* Only type inconsistent programs can have otr_type that is
                     not part of outer type.  */
                  if (!contains_type_p (TREE_TYPE (base),
                                        context->offset + offset2, *otr_type))
                    return base_pointer;
                  context->outer_type = TREE_TYPE (base);
                  context->offset += offset2;
                  /* Make very conservative assumption that all objects
                     may be in construction.
                     TODO: ipa-prop already contains code to tell better.  */
                  context->maybe_in_construction = true;
                  context->maybe_derived_type = false;
                  return base_pointer;
                }
            }
          else
            break;
        }
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
               && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
        {
          context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
                             * BITS_PER_UNIT;
          base_pointer = TREE_OPERAND (base_pointer, 0);
        }
      else
        break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if the parameter is the THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
          && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
        {
          context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
          gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

          /* Dynamic casting has possibly upcasted the type
             in the hierarchy.  In this case outer type is less
             informative than inner type and we should forget
             about it.  */
          if (!contains_type_p (context->outer_type, context->offset,
                                *otr_type))
            {
              context->outer_type = NULL;
              return base_pointer;
            }

          /* If the function is a constructor or destructor, then
             the type is possibly in construction, but we know
             it is not a derived type.  */
          if (DECL_CXX_CONSTRUCTOR_P (fndecl)
              || DECL_CXX_DESTRUCTOR_P (fndecl))
            {
              context->maybe_in_construction = true;
              context->maybe_derived_type = false;
            }
          else
            {
              context->maybe_derived_type = true;
              context->maybe_in_construction = false;
            }
          return base_pointer;
        }
      /* Non-PODs passed by value are really passed by invisible
         reference.  In this case we also know the type of the
         object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
        {
          context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
          gcc_assert (!POINTER_TYPE_P (context->outer_type));
          /* Only type inconsistent programs can have otr_type that is
             not part of outer type.  */
          if (!contains_type_p (context->outer_type, context->offset,
                                *otr_type))
            {
              context->outer_type = NULL;
              return base_pointer;
            }
          context->maybe_derived_type = false;
          context->maybe_in_construction = false;
          return base_pointer;
        }
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them to NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */

static void
record_targets_from_bases (tree otr_type,
                           HOST_WIDE_INT otr_token,
                           tree outer_type,
                           HOST_WIDE_INT offset,
                           vec <cgraph_node *> nodes,
                           pointer_set_t *inserted,
                           pointer_set_t *matched_vtables,
                           bool *completep)
{
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree base_binfo;
      tree fld;

      if (types_same_for_odr (outer_type, otr_type))
        return;

      for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_to_shwi (DECL_SIZE (fld));
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      /* Within a class type we should always find the corresponding fields.  */
      gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);

      /* Nonbasetypes should have been stripped by outer_class_type.  */
      gcc_assert (DECL_ARTIFICIAL (fld));

      outer_type = TREE_TYPE (fld);
      offset -= pos;

      base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
                                        offset, otr_type);
      gcc_assert (base_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
        {
          tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
          if (target)
            maybe_record_node (nodes, target, inserted, completep);
          /* The only way a method in an anonymous namespace can become
             unreferable is that it has been fully optimized out.  */
          else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
            *completep = false;
          pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
        }
    }
}
/* When a virtual table is removed, we may need to flush the cache.  */

static void
devirt_variable_node_removal_hook (varpool_node *n,
                                   void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}
/* Return a vector containing possible targets of a polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual methods.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of the base types.  INCLUDE_DERIVED_TYPES makes
   us walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
   in the target cache.  If the user needs to visit every target list
   just once, it can memoize them.

   The returned vector is placed into the cache.  It is NOT the caller's
   responsibility to free it.  The vector can be freed on cgraph_remove_node
   call if the particular node is a virtual function present in the cache.  */
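
/* For illustration, a typical use mirrors what ipa_devirt does below
   (a sketch only, not part of the pass):

     bool final;
     void *cache_token;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
                                            context, &final, &cache_token);
     if (final && targets.length () == 1)
       ... the call can only reach targets[0] ...  */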
vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
                                   HOST_WIDE_INT otr_token,
                                   ipa_polymorphic_call_context context,
                                   bool *completep,
                                   void **cache_token)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  pointer_set_t *inserted;
  pointer_set_t *matched_vtables;
  vec <cgraph_node *> nodes = vNULL;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool final = true;

  type = get_odr_type (otr_type, true);

  /* Lookup the outer class type we want to walk.  */
  if (context.outer_type)
    get_class_context (&context, otr_type);

  /* We now canonicalize our query, so we do not need extra hashtable
     entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from the inner type.  */
  if (!context.outer_type)
    {
      context.outer_type = otr_type;
      context.offset = 0;
    }
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If outer and inner type match, there are no bases to see.  */
  if (type == outer_type)
    context.maybe_in_construction = false;
  /* If the type is final, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = pointer_set_create ();
      polymorphic_call_target_hash.create (23);
      if (!node_removal_hook_holder)
        {
          node_removal_hook_holder =
            cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
          varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
                                         NULL);
        }
    }

  /* Lookup cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
        *completep = (*slot)->final;
      return (*slot)->targets;
    }

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;

  inserted = pointer_set_create ();
  matched_vtables = pointer_set_create ();

  /* First see virtual method of the type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
                               context.offset, otr_type);
  target = gimple_get_virt_method_for_binfo (otr_token, binfo);
  if (target)
    {
      maybe_record_node (nodes, target, inserted, &final);

      /* In the case we get a final method, we don't need
         to walk derivations.  */
      if (DECL_FINAL_P (target))
        context.maybe_derived_type = false;
    }
  /* The only way a method in an anonymous namespace can become unreferable
     is that it has been fully optimized out.  */
  else if (flag_ltrans || !type->anonymous_namespace)
    final = false;
  pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

  /* Next walk bases, if asked to.  */
  if (context.maybe_in_construction)
    record_targets_from_bases (otr_type, otr_token, outer_type->type,
                               context.offset, nodes, inserted,
                               matched_vtables, &final);

  /* Finally walk recursively all derived types.  */
  if (context.maybe_derived_type)
    {
      /* For anonymous namespace types we can attempt to build the full type.
         All derivations must be in this unit (unless we see a partial unit).  */
      if (!type->anonymous_namespace || flag_ltrans)
        final = false;
      for (i = 0; i < outer_type->derived_types.length (); i++)
        possible_polymorphic_call_targets_1 (nodes, inserted,
                                             matched_vtables,
                                             otr_type,
                                             outer_type->derived_types[i],
                                             otr_token, outer_type->type,
                                             context.offset);
    }
  (*slot)->targets = nodes;
  (*slot)->final = final;
  if (completep)
    *completep = final;

  pointer_set_destroy (inserted);
  pointer_set_destroy (matched_vtables);
  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
                                        tree otr_type,
                                        HOST_WIDE_INT otr_token,
                                        const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (otr_type, false);
  unsigned int i;

  if (!type)
    return;
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
                                               ctx, &final);
  fprintf (f, " Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n"
           " Contained in type:",
           (int)otr_token);
  print_generic_expr (f, ctx.outer_type, TDF_SLIM);
  fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n"
           " %s%s%s\n ",
           ctx.offset,
           final ? "This is full list." :
           "This is partial list; extra targets may be defined in other units.",
           ctx.maybe_in_construction ? " (base types included)" : "",
           ctx.maybe_derived_type ? " (derived types included)" : "");
  for (i = 0; i < targets.length (); i++)
    fprintf (f, " %s/%i", targets[i]->name (),
             targets[i]->order);
  fprintf (f, "\n\n");
}
/* Return true if N can possibly be a target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
                                    HOST_WIDE_INT otr_token,
                                    const ipa_polymorphic_call_context &ctx,
                                    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
          == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  if (!odr_hash.is_created ())
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (symtab_semantically_equivalent_p (n, targets[i]))
      return true;

  /* At the moment we allow the middle end to dig out new external declarations
     as targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash.is_created ())
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
        && !n->definition
        && symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
/* Return true if N looks like a likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturns, functions declared cold and
   other obvious cases.  */

static bool
likely_target_p (struct cgraph_node *n)
{
  int flags;

  /* cxa_pure_virtual and similar things are not likely.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
    return false;
  flags = flags_from_decl_or_type (n->decl);
  if (flags & ECF_NORETURN)
    return false;
  if (lookup_attribute ("cold",
                        DECL_ATTRIBUTES (n->decl)))
    return false;
  if (n->frequency < NODE_FREQUENCY_NORMAL)
    return false;
  return true;
}
/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
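
/* As a rough, source-level sketch of what speculative devirtualization
   amounts to (the real transformation is performed on the callgraph by
   cgraph_turn_edge_to_speculative and later expanded by the inliner):

     // before                   // after (conceptually)
     obj->foo ();                if (fetched vtable entry == &A::foo)
                                   A::foo (obj);   // direct call, inlinable
                                 else
                                   obj->foo ();    // polymorphic fallback  */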
static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  struct pointer_set_t *bad_call_targets = pointer_set_create ();
  struct cgraph_edge *e;

  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0;

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
        fprintf (dump_file, "\n\nProcessing function %s/%i\n",
                 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
        if (e->indirect_info->polymorphic)
          {
            struct cgraph_node *likely_target = NULL;
            void *cache_token;
            bool final;
            vec <cgraph_node *> targets
               = possible_polymorphic_call_targets
                    (e, &final, &cache_token);
            unsigned int i;

            if (dump_file)
              dump_possible_polymorphic_call_targets
                (dump_file, e);

            npolymorphic++;

            if (!cgraph_maybe_hot_edge_p (e))
              {
                if (dump_file)
                  fprintf (dump_file, "Call is cold\n");
                ncold++;
                continue;
              }
            if (e->speculative)
              {
                if (dump_file)
                  fprintf (dump_file, "Call is already speculated\n");
                nspeculated++;

                /* When dumping see if we agree with speculation.  */
                if (!dump_file)
                  continue;
              }
            if (pointer_set_contains (bad_call_targets,
                                      cache_token))
              {
                if (dump_file)
                  fprintf (dump_file, "Target list is known to be useless\n");
                nmultiple++;
                continue;
              }
            for (i = 0; i < targets.length (); i++)
              if (likely_target_p (targets[i]))
                {
                  if (likely_target)
                    {
                      likely_target = NULL;
                      if (dump_file)
                        fprintf (dump_file, "More than one likely target\n");
                      nmultiple++;
                      break;
                    }
                  likely_target = targets[i];
                }
            if (!likely_target)
              {
                pointer_set_insert (bad_call_targets, cache_token);
                continue;
              }
            /* This is reached only when dumping; check if we agree or disagree
               with the speculation.  */
            if (e->speculative)
              {
                struct cgraph_edge *e2;
                struct ipa_ref *ref;
                cgraph_speculative_call_info (e, e2, e, ref);
                if (cgraph_function_or_thunk_node (e2->callee, NULL)
                    == cgraph_function_or_thunk_node (likely_target, NULL))
                  {
                    fprintf (dump_file, "We agree with speculation\n");
                    nok++;
                  }
                else
                  {
                    fprintf (dump_file, "We disagree with speculation\n");
                    nwrong++;
                  }
                continue;
              }
            if (!likely_target->definition)
              {
                if (dump_file)
                  fprintf (dump_file, "Target is not a definition\n");
                nnotdefined++;
                continue;
              }
            /* Do not introduce new references to external symbols.  While we
               can handle these just fine, it is common for programs to be
               compiled against headers declaring methods that are never
               actually linked in.  */
            if (DECL_EXTERNAL (likely_target->decl))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is external\n");
                nexternal++;
                continue;
              }
            if (cgraph_function_body_availability (likely_target)
                  <= AVAIL_OVERWRITABLE
                && symtab_can_be_discarded (likely_target))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is overwritable\n");
                noverwritable++;
                continue;
              }
            else
              {
                if (dump_file)
                  fprintf (dump_file,
                           "Speculatively devirtualizing call in %s/%i to %s/%i\n",
                           n->name (), n->order,
                           likely_target->name (),
                           likely_target->order);
                if (!symtab_can_be_discarded (likely_target))
                  {
                    struct cgraph_node *alias;
                    alias = cgraph (symtab_nonoverwritable_alias
                                      (likely_target));
                    if (alias)
                      likely_target = alias;
                  }
                nconverted++;
                update = true;
                cgraph_turn_edge_to_speculative
                  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
              }
          }
      if (update)
        inline_update_overall_summary (n);
    }

  pointer_set_destroy (bad_call_targets);

  if (dump_file)
    fprintf (dump_file,
             "%i polymorphic calls, %i devirtualized,"
             " %i speculatively devirtualized, %i cold\n"
             "%i have multiple targets, %i overwritable,"
             " %i already speculated (%i agree, %i disagree),"
             " %i external, %i not defined\n",
             npolymorphic, ndevirtualized, nconverted, ncold,
             nmultiple, noverwritable, nspeculated, nok, nwrong,
             nexternal, nnotdefined);
  return ndevirtualized ? TODO_remove_functions : 0;
}
/* Gate for the speculative IPA devirtualization optimization.  */

static bool
gate_ipa_devirt (void)
{
  return (flag_devirtualize
          && flag_devirtualize_speculatively
          && optimize);
}
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt
ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}

#include "gt-ipa-devirt.h"