/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Brief vocabulary of data structures used within this module.

   ODR = One Definition Rule
     In short, the ODR states that:
     1 In any translation unit, a template, type, function, or object can
       have no more than one definition.  Some of these can have any number
       of declarations.  A definition provides an instance.
     2 In the entire program, an object or non-inline function cannot have
       more than one definition; if an object or function is used, it must
       have exactly one definition.  You can declare an object or function
       that is never used, in which case you don't have to provide
       a definition.  In no event can there be more than one definition.
     3 Some things, like types, templates, and extern inline functions, can
       be defined in more than one translation unit.  For a given entity,
       each definition must be the same.  Non-extern objects and functions
       in different translation units are different entities, even if their
       names and types are the same.
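
     As a short illustration (a hypothetical example, not taken from any
     particular program), the following two translation units violate
     rule 3, because the two definitions of S differ:

       // a.C
       struct S { int i; };
       // b.C
       struct S { long l; };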
   OBJ_TYPE_REF
     This is the GIMPLE representation of type information of a polymorphic
     call.  It contains two parameters:
       otr_type is the type of class whose method is called.
       otr_token is the index into the virtual table where the address is
       taken.
   BINFO
     This is the type inheritance information attached to each tree
     RECORD_TYPE by the C++ front end.  It provides information about base
     types and virtual tables.

     BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
     BINFO also links to its type by BINFO_TYPE and to the virtual table by
     BINFO_VTABLE.

     Base types of a given type are enumerated by the BINFO_BASE_BINFO
     vector.  Members of this vector are not the BINFOs associated
     with the base types.  Rather they are new copies of those BINFOs
     (base BINFOs).  Their virtual tables may differ from the
     virtual table of the base type.  Also BINFO_OFFSET specifies the
     offset of the base within the type.

     In the case of single inheritance, the virtual table is shared
     and BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
     inheritance the individual virtual tables are pointed to by
     BINFO_VTABLE of the base BINFOs (which differs from BINFO_VTABLE of
     the BINFO associated with the base type).

     A BINFO lookup for a given base type and offset can be done by
     get_binfo_at_offset.  It returns the proper BINFO whose virtual table
     can be used for lookup of virtual methods associated with the base type.
   token
     This is the index of a virtual method in the virtual table associated
     with the type defining it.  The token can be looked up from an
     OBJ_TYPE_REF or from DECL_VINDEX of a given virtual method.
   polymorphic (indirect) call
     This is the callgraph representation of a virtual method call.  Every
     polymorphic call contains the otr_type and otr_token taken from the
     original OBJ_TYPE_REF at callgraph construction time.
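
     As an illustrative sketch (hypothetical source, not from GCC), given

       struct A { virtual int foo (); };
       int call_foo (A *a) { return a->foo (); }

     the body of call_foo contains an OBJ_TYPE_REF whose otr_type is A and
     whose otr_token is typically 0, because foo occupies the first slot of
     A's virtual table.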
   What we do here:

   build_type_inheritance_graph triggers a construction of the type
   inheritance graph.

     We reconstruct it based on the types of the methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.
     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by the ODR rule
       (the multiple type nodes appear only with link-time optimization).

       Edges are represented by odr_type->bases and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
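
       As a small sketch (hypothetical example), for

         struct A { virtual void f (); };
         struct B : A { virtual void f (); };

       the graph contains one odr_type vertex for A and one for B; the
       vertex of B lists A in its bases vector and the vertex of A lists B
       in its derived_types vector.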
   possible_polymorphic_call_targets returns, given the parameters found in
   an indirect polymorphic edge, all possible polymorphic call targets of the
   call.

   pass_ipa_devirt performs simple speculative devirtualization.
*/
#include "coretypes.h"
#include "print-tree.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};
/* Pointer set of all call targets appearing in the cache.  */

static pointer_set_t *cached_polymorphic_call_targets;
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
};
/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the type
   while others represent bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of the type
   and check the presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}
/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
/* Produce hash based on type name.  */

hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
        {
          hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
          v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
        }

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
  return types_same_for_odr (t1->type, t2);
}
/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->derived_types.release ();
  if (v->types_set)
    pointer_set_destroy (v->types_set);
}
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table <odr_hasher> odr_hash_type;
static odr_hash_type odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */
static void
add_type_duplicate (odr_type val, tree type)
{
  if (!val->types_set)
    val->types_set = pointer_set_create ();

  /* See if this duplicate is new.  */
  if (!pointer_set_insert (val->types_set, type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i, j;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!types_compatible_p (val->type, type))
        {
          merge = false;
          if (BINFO_VTABLE (TYPE_BINFO (val->type))
              && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
                             "type %qD violates one definition rule",
                             type))
            inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
                    "a type with the same name but different layout is "
                    "defined in another translation unit");
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file,
                       "ODR violation or merging or ODR type bug?\n");

              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n', cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n', cgraph_dump_file);
            }
        }

      /* Next sanity check that bases are the same.  If not, we will end
         up producing wrong answers.  */
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
          {
            odr_type base = get_odr_type
                              (BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                             i)),
                               true);
            if (val->bases.length () <= j || val->bases[j] != base)
              base_mismatch = true;
            j++;
          }
      if (base_mismatch)
        {
          merge = false;

          if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
                          "type %qD violates one definition rule",
                          type))
            inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
                    "a type with the same name but different bases is "
                    "defined in another translation unit");
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file,
                       "ODR base violation or merging bug?\n");

              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n', cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n', cgraph_dump_file);
            }
        }

      /* Regularize things a little.  During LTO the same types may come with
         different BINFOs.  Either because their virtual table was
         not merged by tree merging and only later at decl merging, or
         because one type comes with an external vtable, while the other
         with an internal one.  We want to merge equivalent binfos to conserve
         memory and streaming overhead.

         The external vtables are more harmful: they contain references
         to external declarations of methods that may be defined in the
         merged LTO unit.  For this reason we absolutely need to remove
         them and replace them by internal variants.  Not doing so will lead
         to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge)
        {
          tree master_binfo = TYPE_BINFO (val->type);
          tree v1 = BINFO_VTABLE (master_binfo);
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

          if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
            {
              gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
                          && operand_equal_p (TREE_OPERAND (v1, 1),
                                              TREE_OPERAND (v2, 1), 0));
              v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
              v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
            }
          gcc_assert (DECL_ASSEMBLER_NAME (v1)
                      == DECL_ASSEMBLER_NAME (v2));

          if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
            {
              TYPE_BINFO (val->type) = TYPE_BINFO (type);
              for (i = 0; i < val->types->length (); i++)
                if (TYPE_BINFO ((*val->types)[i]) == master_binfo)
                  TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
            }
          else
            TYPE_BINFO (type) = master_binfo;
        }
    }
}
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   a possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;

  type = TYPE_MAIN_VARIANT (type);
  gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
  hash = hash_type_name (type);
  slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have an entry for the type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representations of
         the same ODR type.  */
      if (val->type != type)
        add_type_duplicate (val, type);
    }
  else
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      val = ggc_alloc_cleared_odr_type_d ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
        /* For now record only polymorphic types.  Others are
           pointless for devirtualization and we cannot precisely
           determine ODR equivalency of these during LTO.  */
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
          {
            odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
                                                                        i)),
                                          true);
            base->derived_types.safe_push (val);
            val->bases.safe_push (base);
          }
      /* First record bases, then add into array so ids are increasing.  */
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
/* Dump ODR type T and all its derived types.  INDENT specifies the
   indentation for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent = 0)
{
  unsigned int i;

  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)" : "");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
               DECL_SOURCE_FILE (TYPE_NAME (t->type)),
               DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
        fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
        dump_odr_type (f, t->derived_types[i], indent + 1);
    }
}
/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;

  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->bases.length () == 0)
        dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->types && odr_types[i]->types->length ())
        {
          unsigned int j;

          fprintf (f, "Duplicate tree types for odr type %i\n", i);
          print_node (f, "", odr_types[i]->type, 0);
          for (j = 0; j < odr_types[i]->types->length (); j++)
            {
              tree t;

              fprintf (f, "duplicate #%i\n", j);
              print_node (f, "", (*odr_types[i]->types)[j], 0);
              t = (*odr_types[i]->types)[j];
              while (TYPE_P (t) && TYPE_CONTEXT (t))
                {
                  t = TYPE_CONTEXT (t);
                  print_node (f, "", t, 0);
                }
              putc ('\n', f);
            }
        }
    }
}
/* Given method type T, return the type of the class it belongs to.
   Look at the this pointer and get its type.  */

tree
method_class_type (tree t)
{
  tree first_parm_type;

  gcc_assert (TREE_CODE (t) == METHOD_TYPE);
  first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  return TREE_TYPE (first_parm_type);
}
/* Initialize IPA devirt and build the inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct cgraph_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash.is_created ())
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);

  /* We reconstruct the graph starting from the types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
        && symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
/* If TARGET has an associated node, record it in the NODES array.
   If TARGET cannot be inserted (for example because its body was
   already removed and there is no way to refer to it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
                   tree target, pointer_set_t *inserted,
                   bool *completep)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (!target
      /* Those are used to mark impossible scenarios.  */
      || (fcode = DECL_FUNCTION_CODE (target))
         == BUILT_IN_UNREACHABLE
      || fcode == BUILT_IN_TRAP)
    return;

  target_node = cgraph_get_node (target);

  if (target_node != NULL
      && (TREE_PUBLIC (target)
          || target_node->definition)
      && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target))
        {
          pointer_set_insert (cached_polymorphic_call_targets,
                              target_node);
          nodes.safe_push (target_node);
        }
    }
  else if (completep
           && !type_in_anonymous_namespace_p
                 (method_class_type (TREE_TYPE (target))))
    *completep = false;
}
/* See if BINFO's type matches OUTER_TYPE.  If so, look up the
   BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in the vtable and insert it into the NODES array.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with the offset
   being unknown.

   TYPE_BINFO is the binfo holding a virtual table matching
   BINFO's type.  In the case of single inheritance, this
   is the binfo of BINFO's type ancestor (the vtable is shared),
   otherwise it is the binfo of BINFO's type.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for a virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of an anonymous namespace.  */
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
                          tree binfo,
                          tree otr_type,
                          tree type_binfo,
                          HOST_WIDE_INT otr_token,
                          tree outer_type,
                          HOST_WIDE_INT offset,
                          pointer_set_t *inserted,
                          pointer_set_t *matched_vtables,
                          bool anonymous)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  gcc_checking_assert (BINFO_VTABLE (type_binfo));

  if (types_same_for_odr (type, outer_type))
    {
      tree inner_binfo = get_binfo_at_offset (type_binfo,
                                              offset, otr_type);
      /* For types in anonymous namespace first check if the respective vtable
         is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
        {
          tree vtable = BINFO_VTABLE (inner_binfo);
          struct varpool_node *vnode;

          if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
            vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
          vnode = varpool_get_node (vtable);
          if (!vnode || !vnode->definition)
            return;
        }
      gcc_assert (inner_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
        {
          tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
          if (target)
            maybe_record_node (nodes, target, inserted, NULL);
        }
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, base_binfo, otr_type,
                                /* In the case of single inheritance,
                                   the virtual table is shared with
                                   the outer type.  */
                                BINFO_VTABLE (base_binfo)
                                ? base_binfo : type_binfo,
                                otr_token, outer_type, offset, inserted,
                                matched_vtables, anonymous);
}
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them into NODES, and recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES is used to avoid walking vtables more than once.  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
                                     pointer_set_t *inserted,
                                     pointer_set_t *matched_vtables,
                                     tree otr_type,
                                     odr_type type,
                                     HOST_WIDE_INT otr_token,
                                     tree outer_type,
                                     HOST_WIDE_INT offset)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;

  record_target_from_binfo (nodes, binfo, otr_type, binfo, otr_token,
                            outer_type, offset,
                            inserted, matched_vtables,
                            type->anonymous_namespace);
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
                                         matched_vtables,
                                         otr_type,
                                         type->derived_types[i],
                                         otr_token, outer_type, offset);
}
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  odr_type type;
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  vec <cgraph_node *> targets;
  bool final;
};
/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  hash = iterative_hash_host_wide_int
          (odr_query->otr_token,
           odr_query->type->id);
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
                                   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  return iterative_hash_hashval_t
          (((int) odr_query->context.maybe_in_construction << 1)
           | (int) odr_query->context.maybe_derived_type, hash);
}
/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
                                       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
          && t1->context.offset == t2->context.offset
          && t1->context.outer_type == t2->context.outer_type
          && t1->context.maybe_in_construction
             == t2->context.maybe_in_construction
          && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
}
/* Remove an entry from the polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}
/* Polymorphic call target query cache.  */

typedef hash_table <polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type polymorphic_call_target_hash;
/* Destroy the polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      polymorphic_call_target_hash.dispose ();
      pointer_set_destroy (cached_polymorphic_call_targets);
      cached_polymorphic_call_targets = NULL;
    }
}
/* When a virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && pointer_set_contains (cached_polymorphic_call_targets, n))
    free_polymorphic_call_targets_hash ();
}
/* CONTEXT->OUTER_TYPE is the type of the memory object where an object of
   EXPECTED_TYPE is contained at CONTEXT->OFFSET.  Walk the memory
   representation of CONTEXT->OUTER_TYPE and find the outermost class type
   that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update
   CONTEXT to represent it.

   For example, when CONTEXT represents a type

     class A
       {
         int a;
         class B b;
       };

   and we look for the type at offset sizeof(int), we end up with B and
   offset 0.  If the same is produced by multiple inheritance, we end up
   with A and offset sizeof(int).

   If we cannot find the corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
   Return true when the lookup was successful.  */
static bool
get_class_context (ipa_polymorphic_call_context *context,
                   tree expected_type)
{
  tree type = context->outer_type;
  HOST_WIDE_INT offset = context->offset;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* On a match, just return what we found.  */
      if (TREE_CODE (type) == TREE_CODE (expected_type)
          && types_same_for_odr (type, expected_type))
        {
          gcc_assert (offset == 0);
          return true;
        }

      /* Walk fields and find the corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
        {
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              size = tree_to_uhwi (DECL_SIZE (fld));
              if (pos <= offset && (pos + size) > offset)
                break;
            }
          if (!fld)
            goto give_up;

          type = TREE_TYPE (fld);
          offset -= pos;
          /* DECL_ARTIFICIAL represents a basetype.  */
          if (!DECL_ARTIFICIAL (fld))
            {
              context->outer_type = type;
              context->offset = offset;
              /* As soon as we see a field containing the type,
                 we know we are not looking for derivations.  */
              context->maybe_derived_type = false;
            }
        }
      else if (TREE_CODE (type) == ARRAY_TYPE)
        {
          tree subtype = TREE_TYPE (type);

          /* Give up if we don't know the array element size.  */
          if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
              || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
            goto give_up;
          offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
          type = subtype;
          context->outer_type = type;
          context->offset = offset;
          context->maybe_derived_type = false;
        }
      /* Give up on anything else.  */
      else
        goto give_up;
    }

give_up:
  /* If we failed to find the subtype we look for, give up and fall back to
     the most generic query.  */
  context->outer_type = expected_type;
  context->offset = 0;
  context->maybe_derived_type = true;
  return false;
}
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
                 tree otr_type)
{
  ipa_polymorphic_call_context context = {offset, outer_type,
                                          false, true};
  return get_class_context (&context, otr_type);
}
/* Given a call REF in FNDECL, determine the class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return a pointer to the object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
                           tree ref,
                           tree *otr_type,
                           HOST_WIDE_INT *otr_token,
                           ipa_polymorphic_call_context *context)
{
  tree base_pointer;

  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->outer_type = *otr_type;
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = false;

  /* Walk SSA for the outer object.  */
  do
    {
      if (TREE_CODE (base_pointer) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
          && SSA_NAME_DEF_STMT (base_pointer)
          && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
        {
          base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
          STRIP_NOPS (base_pointer);
        }
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
        {
          HOST_WIDE_INT size, max_size;
          HOST_WIDE_INT offset2;
          tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
                                               &offset2, &size, &max_size);

          /* If this is a varying address, punt.  */
          if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
              && max_size != -1
              && max_size == size)
            {
              /* We found a dereference of a pointer.  The type of the pointer
                 and MEM_REF is meaningless, but we can look further.  */
              if (TREE_CODE (base) == MEM_REF)
                {
                  base_pointer = TREE_OPERAND (base, 0);
                  context->offset
                    += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
                  context->outer_type = NULL;
                }
              /* We found the base object.  In this case the outer_type
                 is known.  */
              else if (DECL_P (base))
                {
                  context->outer_type = TREE_TYPE (base);
                  gcc_assert (!POINTER_TYPE_P (context->outer_type));

                  /* Only type inconsistent programs can have otr_type that is
                     not part of outer type.  */
                  if (!contains_type_p (context->outer_type,
                                        context->offset, *otr_type))
                    return base_pointer;
                  context->offset += offset2;

                  /* Make the very conservative assumption that all objects
                     may be in construction.
                     TODO: ipa-prop already contains code to tell better.  */
                  context->maybe_in_construction = true;
                  context->maybe_derived_type = false;
                  return base_pointer;
                }
              else
                break;
            }
          else
            break;
        }
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
               && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
        {
          context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
                             * BITS_PER_UNIT;
          base_pointer = TREE_OPERAND (base_pointer, 0);
        }
      else
        break;
    }
  while (true);
  /* Try to determine the type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if the parameter is the THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
          && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
        {
          context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
          gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

          /* Dynamic casting has possibly upcasted the type
             in the hierarchy.  In this case the outer type is less
             informative than the inner type and we should forget
             about it.  */
          if (!contains_type_p (context->outer_type, context->offset,
                                *otr_type))
            {
              context->outer_type = NULL;
              return base_pointer;
            }

          /* If the function is a constructor or destructor, then
             the type is possibly in construction, but we know
             it is not a derived type.  */
          if (DECL_CXX_CONSTRUCTOR_P (fndecl)
              || DECL_CXX_DESTRUCTOR_P (fndecl))
            {
              context->maybe_in_construction = true;
              context->maybe_derived_type = false;
            }
          else
            {
              context->maybe_derived_type = true;
              context->maybe_in_construction = false;
            }
          return base_pointer;
        }
      /* Non-PODs passed by value are really passed by invisible
         reference.  In this case we also know the type of the
         object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
        {
          context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
          gcc_assert (!POINTER_TYPE_P (context->outer_type));
          /* Only type inconsistent programs can have otr_type that is
             not part of outer type.  */
          if (!contains_type_p (context->outer_type, context->offset,
                                *otr_type))
            {
              context->outer_type = NULL;
              return base_pointer;
            }
          context->maybe_derived_type = false;
          context->maybe_in_construction = false;
          return base_pointer;
        }
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them into NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
static void
record_targets_from_bases (tree otr_type,
                           HOST_WIDE_INT otr_token,
                           tree outer_type,
                           HOST_WIDE_INT offset,
                           vec <cgraph_node *> nodes,
                           pointer_set_t *inserted,
                           pointer_set_t *matched_vtables,
                           bool *completep)
{
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree base_binfo;
      tree fld;

      if (types_same_for_odr (outer_type, otr_type))
        return;

      for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_to_shwi (DECL_SIZE (fld));
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      /* Within a class type we should always find the corresponding field.  */
      gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);

      /* Nonbase types should have been stripped by outer_class_type.  */
      gcc_assert (DECL_ARTIFICIAL (fld));

      outer_type = TREE_TYPE (fld);
      offset -= pos;

      base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
                                        offset, otr_type);
      gcc_assert (base_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
        {
          tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
          if (target)
            maybe_record_node (nodes, target, inserted, completep);
          /* The only way a method in an anonymous namespace can become
             unreferable is that it has been fully optimized out.  */
          else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
            *completep = false;
          pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
        }
    }
}
/* When a virtual table is removed, we may need to flush the cache.  */

static void
devirt_variable_node_removal_hook (struct varpool_node *n,
                                   void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}
/* Return a vector containing the possible targets of a polymorphic call of
   type OTR_TYPE calling method OTR_TOKEN within a type of OTR_OUTER_TYPE
   and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual methods.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of the base types.  INCLUDE_DERIVED_TYPES makes
   us walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
   in the target cache.  If the user needs to visit every target list
   just once, it can memoize them.

   The returned vector is placed into the cache.  It is NOT the caller's
   responsibility to free it.  The vector can be freed on a
   cgraph_remove_node call if the particular node is a virtual function
   present in the cache.  */
vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
                                   HOST_WIDE_INT otr_token,
                                   ipa_polymorphic_call_context context,
                                   bool *completep,
                                   void **cache_token)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  pointer_set_t *inserted;
  pointer_set_t *matched_vtables;
  vec <cgraph_node *> nodes = vNULL;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool final;

  type = get_odr_type (otr_type, true);

  /* Look up the outer class type we want to walk.  */
  if (context.outer_type)
    get_class_context (&context, otr_type);

  /* We now canonicalize our query, so we do not need extra hashtable
     entries.  */

  /* Without an outer type, we have no use for offset.  Just do the
     basic search from the inner type.  */
  if (!context.outer_type)
    {
      context.outer_type = otr_type;
      context.offset = 0;
    }
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If outer and inner type match, there are no bases to see.  */
  if (type == outer_type)
    context.maybe_in_construction = false;
  /* If the type is final, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = pointer_set_create ();
      polymorphic_call_target_hash.create (23);
      if (!node_removal_hook_holder)
        {
          node_removal_hook_holder =
            cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
          varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
                                         NULL);
        }
    }
  /* Look up the cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *) *slot;
  if (*slot)
    {
      if (completep)
        *completep = (*slot)->final;
      return (*slot)->targets;
    }

  final = true;

  /* Do the actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *) *slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;

  inserted = pointer_set_create ();
  matched_vtables = pointer_set_create ();

  /* First see the virtual method of the type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
                               context.offset, otr_type);
  target = gimple_get_virt_method_for_binfo (otr_token, binfo);
  if (target)
    {
      maybe_record_node (nodes, target, inserted, &final);

      /* In the case we get a final method, we don't need
         to walk derivations.  */
      if (DECL_FINAL_P (target))
        context.maybe_derived_type = false;
    }
  /* The only way a method in an anonymous namespace can become unreferable
     is that it has been fully optimized out.  */
  else if (flag_ltrans || !type->anonymous_namespace)
    final = false;
  pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

  /* Next walk bases, if asked to.  */
  if (context.maybe_in_construction)
    record_targets_from_bases (otr_type, otr_token, outer_type->type,
                               context.offset, nodes, inserted,
                               matched_vtables, &final);

  /* Finally walk recursively all derived types.  */
  if (context.maybe_derived_type)
    {
      /* For anonymous namespace types we can attempt to build the full type.
         All derivations must be in this unit (unless we see a partial unit).  */
      if (!type->anonymous_namespace || flag_ltrans)
        final = false;
      for (i = 0; i < outer_type->derived_types.length (); i++)
        possible_polymorphic_call_targets_1 (nodes, inserted,
                                             matched_vtables,
                                             otr_type,
                                             outer_type->derived_types[i],
                                             otr_token, outer_type->type,
                                             context.offset);
    }
  (*slot)->targets = nodes;
  (*slot)->final = final;
  if (completep)
    *completep = final;

  pointer_set_destroy (inserted);
  pointer_set_destroy (matched_vtables);
  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
                                        tree otr_type,
                                        HOST_WIDE_INT otr_token,
                                        const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (otr_type, false);
  unsigned int i;

  if (!type)
    return;
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
                                               ctx, &final);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n"
           "    Contained in type:",
           (int) otr_token);
  print_generic_expr (f, ctx.outer_type, TDF_SLIM);
  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC "\n"
           "    %s%s%s\n      ",
           ctx.offset,
           final ? "This is full list." :
           "This is partial list; extra targets may be defined in other units.",
           ctx.maybe_in_construction ? " (base types included)" : "",
           ctx.maybe_derived_type ? " (derived types included)" : "");
  for (i = 0; i < targets.length (); i++)
    fprintf (f, " %s/%i", targets[i]->name (),
             targets[i]->order);
  fprintf (f, "\n\n");
}
/* Return true if N can possibly be a target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
                                    HOST_WIDE_INT otr_token,
                                    const ipa_polymorphic_call_context &ctx,
                                    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
          == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  if (!odr_hash.is_created ())
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (symtab_semantically_equivalent_p (n, targets[i]))
      return true;

  /* At the moment we allow the middle end to dig out new external
     declarations as targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash.is_created ())
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from the types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
        && !n->definition
        && symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
/* Return true if N looks like a likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturns, functions declared cold and
   other obvious cases.  */

static bool
likely_target_p (struct cgraph_node *n)
{
  int flags;

  /* cxa_pure_virtual and similar things are not likely.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
    return false;
  flags = flags_from_decl_or_type (n->decl);
  if (flags & ECF_NORETURN)
    return false;
  if (lookup_attribute ("cold",
                        DECL_ATTRIBUTES (n->decl)))
    return false;
  if (n->frequency < NODE_FREQUENCY_NORMAL)
    return false;
  return true;
}
/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
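
/* As a rough, hypothetical sketch of what the transformation achieves: a hot
   polymorphic call

     a->foo ();

   whose only likely target is A::foo becomes a speculative edge.  Later
   passes may expand it into a guarded form along the lines of

     if (loaded vtable entry == &A::foo)
       A::foo (a);     // direct call, now inlinable
     else
       a->foo ();      // original polymorphic call kept as fallback

   so the program stays correct even when the speculation turns out to be
   wrong.  */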
static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  struct pointer_set_t *bad_call_targets = pointer_set_create ();
  struct cgraph_edge *e;

  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0;

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
        fprintf (dump_file, "\n\nProcessing function %s/%i\n",
                 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
        if (e->indirect_info->polymorphic)
          {
            struct cgraph_node *likely_target = NULL;
            void *cache_token;
            bool final;
            vec <cgraph_node *> targets
              = possible_polymorphic_call_targets
                  (e, &final, &cache_token);
            unsigned int i;

            if (dump_file)
              dump_possible_polymorphic_call_targets
                (dump_file, e);

            npolymorphic++;

            if (!cgraph_maybe_hot_edge_p (e))
              {
                if (dump_file)
                  fprintf (dump_file, "Call is cold\n");
                ncold++;
                continue;
              }
            if (e->speculative)
              {
                if (dump_file)
                  fprintf (dump_file, "Call is already speculated\n");
                nspeculated++;

                /* When dumping see if we agree with the speculation.  */
                if (!dump_file)
                  continue;
              }
            if (pointer_set_contains (bad_call_targets,
                                      cache_token))
              {
                if (dump_file)
                  fprintf (dump_file, "Target list is known to be useless\n");
                nmultiple++;
                continue;
              }
            for (i = 0; i < targets.length (); i++)
              if (likely_target_p (targets[i]))
                {
                  if (likely_target)
                    {
                      likely_target = NULL;
                      if (dump_file)
                        fprintf (dump_file, "More than one likely target\n");
                      nmultiple++;
                      break;
                    }
                  likely_target = targets[i];
                }
            if (!likely_target)
              {
                pointer_set_insert (bad_call_targets, cache_token);
                continue;
              }
            /* This is reached only when dumping; check if we agree or
               disagree with the speculation.  */
            if (e->speculative)
              {
                struct cgraph_edge *e2;
                struct ipa_ref *ref;
                cgraph_speculative_call_info (e, e2, e, ref);
                if (cgraph_function_or_thunk_node (e2->callee, NULL)
                    == cgraph_function_or_thunk_node (likely_target, NULL))
                  {
                    fprintf (dump_file, "We agree with speculation\n");
                    nok++;
                  }
                else
                  {
                    fprintf (dump_file, "We disagree with speculation\n");
                    nwrong++;
                  }
                continue;
              }
            if (!likely_target->definition)
              {
                if (dump_file)
                  fprintf (dump_file, "Target is not a definition\n");
                nnotdefined++;
                continue;
              }
            /* Do not introduce new references to external symbols.  While we
               can handle these just well, it is common for programs to be
               built incorrectly, with headers defining methods they are not
               linked with.  */
            if (DECL_EXTERNAL (likely_target->decl))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is external\n");
                nexternal++;
                continue;
              }
            if (cgraph_function_body_availability (likely_target)
                  <= AVAIL_OVERWRITABLE
                && symtab_can_be_discarded (likely_target))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is overwritable\n");
                noverwritable++;
                continue;
              }
            else
              {
                if (dump_file)
                  fprintf (dump_file,
                           "Speculatively devirtualizing call in %s/%i to %s/%i\n",
                           n->name (), n->order,
                           likely_target->name (),
                           likely_target->order);
                if (!symtab_can_be_discarded (likely_target))
                  {
                    cgraph_node *alias;
                    alias = cgraph (symtab_nonoverwritable_alias
                                      (likely_target));
                    if (alias)
                      likely_target = alias;
                  }
                nconverted++;
                update = true;
                cgraph_turn_edge_to_speculative
                  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
              }
          }
      if (update)
        inline_update_overall_summary (n);
    }

  pointer_set_destroy (bad_call_targets);
  if (dump_file)
    fprintf (dump_file,
             "%i polymorphic calls, %i devirtualized,"
             " %i speculatively devirtualized, %i cold\n"
             "%i have multiple targets, %i overwritable,"
             " %i already speculated (%i agree, %i disagree),"
             " %i external, %i not defined\n",
             npolymorphic, ndevirtualized, nconverted, ncold,
             nmultiple, noverwritable, nspeculated, nok, nwrong,
             nexternal, nnotdefined);
  return ndevirtualized ? TODO_remove_functions : 0;
}
/* Gate for the speculative devirtualization pass.  */

static bool
gate_ipa_devirt (void)
{
  return flag_devirtualize_speculatively && optimize;
}
namespace {

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}

#include "gt-ipa-devirt.h"