1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
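     As a small hypothetical illustration of rule 3, the following two
     translation units violate the ODR: the class may be defined in both
     units, but the two definitions must be identical, and here they are
     not.

	// tu1.C
	struct S { virtual int f () { return 1; } };

	// tu2.C
	struct S { virtual int f () { return 2; } };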
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
 42 otr_type is the type of the class whose method is called.
 43 otr_token is the index into the virtual table from which the method's address is taken.
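     As a hedged illustration, for a call such as

	struct A { virtual int f (); virtual int g (); };
	int call (A *a) { return a->g (); }

     the GIMPLE statement carries an OBJ_TYPE_REF whose otr_type is A and
     whose otr_token is typically 1, the virtual table slot of A::g
     (A::f would typically get token 0).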
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
 55 vector. Members of this vector are not the BINFOs associated
 56 with the base type. Rather they are new copies of BINFOs
 57 (base BINFOs). Their virtual tables may differ from the
 58 virtual table of the base type. Also BINFO_OFFSET specifies the
 59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
 63 inheritance the individual virtual tables are pointed to by
 64 BINFO_VTABLE of the base binfos (which differ from BINFO_VTABLE of the
 65 binfo associated with the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
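     A rough, hypothetical sketch of the data involved:

	struct A { virtual void f (); };
	struct B { virtual void g (); };
	struct C : A, B { };

     Here TYPE_BINFO (C) carries two base BINFOs, fresh copies describing
     the A subobject at BINFO_OFFSET 0 and the B subobject at a non-zero
     offset in the typical layout; get_binfo_at_offset with B's offset
     returns the BINFO whose virtual table is used for calls made through
     the B part of C.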
72 token
 73 This is the index of a virtual method in the virtual table associated
 74 with the type defining it. The token can be looked up from an OBJ_TYPE_REF
 75 or from DECL_VINDEX of a given virtual method.
77 polymorphic (indirect) call
 78 This is the callgraph representation of a virtual method call. Every
 79 polymorphic call contains the otr_type and otr_token taken from the
 80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
 96 (The multiple type nodes appear only with link-time optimization.)
 98 Edges are represented by odr_type->bases and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
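     For instance, assuming all of the following classes have their methods
     seen in the unit:

	struct A { virtual void f (); };
	struct B : A { void f (); };
	struct C : B { void f (); };

     the graph would contain three odr_type vertices; speaking loosely,
     A->derived_types lists B, B->derived_types lists C, and the bases
     vectors point back the other way.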
 102 possible_polymorphic_call_targets returns, given the parameters found in an
 103 indirect polymorphic edge, all possible polymorphic call targets of the call.
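     For illustration, when the returned set contains a single possible
     target, the indirect call can be turned into a direct (and inlinable)
     call; a hypothetical example:

	struct A { virtual int f () = 0; };
	struct B final : A { int f () { return 1; } };

	int call (A *a) { return a->f (); }   // only possible target is B::f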
 105 pass_ipa_devirt performs simple speculative devirtualization. */
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "predict.h"
116 #include "basic-block.h"
117 #include "hash-map.h"
118 #include "is-a.h"
119 #include "plugin-api.h"
120 #include "vec.h"
121 #include "hashtab.h"
122 #include "hash-set.h"
123 #include "machmode.h"
124 #include "hard-reg-set.h"
125 #include "input.h"
126 #include "function.h"
127 #include "ipa-ref.h"
128 #include "cgraph.h"
129 #include "expr.h"
130 #include "tree-pass.h"
131 #include "target.h"
132 #include "hash-table.h"
133 #include "inchash.h"
134 #include "tree-pretty-print.h"
135 #include "ipa-utils.h"
136 #include "tree-ssa-alias.h"
137 #include "internal-fn.h"
138 #include "gimple-fold.h"
139 #include "gimple-expr.h"
140 #include "gimple.h"
141 #include "alloc-pool.h"
142 #include "symbol-summary.h"
143 #include "ipa-prop.h"
144 #include "ipa-inline.h"
145 #include "diagnostic.h"
146 #include "tree-dfa.h"
147 #include "demangle.h"
148 #include "dbgcnt.h"
149 #include "gimple-pretty-print.h"
150 #include "stor-layout.h"
151 #include "intl.h"
153 /* Hash based set of pairs of types. */
154 typedef struct
156 tree first;
157 tree second;
158 } type_pair;
160 struct pair_traits : default_hashset_traits
162 static hashval_t
163 hash (type_pair p)
165 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
167 static bool
168 is_empty (type_pair p)
170 return p.first == NULL;
172 static bool
173 is_deleted (type_pair p ATTRIBUTE_UNUSED)
175 return false;
177 static bool
178 equal (const type_pair &a, const type_pair &b)
180 return a.first==b.first && a.second == b.second;
182 static void
183 mark_empty (type_pair &e)
185 e.first = NULL;
189 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
190 hash_set<type_pair,pair_traits> *);
192 static bool odr_violation_reported = false;
195 /* Pointer set of all call targets appearing in the cache. */
196 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
198 /* The node of type inheritance graph. For each type unique in
199 One Definition Rule (ODR) sense, we produce one node linking all
200 main variants of types equivalent to it, bases and derived types. */
202 struct GTY(()) odr_type_d
204 /* leader type. */
205 tree type;
206 /* All bases; built only for main variants of types. */
207 vec<odr_type> GTY((skip)) bases;
208 /* All derived types with virtual methods seen in unit;
209 built only for main variants of types. */
210 vec<odr_type> GTY((skip)) derived_types;
212 /* All equivalent types, if more than one. */
213 vec<tree, va_gc> *types;
214 /* Set of all equivalent types, if NON-NULL. */
215 hash_set<tree> * GTY((skip)) types_set;
217 /* Unique ID indexing the type in odr_types array. */
218 int id;
219 /* Is it in anonymous namespace? */
220 bool anonymous_namespace;
221 /* Do we know about all derivations of given type? */
222 bool all_derivations_known;
223 /* Did we report ODR violation here? */
224 bool odr_violated;
227 /* Return TRUE if all derived types of T are known and thus
228 we may consider the walk of derived type complete.
230 This is typically true only for final anonymous namespace types and types
231 defined within functions (that may be COMDAT and thus shared across units,
232 but with the same set of derived types). */
234 bool
235 type_all_derivations_known_p (const_tree t)
237 if (TYPE_FINAL_P (t))
238 return true;
239 if (flag_ltrans)
240 return false;
241 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
242 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
243 return true;
244 if (type_in_anonymous_namespace_p (t))
245 return true;
246 return (decl_function_context (TYPE_NAME (t)) != NULL);
249 /* Return TRUE if type's constructors are all visible. */
251 static bool
252 type_all_ctors_visible_p (tree t)
254 return !flag_ltrans
255 && symtab->state >= CONSTRUCTION
256 /* We can not always use type_all_derivations_known_p.
 257 For function local types we must assume the case where
 258 the function is COMDAT and shared between units.
260 TODO: These cases are quite easy to get, but we need
261 to keep track of C++ privatizing via -Wno-weak
262 as well as the IPA privatizing. */
263 && type_in_anonymous_namespace_p (t);
266 /* Return TRUE if type may have instance. */
268 static bool
269 type_possibly_instantiated_p (tree t)
271 tree vtable;
272 varpool_node *vnode;
274 /* TODO: Add abstract types here. */
275 if (!type_all_ctors_visible_p (t))
276 return true;
278 vtable = BINFO_VTABLE (TYPE_BINFO (t));
279 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
280 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
281 vnode = varpool_node::get (vtable);
282 return vnode && vnode->definition;
285 /* One Definition Rule hashtable helpers. */
287 struct odr_hasher
289 typedef odr_type_d value_type;
290 typedef union tree_node compare_type;
291 static inline hashval_t hash (const value_type *);
292 static inline bool equal (const value_type *, const compare_type *);
293 static inline void remove (value_type *);
 296 /* Return the type that was declared with T's name so that T is a
 297 qualified variant of it. */
299 static inline tree
300 main_odr_variant (const_tree t)
302 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
303 return TREE_TYPE (TYPE_NAME (t));
304 /* Unnamed types and non-C++ produced types can be compared by variants. */
305 else
306 return TYPE_MAIN_VARIANT (t);
309 /* Produce hash based on type name. */
311 static hashval_t
312 hash_type_name (tree t)
314 gcc_checking_assert (main_odr_variant (t) == t);
316 /* If not in LTO, all main variants are unique, so we can do
317 pointer hash. */
318 if (!in_lto_p)
319 return htab_hash_pointer (t);
321 /* Anonymous types are unique. */
322 if (type_in_anonymous_namespace_p (t))
323 return htab_hash_pointer (t);
325 /* ODR types have name specified. */
326 if (TYPE_NAME (t)
327 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
328 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
 330 /* For polymorphic types that were compiled with -fno-lto-odr-type-merging
331 we can simply hash the virtual table. */
332 if (TREE_CODE (t) == RECORD_TYPE
333 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
335 tree v = BINFO_VTABLE (TYPE_BINFO (t));
336 hashval_t hash = 0;
338 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
340 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
341 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
344 v = DECL_ASSEMBLER_NAME (v);
345 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
346 return hash;
349 /* Builtin types may appear as main variants of ODR types and are unique.
350 Sanity check we do not get anything that looks non-builtin. */
351 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
352 || TREE_CODE (t) == VOID_TYPE
353 || TREE_CODE (t) == COMPLEX_TYPE
354 || TREE_CODE (t) == REAL_TYPE
355 || TREE_CODE (t) == POINTER_TYPE);
356 return htab_hash_pointer (t);
359 /* Return the computed hashcode for ODR_TYPE. */
361 inline hashval_t
362 odr_hasher::hash (const value_type *odr_type)
364 return hash_type_name (odr_type->type);
367 /* For languages with One Definition Rule, work out if
368 types are the same based on their name.
370 This is non-trivial for LTO where minor differences in
371 the type representation may have prevented type merging
 372 from merging two copies of an otherwise equivalent type.
374 Until we start streaming mangled type names, this function works
375 only for polymorphic types. */
377 bool
378 types_same_for_odr (const_tree type1, const_tree type2)
380 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
382 type1 = main_odr_variant (type1);
383 type2 = main_odr_variant (type2);
385 if (type1 == type2)
386 return true;
388 if (!in_lto_p)
389 return false;
391 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
392 on the corresponding TYPE_STUB_DECL. */
393 if (type_in_anonymous_namespace_p (type1)
394 || type_in_anonymous_namespace_p (type2))
395 return false;
398 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
400 Ideally we should never need types without ODR names here. It can however
401 happen in two cases:
403 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
404 Here testing for equivalence is safe, since their MAIN_VARIANTs are
405 unique.
406 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
407 establish precise ODR equivalency, but for correctness we care only
408 about equivalency on complete polymorphic types. For these we can
409 compare assembler names of their virtual tables. */
410 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
411 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
413 /* See if types are obviously different (i.e. different codes
414 or polymorphic wrt non-polymorphic). This is not strictly correct
415 for ODR violating programs, but we can't do better without streaming
416 ODR names. */
417 if (TREE_CODE (type1) != TREE_CODE (type2))
418 return false;
419 if (TREE_CODE (type1) == RECORD_TYPE
 420 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
421 return false;
422 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
423 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
424 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
425 return false;
427 /* At the moment we have no way to establish ODR equivalence at LTO
428 other than comparing virtual table pointers of polymorphic types.
429 Eventually we should start saving mangled names in TYPE_NAME.
430 Then this condition will become non-trivial. */
432 if (TREE_CODE (type1) == RECORD_TYPE
433 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
434 && BINFO_VTABLE (TYPE_BINFO (type1))
435 && BINFO_VTABLE (TYPE_BINFO (type2)))
437 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
438 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
439 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
440 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
441 return (operand_equal_p (TREE_OPERAND (v1, 1),
442 TREE_OPERAND (v2, 1), 0)
443 && DECL_ASSEMBLER_NAME
444 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
445 == DECL_ASSEMBLER_NAME
446 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
448 gcc_unreachable ();
450 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
451 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
454 /* Return true if we can decide on ODR equivalency.
 456 In non-LTO it is always decidable; in LTO, however, it depends on whether the type has
457 ODR info attached. */
459 bool
460 types_odr_comparable (tree t1, tree t2)
462 return (!in_lto_p
463 || main_odr_variant (t1) == main_odr_variant (t2)
464 || (odr_type_p (t1) && odr_type_p (t2))
465 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
466 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
467 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
468 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
471 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
472 known, be conservative and return false. */
474 bool
475 types_must_be_same_for_odr (tree t1, tree t2)
477 if (types_odr_comparable (t1, t2))
478 return types_same_for_odr (t1, t2);
479 else
480 return main_odr_variant (t1) == main_odr_variant (t2);
483 /* Compare types T1 and T2 and return true if they are
484 equivalent. */
486 inline bool
487 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
489 tree t2 = const_cast <tree> (ct2);
491 gcc_checking_assert (main_odr_variant (t2) == t2);
492 if (t1->type == t2)
493 return true;
494 if (!in_lto_p)
495 return false;
496 return types_same_for_odr (t1->type, t2);
499 /* Free ODR type V. */
501 inline void
502 odr_hasher::remove (value_type *v)
504 v->bases.release ();
505 v->derived_types.release ();
506 if (v->types_set)
507 delete v->types_set;
508 ggc_free (v);
511 /* ODR type hash used to look up ODR type based on tree type node. */
513 typedef hash_table<odr_hasher> odr_hash_type;
514 static odr_hash_type *odr_hash;
516 /* ODR types are also stored into ODR_TYPE vector to allow consistent
517 walking. Bases appear before derived types. Vector is garbage collected
518 so we won't end up visiting empty types. */
520 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
521 #define odr_types (*odr_types_ptr)
523 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
524 void
525 set_type_binfo (tree type, tree binfo)
527 for (; type; type = TYPE_NEXT_VARIANT (type))
528 if (COMPLETE_TYPE_P (type))
529 TYPE_BINFO (type) = binfo;
530 else
531 gcc_assert (!TYPE_BINFO (type));
 534 /* Compare T1 and T2 based on name or structure. */
536 static bool
537 odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<type_pair,pair_traits> *visited)
539 bool an1, an2;
541 /* This can happen in incomplete types that should be handled earlier. */
542 gcc_assert (t1 && t2);
544 t1 = main_odr_variant (t1);
545 t2 = main_odr_variant (t2);
546 if (t1 == t2)
547 return true;
549 /* Anonymous namespace types must match exactly. */
550 an1 = type_in_anonymous_namespace_p (t1);
551 an2 = type_in_anonymous_namespace_p (t2);
552 if (an1 != an2 || an1)
553 return false;
555 /* For ODR types be sure to compare their names.
 556 To support -fno-lto-odr-type-merging we allow one type to be non-ODR
 557 and the other ODR even though it is a violation. */
558 if (types_odr_comparable (t1, t2))
560 if (!types_same_for_odr (t1, t2))
561 return false;
562 /* Limit recursion: If subtypes are ODR types and we know
 563 that they are the same, be happy. */
564 if (!get_odr_type (t1, true)->odr_violated)
565 return true;
568 /* Component types, builtins and possibly violating ODR types
569 have to be compared structurally. */
570 if (TREE_CODE (t1) != TREE_CODE (t2))
571 return false;
572 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
573 return false;
574 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
575 return false;
577 type_pair pair={t1,t2};
578 if (TYPE_UID (t1) > TYPE_UID (t2))
580 pair.first = t2;
581 pair.second = t1;
583 if (visited->add (pair))
584 return true;
585 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
588 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
589 violation warnings. */
591 void
592 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
594 int n1, n2;
595 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
597 odr_violation_reported = true;
598 if (DECL_VIRTUAL_P (prevailing->decl))
600 varpool_node *tmp = prevailing;
601 prevailing = vtable;
602 vtable = tmp;
604 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
605 OPT_Wodr,
606 "virtual table of type %qD violates one definition rule",
607 DECL_CONTEXT (vtable->decl)))
608 inform (DECL_SOURCE_LOCATION (prevailing->decl),
609 "variable of same assembler name as the virtual table is "
610 "defined in another translation unit");
611 return;
613 if (!prevailing->definition || !vtable->definition)
614 return;
615 for (n1 = 0, n2 = 0; true; n1++, n2++)
617 struct ipa_ref *ref1, *ref2;
618 bool end1, end2;
619 end1 = !prevailing->iterate_reference (n1, ref1);
620 end2 = !vtable->iterate_reference (n2, ref2);
621 if (end1 && end2)
622 return;
623 if (!end1 && !end2
624 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
625 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
626 && !n2
627 && !DECL_VIRTUAL_P (ref2->referred->decl)
628 && DECL_VIRTUAL_P (ref1->referred->decl))
630 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
631 "virtual table of type %qD contains RTTI information",
632 DECL_CONTEXT (vtable->decl)))
634 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
635 "but is prevailed by one without from other translation unit");
636 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
637 "RTTI will not work on this type");
639 n2++;
640 end2 = !vtable->iterate_reference (n2, ref2);
642 if (!end1 && !end2
643 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
644 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
645 && !n1
646 && !DECL_VIRTUAL_P (ref1->referred->decl)
647 && DECL_VIRTUAL_P (ref2->referred->decl))
649 n1++;
650 end1 = !vtable->iterate_reference (n1, ref1);
652 if (end1 || end2)
654 if (end1)
656 varpool_node *tmp = prevailing;
657 prevailing = vtable;
658 vtable = tmp;
659 ref1 = ref2;
661 if (warning_at (DECL_SOURCE_LOCATION
662 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
663 "virtual table of type %qD violates "
664 "one definition rule",
665 DECL_CONTEXT (vtable->decl)))
667 inform (DECL_SOURCE_LOCATION
668 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
669 "the conflicting type defined in another translation "
670 "unit");
671 inform (DECL_SOURCE_LOCATION
672 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
673 "contains additional virtual method %qD",
674 ref1->referred->decl);
676 return;
678 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
679 != DECL_ASSEMBLER_NAME (ref2->referred->decl))
681 if (warning_at (DECL_SOURCE_LOCATION
682 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
683 "virtual table of type %qD violates "
684 "one definition rule ",
685 DECL_CONTEXT (vtable->decl)))
687 inform (DECL_SOURCE_LOCATION
688 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
689 "the conflicting type defined in another translation "
690 "unit");
691 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
692 "virtual method %qD", ref1->referred->decl);
693 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
694 "ought to match virtual method %qD but does not",
695 ref2->referred->decl);
696 return;
702 /* Output ODR violation warning about T1 and T2 with REASON.
703 Display location of ST1 and ST2 if REASON speaks about field or
704 method of the type.
705 If WARN is false, do nothing. Set WARNED if warning was indeed
706 output. */
708 void
709 warn_odr (tree t1, tree t2, tree st1, tree st2,
710 bool warn, bool *warned, const char *reason)
712 tree decl2 = TYPE_NAME (t2);
714 if (!warn)
715 return;
716 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
717 "type %qT violates one definition rule",
718 t1))
719 return;
720 if (!st1 && !st2)
722 /* For FIELD_DECL support also case where one of fields is
723 NULL - this is used when the structures have mismatching number of
724 elements. */
725 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
727 inform (DECL_SOURCE_LOCATION (decl2),
728 "a different type is defined in another translation unit");
729 if (!st1)
731 st1 = st2;
732 st2 = NULL;
734 inform (DECL_SOURCE_LOCATION (st1),
735 "the first difference of corresponding definitions is field %qD",
736 st1);
737 if (st2)
738 decl2 = st2;
740 else if (TREE_CODE (st1) == FUNCTION_DECL)
742 inform (DECL_SOURCE_LOCATION (decl2),
743 "a different type is defined in another translation unit");
744 inform (DECL_SOURCE_LOCATION (st1),
745 "the first difference of corresponding definitions is method %qD",
746 st1);
747 decl2 = st2;
749 else
750 return;
751 inform (DECL_SOURCE_LOCATION (decl2), reason);
753 if (warned)
754 *warned = true;
757 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
758 because they are used on same place in ODR matching types.
759 They are not; inform the user. */
761 void
762 warn_types_mismatch (tree t1, tree t2)
764 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
765 return;
766 /* In Firefox it is a common bug to have same types but in
767 different namespaces. Be a bit more informative on
768 this. */
769 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
770 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
771 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
772 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
773 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
774 DECL_NAME (TYPE_CONTEXT (t2))))))
775 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
776 "type %qT should match type %qT but is defined "
777 "in different namespace ",
778 t1, t2);
779 else
780 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
781 "type %qT should match type %qT",
782 t1, t2);
783 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
784 "the incompatible type is defined here");
787 /* Compare T1 and T2, report ODR violations if WARN is true and set
788 WARNED to true if anything is reported. Return true if types match.
789 If true is returned, the types are also compatible in the sense of
790 gimple_canonical_types_compatible_p. */
792 static bool
793 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<type_pair,pair_traits> *visited)
795 /* Check first for the obvious case of pointer identity. */
796 if (t1 == t2)
797 return true;
798 gcc_assert (!type_in_anonymous_namespace_p (t1));
799 gcc_assert (!type_in_anonymous_namespace_p (t2));
801 /* Can't be the same type if the types don't have the same code. */
802 if (TREE_CODE (t1) != TREE_CODE (t2))
804 warn_odr (t1, t2, NULL, NULL, warn, warned,
805 G_("a different type is defined in another translation unit"));
806 return false;
809 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
811 warn_odr (t1, t2, NULL, NULL, warn, warned,
812 G_("a type with different qualifiers is defined in another "
813 "translation unit"));
814 return false;
817 if (comp_type_attributes (t1, t2) != 1)
819 warn_odr (t1, t2, NULL, NULL, warn, warned,
820 G_("a type with attributes "
821 "is defined in another translation unit"));
822 return false;
825 if (TREE_CODE (t1) == ENUMERAL_TYPE)
827 tree v1, v2;
828 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
829 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
831 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
833 warn_odr (t1, t2, NULL, NULL, warn, warned,
834 G_("an enum with different value name"
835 " is defined in another translation unit"));
836 return false;
838 if (TREE_VALUE (v1) != TREE_VALUE (v2)
839 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
840 DECL_INITIAL (TREE_VALUE (v2)), 0))
842 warn_odr (t1, t2, NULL, NULL, warn, warned,
843 G_("an enum with different values is defined"
844 " in another translation unit"));
845 return false;
848 if (v1 || v2)
850 warn_odr (t1, t2, NULL, NULL, warn, warned,
851 G_("an enum with mismatching number of values "
852 "is defined in another translation unit"));
853 return false;
857 /* Non-aggregate types can be handled cheaply. */
858 if (INTEGRAL_TYPE_P (t1)
859 || SCALAR_FLOAT_TYPE_P (t1)
860 || FIXED_POINT_TYPE_P (t1)
861 || TREE_CODE (t1) == VECTOR_TYPE
862 || TREE_CODE (t1) == COMPLEX_TYPE
863 || TREE_CODE (t1) == OFFSET_TYPE
864 || POINTER_TYPE_P (t1))
866 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
868 warn_odr (t1, t2, NULL, NULL, warn, warned,
869 G_("a type with different precision is defined "
870 "in another translation unit"));
871 return false;
873 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
875 warn_odr (t1, t2, NULL, NULL, warn, warned,
876 G_("a type with different signedness is defined "
877 "in another translation unit"));
878 return false;
881 if (TREE_CODE (t1) == INTEGER_TYPE
882 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
884 /* char WRT uint_8? */
885 warn_odr (t1, t2, NULL, NULL, warn, warned,
886 G_("a different type is defined in another "
887 "translation unit"));
888 return false;
891 /* For canonical type comparisons we do not want to build SCCs
892 so we cannot compare pointed-to types. But we can, for now,
893 require the same pointed-to type kind and match what
894 useless_type_conversion_p would do. */
895 if (POINTER_TYPE_P (t1))
897 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
898 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
900 warn_odr (t1, t2, NULL, NULL, warn, warned,
901 G_("it is defined as a pointer in different address "
902 "space in another translation unit"));
903 return false;
906 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
908 warn_odr (t1, t2, NULL, NULL, warn, warned,
909 G_("it is defined as a pointer to different type "
910 "in another translation unit"));
911 if (warn && warned)
912 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
913 return false;
917 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
918 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
920 /* Probably specific enough. */
921 warn_odr (t1, t2, NULL, NULL, warn, warned,
922 G_("a different type is defined "
923 "in another translation unit"));
924 if (warn && warned)
925 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
926 return false;
929 /* Do type-specific comparisons. */
930 else switch (TREE_CODE (t1))
932 case ARRAY_TYPE:
934 /* Array types are the same if the element types are the same and
 935 the number of elements is the same. */
936 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
938 warn_odr (t1, t2, NULL, NULL, warn, warned,
939 G_("a different type is defined in another "
940 "translation unit"));
941 if (warn && warned)
942 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
944 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
945 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
946 == TYPE_NONALIASED_COMPONENT (t2));
948 tree i1 = TYPE_DOMAIN (t1);
949 tree i2 = TYPE_DOMAIN (t2);
951 /* For an incomplete external array, the type domain can be
952 NULL_TREE. Check this condition also. */
953 if (i1 == NULL_TREE || i2 == NULL_TREE)
954 return true;
956 tree min1 = TYPE_MIN_VALUE (i1);
957 tree min2 = TYPE_MIN_VALUE (i2);
958 tree max1 = TYPE_MAX_VALUE (i1);
959 tree max2 = TYPE_MAX_VALUE (i2);
961 /* In C++, minimums should be always 0. */
962 gcc_assert (min1 == min2);
963 if (!operand_equal_p (max1, max2, 0))
965 warn_odr (t1, t2, NULL, NULL, warn, warned,
966 G_("an array of different size is defined "
967 "in another translation unit"));
968 return false;
971 break;
973 case METHOD_TYPE:
974 case FUNCTION_TYPE:
 975 /* Function types are the same if the return type and argument types
976 are the same. */
977 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
979 warn_odr (t1, t2, NULL, NULL, warn, warned,
980 G_("has different return value "
981 "in another translation unit"));
982 if (warn && warned)
983 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
984 return false;
987 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
988 return true;
989 else
991 tree parms1, parms2;
993 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
994 parms1 && parms2;
995 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
997 if (!odr_subtypes_equivalent_p
998 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1000 warn_odr (t1, t2, NULL, NULL, warn, warned,
1001 G_("has different parameters in another "
1002 "translation unit"));
1003 if (warn && warned)
1004 warn_types_mismatch (TREE_VALUE (parms1),
1005 TREE_VALUE (parms2));
1006 return false;
1010 if (parms1 || parms2)
1012 warn_odr (t1, t2, NULL, NULL, warn, warned,
1013 G_("has different parameters "
1014 "in another translation unit"));
1015 return false;
1018 return true;
1021 case RECORD_TYPE:
1022 case UNION_TYPE:
1023 case QUAL_UNION_TYPE:
1025 tree f1, f2;
1027 /* For aggregate types, all the fields must be the same. */
1028 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1030 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1031 f1 || f2;
1032 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1034 /* Skip non-fields. */
1035 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1036 f1 = TREE_CHAIN (f1);
1037 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1038 f2 = TREE_CHAIN (f2);
1039 if (!f1 || !f2)
1040 break;
1041 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1042 break;
1043 if (DECL_NAME (f1) != DECL_NAME (f2)
1044 && !DECL_ARTIFICIAL (f1))
1046 warn_odr (t1, t2, f1, f2, warn, warned,
1047 G_("a field with different name is defined "
1048 "in another translation unit"));
1049 return false;
1051 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1053 /* Do not warn about artificial fields and just go into generic
1054 field mismatch warning. */
1055 if (DECL_ARTIFICIAL (f1))
1056 break;
1058 warn_odr (t1, t2, f1, f2, warn, warned,
1059 G_("a field of same name but different type "
1060 "is defined in another translation unit"));
1061 if (warn && warned)
1062 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1063 return false;
1065 if (!gimple_compare_field_offset (f1, f2))
1067 /* Do not warn about artificial fields and just go into generic
1068 field mismatch warning. */
1069 if (DECL_ARTIFICIAL (f1))
1070 break;
1071 warn_odr (t1, t2, t1, t2, warn, warned,
1072 G_("fields has different layout "
1073 "in another translation unit"));
1074 return false;
1076 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1077 == DECL_NONADDRESSABLE_P (f2));
1080 /* If one aggregate has more fields than the other, they
1081 are not the same. */
1082 if (f1 || f2)
1084 if (f1 && DECL_ARTIFICIAL (f1))
1085 f1 = NULL;
1086 if (f2 && DECL_ARTIFICIAL (f2))
1087 f2 = NULL;
1088 if (f1 || f2)
1089 warn_odr (t1, t2, f1, f2, warn, warned,
1090 G_("a type with different number of fields "
1091 "is defined in another translation unit"));
1092 /* Ideally we should never get this generic message. */
1093 else
1094 warn_odr (t1, t2, f1, f2, warn, warned,
1095 G_("a type with different memory representation "
1096 "is defined in another translation unit"));
1098 return false;
1100 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1101 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1102 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1104 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1105 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1106 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1108 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1110 warn_odr (t1, t2, f1, f2, warn, warned,
1111 G_("a different method of same type "
1112 "is defined in another translation unit"));
1113 return false;
1115 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1117 warn_odr (t1, t2, f1, f2, warn, warned,
1118 G_("s definition that differs by virtual "
1119 "keyword in another translation unit"));
1120 return false;
1122 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1124 warn_odr (t1, t2, f1, f2, warn, warned,
1125 G_("virtual table layout differs in another "
1126 "translation unit"));
1127 return false;
 1129 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1131 warn_odr (t1, t2, f1, f2, warn, warned,
1132 G_("method with incompatible type is defined "
1133 "in another translation unit"));
1134 return false;
1137 if (f1 || f2)
1139 warn_odr (t1, t2, NULL, NULL, warn, warned,
1140 G_("a type with different number of methods "
1141 "is defined in another translation unit"));
1142 return false;
1146 break;
1148 case VOID_TYPE:
1149 break;
1151 default:
1152 debug_tree (t1);
1153 gcc_unreachable ();
1156 /* Those are better to come last as they are utterly uninformative. */
1157 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1158 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1160 warn_odr (t1, t2, NULL, NULL, warn, warned,
1161 G_("a type with different size "
1162 "is defined in another translation unit"));
1163 return false;
1165 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1166 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1168 warn_odr (t1, t2, NULL, NULL, warn, warned,
1169 G_("a type with different alignment "
1170 "is defined in another translation unit"));
1171 return false;
1173 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1174 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1175 TYPE_SIZE_UNIT (t2), 0));
1176 return true;
1179 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1180 from VAL->type. This may happen in LTO where tree merging did not merge
 1181 all variants of the same type. It may or may not indicate an ODR violation.
1182 Add it to the list of duplicates and warn on some violations. */
1184 static bool
1185 add_type_duplicate (odr_type val, tree type)
1187 bool build_bases = false;
1188 if (!val->types_set)
1189 val->types_set = new hash_set<tree>;
1191 /* Always prefer complete type to be the leader. */
1192 if (!COMPLETE_TYPE_P (val->type)
1193 && COMPLETE_TYPE_P (type))
1195 tree tmp = type;
1197 build_bases = true;
1198 type = val->type;
1199 val->type = tmp;
1202 /* See if this duplicate is new. */
1203 if (!val->types_set->add (type))
1205 bool merge = true;
1206 bool base_mismatch = false;
1207 unsigned int i,j;
1208 bool warned = false;
1209 hash_set<type_pair,pair_traits> visited;
1211 gcc_assert (in_lto_p);
1212 vec_safe_push (val->types, type);
1214 /* First we compare memory layout. */
1215 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
1216 &warned, &visited))
1218 merge = false;
1219 odr_violation_reported = true;
1220 val->odr_violated = true;
1221 if (symtab->dump_file)
1223 fprintf (symtab->dump_file, "ODR violation\n");
1225 print_node (symtab->dump_file, "", val->type, 0);
1226 putc ('\n',symtab->dump_file);
1227 print_node (symtab->dump_file, "", type, 0);
1228 putc ('\n',symtab->dump_file);
1232 /* Next sanity check that bases are the same. If not, we will end
1233 up producing wrong answers. */
1234 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1235 && TREE_CODE (val->type) == RECORD_TYPE
1236 && TREE_CODE (type) == RECORD_TYPE
1237 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1239 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1240 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
1242 odr_type base = get_odr_type
1243 (BINFO_TYPE
1244 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1245 i)),
1246 true);
1247 if (val->bases.length () <= j || val->bases[j] != base)
1248 base_mismatch = true;
1249 j++;
1251 if (base_mismatch)
1253 merge = false;
1254 odr_violation_reported = true;
1256 if (!warned && !val->odr_violated)
1257 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1258 "a type with the same name but different bases is "
1259 "defined in another translation unit");
1260 val->odr_violated = true;
1261 if (symtab->dump_file)
 1263 fprintf (symtab->dump_file, "ODR base violation or merging bug?\n");
1265 print_node (symtab->dump_file, "", val->type, 0);
1266 putc ('\n',symtab->dump_file);
1267 print_node (symtab->dump_file, "", type, 0);
1268 putc ('\n',symtab->dump_file);
1273 /* Regularize things a little. During LTO same types may come with
1274 different BINFOs. Either because their virtual table was
1275 not merged by tree merging and only later at decl merging or
1276 because one type comes with external vtable, while other
1277 with internal. We want to merge equivalent binfos to conserve
1278 memory and streaming overhead.
1280 The external vtables are more harmful: they contain references
1281 to external declarations of methods that may be defined in the
1282 merged LTO unit. For this reason we absolutely need to remove
1283 them and replace by internal variants. Not doing so will lead
1284 to incomplete answers from possible_polymorphic_call_targets.
 1286 FIXME: disabled for now; because ODR types are now built during
 1287 streaming in, the variants do not need to be linked to the type,
 1288 yet. We need to do the merging in a cleanup pass to be implemented
 1289 soon. */
1290 if (!flag_ltrans && merge
1291 && 0
1292 && TREE_CODE (val->type) == RECORD_TYPE
1293 && TREE_CODE (type) == RECORD_TYPE
1294 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1295 && TYPE_MAIN_VARIANT (type) == type
1296 && TYPE_MAIN_VARIANT (val->type) == val->type
1297 && BINFO_VTABLE (TYPE_BINFO (val->type))
1298 && BINFO_VTABLE (TYPE_BINFO (type)))
1300 tree master_binfo = TYPE_BINFO (val->type);
1301 tree v1 = BINFO_VTABLE (master_binfo);
1302 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1304 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1306 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1307 && operand_equal_p (TREE_OPERAND (v1, 1),
1308 TREE_OPERAND (v2, 1), 0));
1309 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1310 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1312 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1313 == DECL_ASSEMBLER_NAME (v2));
1315 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1317 unsigned int i;
1319 set_type_binfo (val->type, TYPE_BINFO (type));
1320 for (i = 0; i < val->types->length (); i++)
1322 if (TYPE_BINFO ((*val->types)[i])
1323 == master_binfo)
1324 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1326 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1328 else
1329 set_type_binfo (type, master_binfo);
1332 return build_bases;
1335 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1336 possibly new entry. */
1338 odr_type
1339 get_odr_type (tree type, bool insert)
1341 odr_type_d **slot;
1342 odr_type val;
1343 hashval_t hash;
1344 bool build_bases = false;
1345 bool insert_to_odr_array = false;
1346 int base_id = -1;
1348 type = main_odr_variant (type);
1350 hash = hash_type_name (type);
1351 slot = odr_hash->find_slot_with_hash (type, hash,
1352 insert ? INSERT : NO_INSERT);
1353 if (!slot)
1354 return NULL;
1356 /* See if we already have entry for type. */
1357 if (*slot)
1359 val = *slot;
1361 /* With LTO we need to support multiple tree representation of
1362 the same ODR type. */
1363 if (val->type != type)
1364 build_bases = add_type_duplicate (val, type);
1366 else
1368 val = ggc_cleared_alloc<odr_type_d> ();
1369 val->type = type;
1370 val->bases = vNULL;
1371 val->derived_types = vNULL;
1372 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1373 build_bases = COMPLETE_TYPE_P (val->type);
1374 insert_to_odr_array = true;
1377 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1378 && type == TYPE_MAIN_VARIANT (type))
1380 tree binfo = TYPE_BINFO (type);
1381 unsigned int i;
 1383 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1385 val->all_derivations_known = type_all_derivations_known_p (type);
1386 *slot = val;
1387 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
 1388 /* For now record only polymorphic types. Others are
1389 pointless for devirtualization and we can not precisely
1390 determine ODR equivalency of these during LTO. */
1391 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1393 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1394 i)),
1395 true);
1396 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1397 base->derived_types.safe_push (val);
1398 val->bases.safe_push (base);
1399 if (base->id > base_id)
1400 base_id = base->id;
1403 /* Ensure that type always appears after bases. */
1404 if (insert_to_odr_array)
1406 if (odr_types_ptr)
1407 val->id = odr_types.length ();
1408 vec_safe_push (odr_types_ptr, val);
1410 else if (base_id > val->id)
1412 odr_types[val->id] = 0;
 1413 /* Be sure we did not record any derived types; these may need
1414 renumbering too. */
1415 gcc_assert (val->derived_types.length() == 0);
1416 if (odr_types_ptr)
1417 val->id = odr_types.length ();
1418 vec_safe_push (odr_types_ptr, val);
1420 return val;
 1423 /* Add TYPE to the ODR type hash. */
1425 void
1426 register_odr_type (tree type)
1428 if (!odr_hash)
1429 odr_hash = new odr_hash_type (23);
1430 /* Arrange things to be nicer and insert main variants first. */
1431 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
1432 get_odr_type (TYPE_MAIN_VARIANT (type), true);
1433 if (TYPE_MAIN_VARIANT (type) != type)
1434 get_odr_type (type, true);
1437 /* Return true if type is known to have no derivations. */
1439 bool
1440 type_known_to_have_no_deriavations_p (tree t)
1442 return (type_all_derivations_known_p (t)
1443 && (TYPE_FINAL_P (t)
1444 || (odr_hash
1445 && !get_odr_type (t, true)->derived_types.length())));
1448 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
1449 recursive printing. */
1451 static void
1452 dump_odr_type (FILE *f, odr_type t, int indent=0)
1454 unsigned int i;
1455 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1456 print_generic_expr (f, t->type, TDF_SLIM);
1457 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1458 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1459 if (TYPE_NAME (t->type))
1461 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1462 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1463 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1465 if (t->bases.length ())
1467 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1468 for (i = 0; i < t->bases.length (); i++)
1469 fprintf (f, " %i", t->bases[i]->id);
1470 fprintf (f, "\n");
1472 if (t->derived_types.length ())
1474 fprintf (f, "%*s derived types:\n", indent * 2, "");
1475 for (i = 0; i < t->derived_types.length (); i++)
1476 dump_odr_type (f, t->derived_types[i], indent + 1);
1478 fprintf (f, "\n");
1481 /* Dump the type inheritance graph. */
1483 static void
1484 dump_type_inheritance_graph (FILE *f)
1486 unsigned int i;
1487 if (!odr_types_ptr)
1488 return;
1489 fprintf (f, "\n\nType inheritance graph:\n");
1490 for (i = 0; i < odr_types.length (); i++)
1492 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1493 dump_odr_type (f, odr_types[i]);
1495 for (i = 0; i < odr_types.length (); i++)
1497 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1499 unsigned int j;
1500 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1501 print_node (f, "", odr_types[i]->type, 0);
1502 for (j = 0; j < odr_types[i]->types->length (); j++)
1504 tree t;
1505 fprintf (f, "duplicate #%i\n", j);
1506 print_node (f, "", (*odr_types[i]->types)[j], 0);
1507 t = (*odr_types[i]->types)[j];
1508 while (TYPE_P (t) && TYPE_CONTEXT (t))
1510 t = TYPE_CONTEXT (t);
1511 print_node (f, "", t, 0);
1513 putc ('\n',f);
 1519 /* Given method type T, return the type of the class it belongs to.
 1520 Look up the `this' pointer and get its type. */
1522 tree
1523 method_class_type (const_tree t)
1525 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1526 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1528 return TREE_TYPE (first_parm_type);
1531 /* Initialize IPA devirt and build inheritance tree graph. */
1533 void
1534 build_type_inheritance_graph (void)
1536 struct symtab_node *n;
1537 FILE *inheritance_dump_file;
1538 int flags;
1540 if (odr_hash)
1541 return;
1542 timevar_push (TV_IPA_INHERITANCE);
1543 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
1544 odr_hash = new odr_hash_type (23);
 1546 /* We reconstruct the graph starting from the types of all methods seen in
 1547 the unit. */
1548 FOR_EACH_SYMBOL (n)
1549 if (is_a <cgraph_node *> (n)
1550 && DECL_VIRTUAL_P (n->decl)
1551 && n->real_symbol_p ())
1552 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1553 true);
1555 /* Look also for virtual tables of types that do not define any methods.
1557 We need it in a case where class B has virtual base of class A
1558 re-defining its virtual method and there is class C with no virtual
1559 methods with B as virtual base.
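	 A hypothetical example of such a hierarchy:

	   struct A { virtual void f (); };
	   struct B : virtual A { void f (); };   // re-defines A::f
	   struct C : virtual B { };              // declares no virtual methods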
 1561 Here we output B's virtual method in two variants - one for non-virtual
 1562 and one for virtual inheritance. B's virtual table has the non-virtual version,
 1563 while C's has the virtual one.
1565 For this reason we need to know about C in order to include both
1566 variants of B. More correctly, record_target_from_binfo should
1567 add both variants of the method when walking B, but we have no
1568 link in between them.
 1570 We rely on the fact that either the method is exported and thus we
 1571 assume it is called externally, or C is in an anonymous namespace and
 1572 thus we will see the vtable. */
1574 else if (is_a <varpool_node *> (n)
1575 && DECL_VIRTUAL_P (n->decl)
1576 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1577 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1578 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
1579 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
1580 if (inheritance_dump_file)
1582 dump_type_inheritance_graph (inheritance_dump_file);
1583 dump_end (TDI_inheritance, inheritance_dump_file);
1585 timevar_pop (TV_IPA_INHERITANCE);
1588 /* Return true if N has reference from live virtual table
1589 (and thus can be a destination of polymorphic call).
1590 Be conservatively correct when callgraph is not built or
1591 if the method may be referred externally. */
1593 static bool
1594 referenced_from_vtable_p (struct cgraph_node *node)
1596 int i;
1597 struct ipa_ref *ref;
1598 bool found = false;
1600 if (node->externally_visible
1601 || DECL_EXTERNAL (node->decl)
1602 || node->used_from_other_partition)
1603 return true;
1605 /* Keep this test constant time.
1606 It is unlikely this can happen except for the case where speculative
1607 devirtualization introduced many speculative edges to this node.
1608 In this case the target is very likely alive anyway. */
1609 if (node->ref_list.referring.length () > 100)
1610 return true;
1612 /* We need references built. */
1613 if (symtab->state <= CONSTRUCTION)
1614 return true;
1616 for (i = 0; node->iterate_referring (i, ref); i++)
1618 if ((ref->use == IPA_REF_ALIAS
1619 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1620 || (ref->use == IPA_REF_ADDR
1621 && TREE_CODE (ref->referring->decl) == VAR_DECL
1622 && DECL_VIRTUAL_P (ref->referring->decl)))
1624 found = true;
1625 break;
1627 return found;
1630 /* If TARGET has associated node, record it in the NODES array.
 1631 CAN_REFER specifies whether the program can refer to the target directly.
 1632 If TARGET is unknown (NULL) or it can not be inserted (for example because
1633 its body was already removed and there is no way to refer to it), clear
1634 COMPLETEP. */
1636 static void
1637 maybe_record_node (vec <cgraph_node *> &nodes,
1638 tree target, hash_set<tree> *inserted,
1639 bool can_refer,
1640 bool *completep)
1642 struct cgraph_node *target_node, *alias_target;
1643 enum availability avail;
1645 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
1646 list of targets; the runtime effect of calling them is undefined.
1647 Only "real" virtual methods should be accounted. */
1648 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1649 return;
1651 if (!can_refer)
1653 /* The only case when method of anonymous namespace becomes unreferable
1654 is when we completely optimized it out. */
1655 if (flag_ltrans
1656 || !target
1657 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
1658 *completep = false;
1659 return;
1662 if (!target)
1663 return;
1665 target_node = cgraph_node::get (target);
1667 /* Prefer alias target over aliases, so we do not get confused by
1668 fake duplicates. */
1669 if (target_node)
1671 alias_target = target_node->ultimate_alias_target (&avail);
1672 if (target_node != alias_target
1673 && avail >= AVAIL_AVAILABLE
1674 && target_node->get_availability ())
1675 target_node = alias_target;
1678 /* Method can only be called by polymorphic call if any
1679 of vtables referring to it are alive.
1681 While this holds for non-anonymous functions, too, there are
1682 cases where we want to keep them in the list; for example
1683 inline functions with -fno-weak are static, but we still
1684 may devirtualize them when instance comes from other unit.
1685 The same holds for LTO.
1687 Currently we ignore these functions in speculative devirtualization.
1688 ??? Maybe it would make sense to be more aggressive for LTO even
1689 elsewhere. */
1690 if (!flag_ltrans
1691 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1692 && (!target_node
1693 || !referenced_from_vtable_p (target_node)))
1695 /* See if TARGET is useful function we can deal with. */
1696 else if (target_node != NULL
1697 && (TREE_PUBLIC (target)
1698 || DECL_EXTERNAL (target)
1699 || target_node->definition)
1700 && target_node->real_symbol_p ())
1702 gcc_assert (!target_node->global.inlined_to);
1703 gcc_assert (target_node->real_symbol_p ());
1704 if (!inserted->add (target))
1706 cached_polymorphic_call_targets->add (target_node);
1707 nodes.safe_push (target_node);
1710 else if (completep
1711 && (!type_in_anonymous_namespace_p
1712 (DECL_CONTEXT (target))
1713 || flag_ltrans))
1714 *completep = false;
1717 /* See if BINFO's type matches OUTER_TYPE. If so, look up
1718 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
1719 method in vtable and insert method to NODES array
1720 or BASES_TO_CONSIDER if this array is non-NULL.
1721 Otherwise recurse to base BINFOs.
1722 This matches what get_binfo_at_offset does, but with offset
1723 being unknown.
1725 TYPE_BINFOS is a stack of BINFOS of types with defined
1726 virtual table seen on way from class type to BINFO.
1728 MATCHED_VTABLES tracks virtual tables we already did lookup
1729 for virtual function in. INSERTED tracks nodes we already
1730 inserted.
1732 ANONYMOUS is true if BINFO is part of anonymous namespace.
1734 Clear COMPLETEP when we hit unreferable target.
1737 static void
1738 record_target_from_binfo (vec <cgraph_node *> &nodes,
1739 vec <tree> *bases_to_consider,
1740 tree binfo,
1741 tree otr_type,
1742 vec <tree> &type_binfos,
1743 HOST_WIDE_INT otr_token,
1744 tree outer_type,
1745 HOST_WIDE_INT offset,
1746 hash_set<tree> *inserted,
1747 hash_set<tree> *matched_vtables,
1748 bool anonymous,
1749 bool *completep)
1751 tree type = BINFO_TYPE (binfo);
1752 int i;
1753 tree base_binfo;
1756 if (BINFO_VTABLE (binfo))
1757 type_binfos.safe_push (binfo);
1758 if (types_same_for_odr (type, outer_type))
1760 int i;
1761 tree type_binfo = NULL;
1763 /* Look up BINFO with virtual table. For normal types it is always last
1764 binfo on stack. */
1765 for (i = type_binfos.length () - 1; i >= 0; i--)
1766 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1768 type_binfo = type_binfos[i];
1769 break;
1771 if (BINFO_VTABLE (binfo))
1772 type_binfos.pop ();
1773 /* If this is duplicated BINFO for base shared by virtual inheritance,
1774 we may not have its associated vtable. This is not a problem, since
1775 we will walk it on the other path. */
1776 if (!type_binfo)
1777 return;
1778 tree inner_binfo = get_binfo_at_offset (type_binfo,
1779 offset, otr_type);
1780 if (!inner_binfo)
1782 gcc_assert (odr_violation_reported);
1783 return;
1785 /* For types in anonymous namespace first check if the respective vtable
1786 is alive. If not, we know the type can't be called. */
1787 if (!flag_ltrans && anonymous)
1789 tree vtable = BINFO_VTABLE (inner_binfo);
1790 varpool_node *vnode;
1792 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1793 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1794 vnode = varpool_node::get (vtable);
1795 if (!vnode || !vnode->definition)
1796 return;
1798 gcc_assert (inner_binfo);
1799 if (bases_to_consider
1800 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
1801 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
1803 bool can_refer;
1804 tree target = gimple_get_virt_method_for_binfo (otr_token,
1805 inner_binfo,
1806 &can_refer);
1807 if (!bases_to_consider)
1808 maybe_record_node (nodes, target, inserted, can_refer, completep);
1809 /* Destructors are never called via construction vtables. */
1810 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1811 bases_to_consider->safe_push (target);
1813 return;
1816 /* Walk bases. */
1817 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1818 /* Walking bases that have no virtual method is pointless exercise. */
1819 if (polymorphic_type_binfo_p (base_binfo))
1820 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
1821 type_binfos,
1822 otr_token, outer_type, offset, inserted,
1823 matched_vtables, anonymous, completep);
1824 if (BINFO_VTABLE (binfo))
1825 type_binfos.pop ();
1828 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1829 of TYPE, insert them into NODES, recurse into derived nodes.
1830 INSERTED is used to avoid duplicate insertions of methods into NODES.
1831 MATCHED_VTABLES is used to avoid walking the same vtable twice.
1832 Clear COMPLETEP if an unreferable target is found.
1834 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
1835 all cases where BASE_SKIPPED is true (because the base is an abstract
1836 class). */
1838 static void
1839 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1840 hash_set<tree> *inserted,
1841 hash_set<tree> *matched_vtables,
1842 tree otr_type,
1843 odr_type type,
1844 HOST_WIDE_INT otr_token,
1845 tree outer_type,
1846 HOST_WIDE_INT offset,
1847 bool *completep,
1848 vec <tree> &bases_to_consider,
1849 bool consider_construction)
1851 tree binfo = TYPE_BINFO (type->type);
1852 unsigned int i;
1853 auto_vec <tree, 8> type_binfos;
1854 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1856 /* We may need to consider types w/o instances because of possible derived
1857 types using their methods either directly or via construction vtables.
1858 We can safely skip them when all derivations are known, since we will
1859 handle them later.
1860 This is done by recording them in the BASES_TO_CONSIDER array. */
1861 if (possibly_instantiated || consider_construction)
1863 record_target_from_binfo (nodes,
1864 (!possibly_instantiated
1865 && type_all_derivations_known_p (type->type))
1866 ? &bases_to_consider : NULL,
1867 binfo, otr_type, type_binfos, otr_token,
1868 outer_type, offset,
1869 inserted, matched_vtables,
1870 type->anonymous_namespace, completep);
1872 for (i = 0; i < type->derived_types.length (); i++)
1873 possible_polymorphic_call_targets_1 (nodes, inserted,
1874 matched_vtables,
1875 otr_type,
1876 type->derived_types[i],
1877 otr_token, outer_type, offset, completep,
1878 bases_to_consider, consider_construction);
1881 /* Cache of queries for polymorphic call targets.
1883 Enumerating all call targets may get expensive when there are many
1884 polymorphic calls in the program, so we memoize all the previous
1885 queries and avoid duplicated work. */
1887 struct polymorphic_call_target_d
1889 HOST_WIDE_INT otr_token;
1890 ipa_polymorphic_call_context context;
1891 odr_type type;
1892 vec <cgraph_node *> targets;
1893 tree decl_warning;
1894 int type_warning;
1895 bool complete;
1896 bool speculative;
1899 /* Polymorphic call target cache helpers. */
1901 struct polymorphic_call_target_hasher
1903 typedef polymorphic_call_target_d value_type;
1904 typedef polymorphic_call_target_d compare_type;
1905 static inline hashval_t hash (const value_type *);
1906 static inline bool equal (const value_type *, const compare_type *);
1907 static inline void remove (value_type *);
1910 /* Return the computed hashcode for ODR_QUERY. */
1912 inline hashval_t
1913 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1915 inchash::hash hstate (odr_query->otr_token);
1917 hstate.add_wide_int (odr_query->type->id);
1918 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1919 hstate.add_wide_int (odr_query->context.offset);
1921 if (odr_query->context.speculative_outer_type)
1923 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1924 hstate.add_wide_int (odr_query->context.speculative_offset);
1926 hstate.add_flag (odr_query->speculative);
1927 hstate.add_flag (odr_query->context.maybe_in_construction);
1928 hstate.add_flag (odr_query->context.maybe_derived_type);
1929 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1930 hstate.commit_flag ();
1931 return hstate.end ();
1934 /* Compare cache entries T1 and T2. */
1936 inline bool
1937 polymorphic_call_target_hasher::equal (const value_type *t1,
1938 const compare_type *t2)
1940 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1941 && t1->speculative == t2->speculative
1942 && t1->context.offset == t2->context.offset
1943 && t1->context.speculative_offset == t2->context.speculative_offset
1944 && t1->context.outer_type == t2->context.outer_type
1945 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1946 && t1->context.maybe_in_construction
1947 == t2->context.maybe_in_construction
1948 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1949 && (t1->context.speculative_maybe_derived_type
1950 == t2->context.speculative_maybe_derived_type));
1953 /* Remove an entry from the polymorphic call target cache hash. */
1955 inline void
1956 polymorphic_call_target_hasher::remove (value_type *v)
1958 v->targets.release ();
1959 free (v);
1962 /* Polymorphic call target query cache. */
1964 typedef hash_table<polymorphic_call_target_hasher>
1965 polymorphic_call_target_hash_type;
1966 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1968 /* Destroy polymorphic call target query cache. */
1970 static void
1971 free_polymorphic_call_targets_hash ()
1973 if (cached_polymorphic_call_targets)
1975 delete polymorphic_call_target_hash;
1976 polymorphic_call_target_hash = NULL;
1977 delete cached_polymorphic_call_targets;
1978 cached_polymorphic_call_targets = NULL;
1982 /* When a virtual function is removed, we may need to flush the cache. */
1984 static void
1985 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1987 if (cached_polymorphic_call_targets
1988 && cached_polymorphic_call_targets->contains (n))
1989 free_polymorphic_call_targets_hash ();
1992 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
1994 tree
1995 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1996 tree vtable)
1998 tree v = BINFO_VTABLE (binfo);
1999 int i;
2000 tree base_binfo;
2001 unsigned HOST_WIDE_INT this_offset;
2003 if (v)
2005 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2006 gcc_unreachable ();
2008 if (offset == this_offset
2009 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2010 return binfo;
2013 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2014 if (polymorphic_type_binfo_p (base_binfo))
2016 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2017 if (base_binfo)
2018 return base_binfo;
2020 return NULL;
2023 /* T is a known constant value of a virtual table pointer.
2024 Store the virtual table in V and its offset in OFFSET.
2025 Return false if T does not look like a virtual table reference. */
2027 bool
2028 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2029 unsigned HOST_WIDE_INT *offset)
2031 /* We expect &MEM[(void *)&virtual_table + 16B].
2032 We obtain the object's BINFO from the context of the virtual table.
2033 This one contains a pointer to the virtual table represented via
2034 POINTER_PLUS_EXPR. Verify that this pointer matches what
2035 we propagated through.
2037 In the case of virtual inheritance, the virtual tables may
2038 be nested, i.e. the offset may be different from 16 and we may
2039 need to dive into the type representation. */
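/* A minimal illustration (hypothetical user source, not part of this file):
   under virtual inheritance the vtable pointer stored in a subobject may
   point into the middle of the enclosing vtable group, so the constant
   offset need not be the usual 16 bytes (typically the offset-to-top and
   RTTI slots on 64-bit targets following the common C++ ABI):

     struct A { virtual void f (); };
     struct B : virtual A { virtual void g (); };

   Here the A-in-B subobject uses a vtable slice at a nonzero offset inside
   B's vtable group, which is why the offset is parsed out of the expression
   above rather than assumed.  */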
2040 if (TREE_CODE (t) == ADDR_EXPR
2041 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2042 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2043 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2044 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2045 == VAR_DECL)
2046 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2047 (TREE_OPERAND (t, 0), 0), 0)))
2049 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2050 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2051 return true;
2054 /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR.
2055 We need to handle it when T comes from a static variable initializer or
2056 BINFO. */
2057 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2059 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2060 t = TREE_OPERAND (t, 0);
2062 else
2063 *offset = 0;
2065 if (TREE_CODE (t) != ADDR_EXPR)
2066 return false;
2067 *v = TREE_OPERAND (t, 0);
2068 return true;
2071 /* T is a known constant value of a virtual table pointer. Return the BINFO
2072 of the instance type. */
2074 tree
2075 vtable_pointer_value_to_binfo (const_tree t)
2077 tree vtable;
2078 unsigned HOST_WIDE_INT offset;
2080 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2081 return NULL_TREE;
2083 /* FIXME: for stores of construction vtables we return NULL,
2084 because we do not have a BINFO for those. Eventually we should fix
2085 our representation to allow this case to be handled, too.
2086 In the case we see a store of a BINFO, however, we may assume
2087 that standard folding will be able to cope with it. */
2088 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2089 offset, vtable);
2092 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2093 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2094 and insert them into NODES.
2096 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
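/* Illustrative example (hypothetical user code, not part of this file):

     struct Base { virtual int f (); };
     struct Derived : Base { int f (); Derived (); };

   While the Base subobject constructor runs during construction of a
   Derived object, the object's vtable pointer refers to Base's vtable, so
   a virtual call made at that point dispatches to Base::f rather than
   Derived::f.  This is why walking the bases of OUTER_TYPE can add extra
   feasible targets whenever the object may still be in construction.  */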
2098 static void
2099 record_targets_from_bases (tree otr_type,
2100 HOST_WIDE_INT otr_token,
2101 tree outer_type,
2102 HOST_WIDE_INT offset,
2103 vec <cgraph_node *> &nodes,
2104 hash_set<tree> *inserted,
2105 hash_set<tree> *matched_vtables,
2106 bool *completep)
2108 while (true)
2110 HOST_WIDE_INT pos, size;
2111 tree base_binfo;
2112 tree fld;
2114 if (types_same_for_odr (outer_type, otr_type))
2115 return;
2117 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2119 if (TREE_CODE (fld) != FIELD_DECL)
2120 continue;
2122 pos = int_bit_position (fld);
2123 size = tree_to_shwi (DECL_SIZE (fld));
2124 if (pos <= offset && (pos + size) > offset
2125 /* Do not get confused by zero sized bases. */
2126 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2127 break;
2129 /* Within a class type we should always find corresponding fields. */
2130 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2132 /* Nonbase types should have been stripped by outer_class_type. */
2133 gcc_assert (DECL_ARTIFICIAL (fld));
2135 outer_type = TREE_TYPE (fld);
2136 offset -= pos;
2138 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2139 offset, otr_type);
2140 if (!base_binfo)
2142 gcc_assert (odr_violation_reported);
2143 return;
2145 gcc_assert (base_binfo);
2146 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2148 bool can_refer;
2149 tree target = gimple_get_virt_method_for_binfo (otr_token,
2150 base_binfo,
2151 &can_refer);
2152 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2153 maybe_record_node (nodes, target, inserted, can_refer, completep);
2154 matched_vtables->add (BINFO_VTABLE (base_binfo));
2159 /* When a virtual table is removed, we may need to flush the cache. */
2161 static void
2162 devirt_variable_node_removal_hook (varpool_node *n,
2163 void *d ATTRIBUTE_UNUSED)
2165 if (cached_polymorphic_call_targets
2166 && DECL_VIRTUAL_P (n->decl)
2167 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2168 free_polymorphic_call_targets_hash ();
2171 /* Record of how many calls would benefit from a given type being final. */
2173 struct odr_type_warn_count
2175 tree type;
2176 int count;
2177 gcov_type dyn_count;
2180 /* Record of how many calls would benefit from a given method being final. */
2182 struct decl_warn_count
2184 tree decl;
2185 int count;
2186 gcov_type dyn_count;
2189 /* Information about type and decl warnings. */
2191 struct final_warning_record
2193 gcov_type dyn_count;
2194 vec<odr_type_warn_count> type_warnings;
2195 hash_map<tree, decl_warn_count> decl_warnings;
2197 struct final_warning_record *final_warning_records;
2199 /* Return a vector containing possible targets of a polymorphic call of type
2200 OTR_TYPE calling the method at OTR_TOKEN within the type OTR_OUTER_TYPE at OFFSET.
2201 If INCLUDE_BASES is true, walk also the base types of OUTER_TYPES containing
2202 OTR_TYPE and include their virtual methods. This is useful for types
2203 possibly in construction or destruction, where the virtual table may
2204 temporarily change to that of a base type. INCLUDE_DERIVER_TYPES makes
2205 us walk the inheritance graph for all derivations.
2207 If COMPLETEP is non-NULL, store true if the list is complete.
2208 CACHE_TOKEN (if non-NULL) will get set to a unique ID of the entry
2209 in the target cache. If the user needs to visit every target list
2210 just once, it can use this to memoize them.
2212 If SPECULATIVE is set, the list will not contain targets that
2213 are not speculatively taken.
2215 The returned vector is placed into the cache; it is NOT the caller's
2216 responsibility to free it. The vector can be freed on a cgraph_remove_node
2217 call if the particular node is a virtual function present in the cache. */
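/* A minimal usage sketch (hypothetical caller, not part of this file),
   assuming OTR_TYPE, OTR_TOKEN and CTX were obtained from an OBJ_TYPE_REF
   call:

     bool final;
     void *cache_token;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token, ctx,
                                            &final, &cache_token, false);
     if (final && targets.length () == 1)
       ... the call can be devirtualized to targets[0] ...

   CACHE_TOKEN identifies the shared cached list, so a pass can skip target
   lists it has already examined; the vector is owned by the cache and must
   not be freed by the caller.  */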
2219 vec <cgraph_node *>
2220 possible_polymorphic_call_targets (tree otr_type,
2221 HOST_WIDE_INT otr_token,
2222 ipa_polymorphic_call_context context,
2223 bool *completep,
2224 void **cache_token,
2225 bool speculative)
2227 static struct cgraph_node_hook_list *node_removal_hook_holder;
2228 vec <cgraph_node *> nodes = vNULL;
2229 auto_vec <tree, 8> bases_to_consider;
2230 odr_type type, outer_type;
2231 polymorphic_call_target_d key;
2232 polymorphic_call_target_d **slot;
2233 unsigned int i;
2234 tree binfo, target;
2235 bool complete;
2236 bool can_refer = false;
2237 bool skipped = false;
2239 otr_type = TYPE_MAIN_VARIANT (otr_type);
2241 /* If ODR is not initialized or the context is invalid, return an empty,
2242 incomplete list. */
2243 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
2245 if (completep)
2246 *completep = context.invalid;
2247 if (cache_token)
2248 *cache_token = NULL;
2249 return nodes;
2252 /* Do not bother to compute speculative info when the user does not ask for it. */
2253 if (!speculative || !context.speculative_outer_type)
2254 context.clear_speculation ();
2256 type = get_odr_type (otr_type, true);
2258 /* Recording type variants would waste the results cache. */
2259 gcc_assert (!context.outer_type
2260 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2262 /* Look up the outer class type we want to walk.
2263 If we fail to do so, the context is invalid. */
2264 if ((context.outer_type || context.speculative_outer_type)
2265 && !context.restrict_to_inner_class (otr_type))
2267 if (completep)
2268 *completep = true;
2269 if (cache_token)
2270 *cache_token = NULL;
2271 return nodes;
2273 gcc_assert (!context.invalid);
2275 /* Check that restrict_to_inner_class kept the main variant. */
2276 gcc_assert (!context.outer_type
2277 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2279 /* We canonicalize our query, so we do not need extra hashtable entries. */
2281 /* Without outer type, we have no use for offset. Just do the
2282 basic search from inner type. */
2283 if (!context.outer_type)
2284 context.clear_outer_type (otr_type);
2285 /* We need to update our hierarchy if the type does not exist. */
2286 outer_type = get_odr_type (context.outer_type, true);
2287 /* If the type is final, there are no derivations. */
2288 if (TYPE_FINAL_P (outer_type->type))
2289 context.maybe_derived_type = false;
2291 /* Initialize query cache. */
2292 if (!cached_polymorphic_call_targets)
2294 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
2295 polymorphic_call_target_hash
2296 = new polymorphic_call_target_hash_type (23);
2297 if (!node_removal_hook_holder)
2299 node_removal_hook_holder =
2300 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
2301 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
2302 NULL);
2306 if (in_lto_p)
2308 if (context.outer_type != otr_type)
2309 context.outer_type
2310 = get_odr_type (context.outer_type, true)->type;
2311 if (context.speculative_outer_type)
2312 context.speculative_outer_type
2313 = get_odr_type (context.speculative_outer_type, true)->type;
2316 /* Look up cached answer. */
2317 key.type = type;
2318 key.otr_token = otr_token;
2319 key.speculative = speculative;
2320 key.context = context;
2321 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
2322 if (cache_token)
2323 *cache_token = (void *)*slot;
2324 if (*slot)
2326 if (completep)
2327 *completep = (*slot)->complete;
2328 if ((*slot)->type_warning && final_warning_records)
2330 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
2331 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
2332 += final_warning_records->dyn_count;
2334 if (!speculative && (*slot)->decl_warning && final_warning_records)
2336 struct decl_warn_count *c =
2337 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
2338 c->count++;
2339 c->dyn_count += final_warning_records->dyn_count;
2341 return (*slot)->targets;
2344 complete = true;
2346 /* Do actual search. */
2347 timevar_push (TV_IPA_VIRTUAL_CALL);
2348 *slot = XCNEW (polymorphic_call_target_d);
2349 if (cache_token)
2350 *cache_token = (void *)*slot;
2351 (*slot)->type = type;
2352 (*slot)->otr_token = otr_token;
2353 (*slot)->context = context;
2354 (*slot)->speculative = speculative;
2356 hash_set<tree> inserted;
2357 hash_set<tree> matched_vtables;
2359 /* First insert targets we speculatively identified as likely. */
2360 if (context.speculative_outer_type)
2362 odr_type speculative_outer_type;
2363 bool speculation_complete = true;
2365 /* First insert target from type itself and check if it may have
2366 derived types. */
2367 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
2368 if (TYPE_FINAL_P (speculative_outer_type->type))
2369 context.speculative_maybe_derived_type = false;
2370 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
2371 context.speculative_offset, otr_type);
2372 if (binfo)
2373 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2374 &can_refer);
2375 else
2376 target = NULL;
2378 /* In case we get a final method, we don't need
2379 to walk derivations. */
2380 if (target && DECL_FINAL_P (target))
2381 context.speculative_maybe_derived_type = false;
2382 if (type_possibly_instantiated_p (speculative_outer_type->type))
2383 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
2384 if (binfo)
2385 matched_vtables.add (BINFO_VTABLE (binfo));
2388 /* Next walk recursively all derived types. */
2389 if (context.speculative_maybe_derived_type)
2390 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
2391 possible_polymorphic_call_targets_1 (nodes, &inserted,
2392 &matched_vtables,
2393 otr_type,
2394 speculative_outer_type->derived_types[i],
2395 otr_token, speculative_outer_type->type,
2396 context.speculative_offset,
2397 &speculation_complete,
2398 bases_to_consider,
2399 false);
2402 if (!speculative || !nodes.length ())
2404 /* First see virtual method of type itself. */
2405 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2406 context.offset, otr_type);
2407 if (binfo)
2408 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2409 &can_refer);
2410 else
2412 gcc_assert (odr_violation_reported);
2413 target = NULL;
2416 /* Destructors are never called through construction virtual tables,
2417 because the type is always known. */
2418 if (target && DECL_CXX_DESTRUCTOR_P (target))
2419 context.maybe_in_construction = false;
2421 if (target)
2423 /* In case we get a final method, we don't need
2424 to walk derivations. */
2425 if (DECL_FINAL_P (target))
2426 context.maybe_derived_type = false;
2429 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2430 if (type_possibly_instantiated_p (outer_type->type))
2431 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2432 else
2433 skipped = true;
2435 if (binfo)
2436 matched_vtables.add (BINFO_VTABLE (binfo));
2438 /* Next walk recursively all derived types. */
2439 if (context.maybe_derived_type)
2441 for (i = 0; i < outer_type->derived_types.length(); i++)
2442 possible_polymorphic_call_targets_1 (nodes, &inserted,
2443 &matched_vtables,
2444 otr_type,
2445 outer_type->derived_types[i],
2446 otr_token, outer_type->type,
2447 context.offset, &complete,
2448 bases_to_consider,
2449 context.maybe_in_construction);
2451 if (!outer_type->all_derivations_known)
2453 if (!speculative && final_warning_records)
2455 if (complete
2456 && nodes.length () == 1
2457 && warn_suggest_final_types
2458 && !outer_type->derived_types.length ())
2460 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
2461 final_warning_records->type_warnings.safe_grow_cleared
2462 (odr_types.length ());
2463 final_warning_records->type_warnings[outer_type->id].count++;
2464 final_warning_records->type_warnings[outer_type->id].dyn_count
2465 += final_warning_records->dyn_count;
2466 final_warning_records->type_warnings[outer_type->id].type
2467 = outer_type->type;
2468 (*slot)->type_warning = outer_type->id + 1;
2470 if (complete
2471 && warn_suggest_final_methods
2472 && nodes.length () == 1
2473 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
2474 outer_type->type))
2476 bool existed;
2477 struct decl_warn_count &c =
2478 final_warning_records->decl_warnings.get_or_insert
2479 (nodes[0]->decl, &existed);
2481 if (existed)
2483 c.count++;
2484 c.dyn_count += final_warning_records->dyn_count;
2486 else
2488 c.count = 1;
2489 c.dyn_count = final_warning_records->dyn_count;
2490 c.decl = nodes[0]->decl;
2492 (*slot)->decl_warning = nodes[0]->decl;
2495 complete = false;
2499 if (!speculative)
2501 /* Destructors are never called through construction virtual tables,
2502 because the type is always known. One of the entries may be
2503 cxa_pure_virtual, so check at least two of them. */
2504 if (context.maybe_in_construction)
2505 for (i =0 ; i < MIN (nodes.length (), 2); i++)
2506 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2507 context.maybe_in_construction = false;
2508 if (context.maybe_in_construction)
2510 if (type != outer_type
2511 && (!skipped
2512 || (context.maybe_derived_type
2513 && !type_all_derivations_known_p (outer_type->type))))
2514 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2515 context.offset, nodes, &inserted,
2516 &matched_vtables, &complete);
2517 if (skipped)
2518 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2519 for (i = 0; i < bases_to_consider.length(); i++)
2520 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
2525 (*slot)->targets = nodes;
2526 (*slot)->complete = complete;
2527 if (completep)
2528 *completep = complete;
2530 timevar_pop (TV_IPA_VIRTUAL_CALL);
2531 return nodes;
2534 bool
2535 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
2536 vec<const decl_warn_count*> *vec)
2538 vec->safe_push (&value);
2539 return true;
2542 /* Dump target list TARGETS into FILE. */
2544 static void
2545 dump_targets (FILE *f, vec <cgraph_node *> targets)
2547 unsigned int i;
2549 for (i = 0; i < targets.length (); i++)
2551 char *name = NULL;
2552 if (in_lto_p)
2553 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2554 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2555 if (in_lto_p)
2556 free (name);
2557 if (!targets[i]->definition)
2558 fprintf (f, " (no definition%s)",
2559 DECL_DECLARED_INLINE_P (targets[i]->decl)
2560 ? " inline" : "");
2562 fprintf (f, "\n");
2565 /* Dump all possible targets of a polymorphic call. */
2567 void
2568 dump_possible_polymorphic_call_targets (FILE *f,
2569 tree otr_type,
2570 HOST_WIDE_INT otr_token,
2571 const ipa_polymorphic_call_context &ctx)
2573 vec <cgraph_node *> targets;
2574 bool final;
2575 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2576 unsigned int len;
2578 if (!type)
2579 return;
2580 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2581 ctx,
2582 &final, NULL, false);
2583 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2584 print_generic_expr (f, type->type, TDF_SLIM);
2585 fprintf (f, " token %i\n", (int)otr_token);
2587 ctx.dump (f);
2589 fprintf (f, " %s%s%s%s\n ",
2590 final ? "This is a complete list." :
2591 "This is partial list; extra targets may be defined in other units.",
2592 ctx.maybe_in_construction ? " (base types included)" : "",
2593 ctx.maybe_derived_type ? " (derived types included)" : "",
2594 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
2595 len = targets.length ();
2596 dump_targets (f, targets);
2598 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2599 ctx,
2600 &final, NULL, true);
2601 gcc_assert (targets.length () <= len);
2602 if (targets.length () != len)
2604 fprintf (f, " Speculative targets:");
2605 dump_targets (f, targets);
2607 fprintf (f, "\n");
2611 /* Return true if N can possibly be a target of a polymorphic call of
2612 OTR_TYPE/OTR_TOKEN. */
2614 bool
2615 possible_polymorphic_call_target_p (tree otr_type,
2616 HOST_WIDE_INT otr_token,
2617 const ipa_polymorphic_call_context &ctx,
2618 struct cgraph_node *n)
2620 vec <cgraph_node *> targets;
2621 unsigned int i;
2622 enum built_in_function fcode;
2623 bool final;
2625 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2626 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2627 == BUILT_IN_UNREACHABLE
2628 || fcode == BUILT_IN_TRAP))
2629 return true;
2631 if (!odr_hash)
2632 return true;
2633 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2634 for (i = 0; i < targets.length (); i++)
2635 if (n->semantically_equivalent_p (targets[i]))
2636 return true;
2638 /* At the moment we allow the middle end to dig out new external declarations
2639 as possible targets of polymorphic calls. */
2640 if (!final && !n->definition)
2641 return true;
2642 return false;
2647 /* Return true if N can possibly be a target of a polymorphic call of
2648 OBJ_TYPE_REF expression REF in STMT. */
2650 bool
2651 possible_polymorphic_call_target_p (tree ref,
2652 gimple stmt,
2653 struct cgraph_node *n)
2655 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
2656 tree call_fn = gimple_call_fn (stmt);
2658 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
2659 tree_to_uhwi
2660 (OBJ_TYPE_REF_TOKEN (call_fn)),
2661 context,
2666 /* After callgraph construction new external nodes may appear.
2667 Add them into the graph. */
2669 void
2670 update_type_inheritance_graph (void)
2672 struct cgraph_node *n;
2674 if (!odr_hash)
2675 return;
2676 free_polymorphic_call_targets_hash ();
2677 timevar_push (TV_IPA_INHERITANCE);
2678 /* We reconstruct the graph starting from types of all methods seen in the
2679 unit. */
2680 FOR_EACH_FUNCTION (n)
2681 if (DECL_VIRTUAL_P (n->decl)
2682 && !n->definition
2683 && n->real_symbol_p ())
2684 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2685 true);
2686 timevar_pop (TV_IPA_INHERITANCE);
2690 /* Return true if N looks like a likely target of a polymorphic call.
2691 Rule out cxa_pure_virtual, noreturns, functions declared cold, and
2692 other obvious cases. */
2694 bool
2695 likely_target_p (struct cgraph_node *n)
2697 int flags;
2698 /* cxa_pure_virtual and similar things are not likely. */
2699 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2700 return false;
2701 flags = flags_from_decl_or_type (n->decl);
2702 if (flags & ECF_NORETURN)
2703 return false;
2704 if (lookup_attribute ("cold",
2705 DECL_ATTRIBUTES (n->decl)))
2706 return false;
2707 if (n->frequency < NODE_FREQUENCY_NORMAL)
2708 return false;
2709 /* If there are no live virtual tables referring to the target,
2710 the only way the target can be called is via an instance coming from another
2711 compilation unit; speculative devirtualization is built around the
2712 assumption that this won't happen. */
2713 if (!referenced_from_vtable_p (n))
2714 return false;
2715 return true;
2718 /* Compare type warning records P1 and P2 and choose the one with the larger
2719 count; helper for qsort. */
2721 static int
2722 type_warning_cmp (const void *p1, const void *p2)
2724 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
2725 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
2727 if (t1->dyn_count < t2->dyn_count)
2728 return 1;
2729 if (t1->dyn_count > t2->dyn_count)
2730 return -1;
2731 return t2->count - t1->count;
2734 /* Compare decl warning records P1 and P2 and choose the one with the larger
2735 count; helper for qsort. */
2737 static int
2738 decl_warning_cmp (const void *p1, const void *p2)
2740 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
2741 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
2743 if (t1->dyn_count < t2->dyn_count)
2744 return 1;
2745 if (t1->dyn_count > t2->dyn_count)
2746 return -1;
2747 return t2->count - t1->count;
2751 /* Try to speculatively devirtualize a call to OTR_TYPE with OTR_TOKEN in
2752 context CTX. */
2754 struct cgraph_node *
2755 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
2756 ipa_polymorphic_call_context ctx)
2758 vec <cgraph_node *>targets
2759 = possible_polymorphic_call_targets
2760 (otr_type, otr_token, ctx, NULL, NULL, true);
2761 unsigned int i;
2762 struct cgraph_node *likely_target = NULL;
2764 for (i = 0; i < targets.length (); i++)
2765 if (likely_target_p (targets[i]))
2767 if (likely_target)
2768 return NULL;
2769 likely_target = targets[i];
2771 if (!likely_target
2772 ||!likely_target->definition
2773 || DECL_EXTERNAL (likely_target->decl))
2774 return NULL;
2776 /* Don't use an implicitly-declared destructor (c++/58678). */
2777 struct cgraph_node *non_thunk_target
2778 = likely_target->function_symbol ();
2779 if (DECL_ARTIFICIAL (non_thunk_target->decl))
2780 return NULL;
2781 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
2782 && likely_target->can_be_discarded_p ())
2783 return NULL;
2784 return likely_target;
2787 /* The ipa-devirt pass.
2788 When a polymorphic call has only one likely target in the unit,
2789 turn it into a speculative call. */
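/* Conceptually (a sketch, not literal GIMPLE), a polymorphic call with a
   single likely target T

     obj->foo ();                // indirect call through the vtable

   behaves, once the speculative edge recorded below is expanded, roughly as

     if (fn == T)                // fn is the pointer loaded from the vtable
       T (obj);                  // direct call, now visible to the inliner
     else
       fn (obj);                 // original indirect call kept as fallback

   so later passes may inline or specialize the direct branch while the
   program stays correct when the speculation turns out to be wrong.  */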
2791 static unsigned int
2792 ipa_devirt (void)
2794 struct cgraph_node *n;
2795 hash_set<void *> bad_call_targets;
2796 struct cgraph_edge *e;
2798 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2799 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2800 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2802 if (!odr_types_ptr)
2803 return 0;
2805 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
2806 This is implemented by setting up final_warning_records that are updated
2807 by get_polymorphic_call_targets.
2808 We need to clear the cache in this case to trigger recomputation of all
2809 entries. */
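/* For example (hypothetical user code, not part of this file), given

     struct S { virtual void f (); };   // no derived types anywhere in the program

   -Wsuggest-final-types suggests declaring "struct S final" and
   -Wsuggest-final-methods suggests declaring "virtual void f () final";
   either annotation lets calls through an S* be devirtualized to S::f.  */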
2810 if (warn_suggest_final_methods || warn_suggest_final_types)
2812 final_warning_records = new (final_warning_record);
2813 final_warning_records->type_warnings = vNULL;
2814 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
2815 free_polymorphic_call_targets_hash ();
2818 FOR_EACH_DEFINED_FUNCTION (n)
2820 bool update = false;
2821 if (!opt_for_fn (n->decl, flag_devirtualize))
2822 continue;
2823 if (dump_file && n->indirect_calls)
2824 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
2825 n->name (), n->order);
2826 for (e = n->indirect_calls; e; e = e->next_callee)
2827 if (e->indirect_info->polymorphic)
2829 struct cgraph_node *likely_target = NULL;
2830 void *cache_token;
2831 bool final;
2833 if (final_warning_records)
2834 final_warning_records->dyn_count = e->count;
2836 vec <cgraph_node *>targets
2837 = possible_polymorphic_call_targets
2838 (e, &final, &cache_token, true);
2839 unsigned int i;
2841 /* Trigger warnings by calculating non-speculative targets. */
2842 if (warn_suggest_final_methods || warn_suggest_final_types)
2843 possible_polymorphic_call_targets (e);
2845 if (dump_file)
2846 dump_possible_polymorphic_call_targets
2847 (dump_file, e);
2849 npolymorphic++;
2851 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
2852 continue;
2854 if (!e->maybe_hot_p ())
2856 if (dump_file)
2857 fprintf (dump_file, "Call is cold\n\n");
2858 ncold++;
2859 continue;
2861 if (e->speculative)
2863 if (dump_file)
2864 fprintf (dump_file, "Call is already speculated\n\n");
2865 nspeculated++;
2867 /* When dumping see if we agree with speculation. */
2868 if (!dump_file)
2869 continue;
2871 if (bad_call_targets.contains (cache_token))
2873 if (dump_file)
2874 fprintf (dump_file, "Target list is known to be useless\n\n");
2875 nmultiple++;
2876 continue;
2878 for (i = 0; i < targets.length (); i++)
2879 if (likely_target_p (targets[i]))
2881 if (likely_target)
2883 likely_target = NULL;
2884 if (dump_file)
2885 fprintf (dump_file, "More than one likely target\n\n");
2886 nmultiple++;
2887 break;
2889 likely_target = targets[i];
2891 if (!likely_target)
2893 bad_call_targets.add (cache_token);
2894 continue;
2896 /* This is reached only when dumping; check if we agree or disagree
2897 with the speculation. */
2898 if (e->speculative)
2900 struct cgraph_edge *e2;
2901 struct ipa_ref *ref;
2902 e->speculative_call_info (e2, e, ref);
2903 if (e2->callee->ultimate_alias_target ()
2904 == likely_target->ultimate_alias_target ())
2906 fprintf (dump_file, "We agree with speculation\n\n");
2907 nok++;
2909 else
2911 fprintf (dump_file, "We disagree with speculation\n\n");
2912 nwrong++;
2914 continue;
2916 if (!likely_target->definition)
2918 if (dump_file)
2919 fprintf (dump_file, "Target is not a definition\n\n");
2920 nnotdefined++;
2921 continue;
2923 /* Do not introduce new references to external symbols. While we
2924 can handle these just well, it is common for programs to be built
2925 incorrectly, with headers declaring methods that the program is not
2926 actually linked with. */
2927 if (DECL_EXTERNAL (likely_target->decl))
2929 if (dump_file)
2930 fprintf (dump_file, "Target is external\n\n");
2931 nexternal++;
2932 continue;
2934 /* Don't use an implicitly-declared destructor (c++/58678). */
2935 struct cgraph_node *non_thunk_target
2936 = likely_target->function_symbol ();
2937 if (DECL_ARTIFICIAL (non_thunk_target->decl))
2939 if (dump_file)
2940 fprintf (dump_file, "Target is artificial\n\n");
2941 nartificial++;
2942 continue;
2944 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
2945 && likely_target->can_be_discarded_p ())
2947 if (dump_file)
2948 fprintf (dump_file, "Target is overwritable\n\n");
2949 noverwritable++;
2950 continue;
2952 else if (dbg_cnt (devirt))
2954 if (dump_enabled_p ())
2956 location_t locus = gimple_location_safe (e->call_stmt);
2957 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2958 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2959 n->name (), n->order,
2960 likely_target->name (),
2961 likely_target->order);
2963 if (!likely_target->can_be_discarded_p ())
2965 cgraph_node *alias;
2966 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
2967 if (alias)
2968 likely_target = alias;
2970 nconverted++;
2971 update = true;
2972 e->make_speculative
2973 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2976 if (update)
2977 inline_update_overall_summary (n);
2979 if (warn_suggest_final_methods || warn_suggest_final_types)
2981 if (warn_suggest_final_types)
2983 final_warning_records->type_warnings.qsort (type_warning_cmp);
2984 for (unsigned int i = 0;
2985 i < final_warning_records->type_warnings.length (); i++)
2986 if (final_warning_records->type_warnings[i].count)
2988 tree type = final_warning_records->type_warnings[i].type;
2989 int count = final_warning_records->type_warnings[i].count;
2990 long long dyn_count
2991 = final_warning_records->type_warnings[i].dyn_count;
2993 if (!dyn_count)
2994 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
2995 OPT_Wsuggest_final_types, count,
2996 "Declaring type %qD final "
2997 "would enable devirtualization of %i call",
2998 "Declaring type %qD final "
2999 "would enable devirtualization of %i calls",
3000 type,
3001 count);
3002 else
3003 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3004 OPT_Wsuggest_final_types, count,
3005 "Declaring type %qD final "
3006 "would enable devirtualization of %i call "
3007 "executed %lli times",
3008 "Declaring type %qD final "
3009 "would enable devirtualization of %i calls "
3010 "executed %lli times",
3011 type,
3012 count,
3013 dyn_count);
3017 if (warn_suggest_final_methods)
3019 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3021 final_warning_records->decl_warnings.traverse
3022 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3023 decl_warnings_vec.qsort (decl_warning_cmp);
3024 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3026 tree decl = decl_warnings_vec[i]->decl;
3027 int count = decl_warnings_vec[i]->count;
3028 long long dyn_count = decl_warnings_vec[i]->dyn_count;
3030 if (!dyn_count)
3031 if (DECL_CXX_DESTRUCTOR_P (decl))
3032 warning_n (DECL_SOURCE_LOCATION (decl),
3033 OPT_Wsuggest_final_methods, count,
3034 "Declaring virtual destructor of %qD final "
3035 "would enable devirtualization of %i call",
3036 "Declaring virtual destructor of %qD final "
3037 "would enable devirtualization of %i calls",
3038 DECL_CONTEXT (decl), count);
3039 else
3040 warning_n (DECL_SOURCE_LOCATION (decl),
3041 OPT_Wsuggest_final_methods, count,
3042 "Declaring method %qD final "
3043 "would enable devirtualization of %i call",
3044 "Declaring method %qD final "
3045 "would enable devirtualization of %i calls",
3046 decl, count);
3047 else if (DECL_CXX_DESTRUCTOR_P (decl))
3048 warning_n (DECL_SOURCE_LOCATION (decl),
3049 OPT_Wsuggest_final_methods, count,
3050 "Declaring virtual destructor of %qD final "
3051 "would enable devirtualization of %i call "
3052 "executed %lli times",
3053 "Declaring virtual destructor of %qD final "
3054 "would enable devirtualization of %i calls "
3055 "executed %lli times",
3056 DECL_CONTEXT (decl), count, dyn_count);
3057 else
3058 warning_n (DECL_SOURCE_LOCATION (decl),
3059 OPT_Wsuggest_final_methods, count,
3060 "Declaring method %qD final "
3061 "would enable devirtualization of %i call "
3062 "executed %lli times",
3063 "Declaring method %qD final "
3064 "would enable devirtualization of %i calls "
3065 "executed %lli times",
3066 decl, count, dyn_count);
3070 delete (final_warning_records);
3071 final_warning_records = 0;
3074 if (dump_file)
3075 fprintf (dump_file,
3076 "%i polymorphic calls, %i devirtualized,"
3077 " %i speculatively devirtualized, %i cold\n"
3078 "%i have multiple targets, %i overwritable,"
3079 " %i already speculated (%i agree, %i disagree),"
3080 " %i external, %i not defined, %i artificial\n",
3081 npolymorphic, ndevirtualized, nconverted, ncold,
3082 nmultiple, noverwritable, nspeculated, nok, nwrong,
3083 nexternal, nnotdefined, nartificial);
3084 return ndevirtualized ? TODO_remove_functions : 0;
3087 namespace {
3089 const pass_data pass_data_ipa_devirt =
3091 IPA_PASS, /* type */
3092 "devirt", /* name */
3093 OPTGROUP_NONE, /* optinfo_flags */
3094 TV_IPA_DEVIRT, /* tv_id */
3095 0, /* properties_required */
3096 0, /* properties_provided */
3097 0, /* properties_destroyed */
3098 0, /* todo_flags_start */
3099 ( TODO_dump_symtab ), /* todo_flags_finish */
3102 class pass_ipa_devirt : public ipa_opt_pass_d
3104 public:
3105 pass_ipa_devirt (gcc::context *ctxt)
3106 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3107 NULL, /* generate_summary */
3108 NULL, /* write_summary */
3109 NULL, /* read_summary */
3110 NULL, /* write_optimization_summary */
3111 NULL, /* read_optimization_summary */
3112 NULL, /* stmt_fixup */
3113 0, /* function_transform_todo_flags_start */
3114 NULL, /* function_transform */
3115 NULL) /* variable_transform */
3118 /* opt_pass methods: */
3119 virtual bool gate (function *)
3121 /* In LTO, always run the IPA passes and decide on function basis if the
3122 pass is enabled. */
3123 if (in_lto_p)
3124 return true;
3125 return (flag_devirtualize
3126 && (flag_devirtualize_speculatively
3127 || (warn_suggest_final_methods
3128 || warn_suggest_final_types))
3129 && optimize);
3132 virtual unsigned int execute (function *) { return ipa_devirt (); }
3134 }; // class pass_ipa_devirt
3136 } // anon namespace
3138 ipa_opt_pass_d *
3139 make_pass_ipa_devirt (gcc::context *ctxt)
3141 return new pass_ipa_devirt (ctxt);
3144 #include "gt-ipa-devirt.h"