always define DYNAMIC_CHAIN_ADDRESS
[official-gcc.git] / gcc / ipa-devirt.c
bloba7a8e8e6d22f2bf67f8ed2a0f54f66196f636719
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "tree.h"
113 #include "gimple.h"
114 #include "rtl.h"
115 #include "alias.h"
116 #include "fold-const.h"
117 #include "print-tree.h"
118 #include "calls.h"
119 #include "cgraph.h"
120 #include "flags.h"
121 #include "insn-config.h"
122 #include "expmed.h"
123 #include "dojump.h"
124 #include "explow.h"
125 #include "emit-rtl.h"
126 #include "varasm.h"
127 #include "stmt.h"
128 #include "expr.h"
129 #include "tree-pass.h"
130 #include "target.h"
131 #include "ipa-utils.h"
132 #include "internal-fn.h"
133 #include "gimple-fold.h"
134 #include "alloc-pool.h"
135 #include "symbol-summary.h"
136 #include "ipa-prop.h"
137 #include "ipa-inline.h"
138 #include "diagnostic.h"
139 #include "tree-dfa.h"
140 #include "demangle.h"
141 #include "dbgcnt.h"
142 #include "gimple-pretty-print.h"
143 #include "stor-layout.h"
144 #include "intl.h"
145 #include "lto-streamer.h"
147 /* Hash based set of pairs of types. */
148 struct type_pair
150 tree first;
151 tree second;
154 template <>
155 struct default_hash_traits <type_pair> : typed_noop_remove <type_pair>
157 typedef type_pair value_type;
158 typedef type_pair compare_type;
159 static hashval_t
160 hash (type_pair p)
162 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
164 static bool
165 is_empty (type_pair p)
167 return p.first == NULL;
169 static bool
170 is_deleted (type_pair p ATTRIBUTE_UNUSED)
172 return false;
174 static bool
175 equal (const type_pair &a, const type_pair &b)
177 return a.first==b.first && a.second == b.second;
179 static void
180 mark_empty (type_pair &e)
182 e.first = NULL;
186 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
187 hash_set<type_pair> *,
188 location_t, location_t);
190 static bool odr_violation_reported = false;
193 /* Pointer set of all call targets appearing in the cache. */
194 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
196 /* The node of type inheritance graph. For each type unique in
197 One Definition Rule (ODR) sense, we produce one node linking all
198 main variants of types equivalent to it, bases and derived types. */
200 struct GTY(()) odr_type_d
202 /* leader type. */
203 tree type;
204 /* All bases; built only for main variants of types. */
205 vec<odr_type> GTY((skip)) bases;
206 /* All derived types with virtual methods seen in unit;
207 built only for main variants of types. */
208 vec<odr_type> GTY((skip)) derived_types;
210 /* All equivalent types, if more than one. */
211 vec<tree, va_gc> *types;
212 /* Set of all equivalent types, if NON-NULL. */
213 hash_set<tree> * GTY((skip)) types_set;
215 /* Unique ID indexing the type in odr_types array. */
216 int id;
217 /* Is it in anonymous namespace? */
218 bool anonymous_namespace;
219 /* Do we know about all derivations of given type? */
220 bool all_derivations_known;
221 /* Did we report ODR violation here? */
222 bool odr_violated;
223 /* Set when virtual table without RTTI previaled table with. */
224 bool rtti_broken;
227 /* Return true if T is a type with linkage defined. */
229 bool
230 type_with_linkage_p (const_tree t)
232 /* Builtin types do not define linkage, their TYPE_CONTEXT is NULL. */
233 if (!TYPE_CONTEXT (t)
234 || !TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL
235 || !TYPE_STUB_DECL (t))
236 return false;
238 /* In LTO do not get confused by non-C++ produced types or types built
239 with -fno-lto-odr-type-merigng. */
240 if (in_lto_p)
242 /* To support -fno-lto-odr-type-merigng recognize types with vtables
243 to have linkage. */
244 if (RECORD_OR_UNION_TYPE_P (t)
245 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
246 return true;
247 /* Do not accept any other types - we do not know if they were produced
248 by C++ FE. */
249 if (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
250 return false;
253 return (RECORD_OR_UNION_TYPE_P (t)
254 || TREE_CODE (t) == ENUMERAL_TYPE);
257 /* Return true if T is in anonymous namespace.
258 This works only on those C++ types with linkage defined. */
260 bool
261 type_in_anonymous_namespace_p (const_tree t)
263 gcc_assert (type_with_linkage_p (t));
265 /* Keep -fno-lto-odr-type-merging working by recognizing classes with vtables
266 properly into anonymous namespaces. */
267 if (RECORD_OR_UNION_TYPE_P (t)
268 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
269 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
271 if (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)))
273 /* C++ FE uses magic <anon> as assembler names of anonymous types.
274 verify that this match with type_in_anonymous_namespace_p. */
275 #ifdef ENABLE_CHECKING
276 if (in_lto_p)
277 gcc_assert (!strcmp ("<anon>",
278 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))));
279 #endif
280 return true;
282 return false;
285 /* Return true of T is type with One Definition Rule info attached.
286 It means that either it is anonymous type or it has assembler name
287 set. */
289 bool
290 odr_type_p (const_tree t)
292 /* We do not have this information when not in LTO, but we do not need
293 to care, since it is used only for type merging. */
294 gcc_checking_assert (in_lto_p || flag_lto);
296 /* To support -fno-lto-odr-type-merging consider types with vtables ODR. */
297 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
298 return true;
300 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
301 && (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t))))
303 #ifdef ENABLE_CHECKING
304 /* C++ FE uses magic <anon> as assembler names of anonymous types.
305 verify that this match with type_in_anonymous_namespace_p. */
306 gcc_assert (!type_with_linkage_p (t)
307 || strcmp ("<anon>",
308 IDENTIFIER_POINTER
309 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))))
310 || type_in_anonymous_namespace_p (t));
311 #endif
312 return true;
314 return false;
317 /* Return TRUE if all derived types of T are known and thus
318 we may consider the walk of derived type complete.
320 This is typically true only for final anonymous namespace types and types
321 defined within functions (that may be COMDAT and thus shared across units,
322 but with the same set of derived types). */
324 bool
325 type_all_derivations_known_p (const_tree t)
327 if (TYPE_FINAL_P (t))
328 return true;
329 if (flag_ltrans)
330 return false;
331 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
332 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
333 return true;
334 if (type_in_anonymous_namespace_p (t))
335 return true;
336 return (decl_function_context (TYPE_NAME (t)) != NULL);
339 /* Return TRUE if type's constructors are all visible. */
341 static bool
342 type_all_ctors_visible_p (tree t)
344 return !flag_ltrans
345 && symtab->state >= CONSTRUCTION
346 /* We can not always use type_all_derivations_known_p.
347 For function local types we must assume case where
348 the function is COMDAT and shared in between units.
350 TODO: These cases are quite easy to get, but we need
351 to keep track of C++ privatizing via -Wno-weak
352 as well as the IPA privatizing. */
353 && type_in_anonymous_namespace_p (t);
356 /* Return TRUE if type may have instance. */
358 static bool
359 type_possibly_instantiated_p (tree t)
361 tree vtable;
362 varpool_node *vnode;
364 /* TODO: Add abstract types here. */
365 if (!type_all_ctors_visible_p (t))
366 return true;
368 vtable = BINFO_VTABLE (TYPE_BINFO (t));
369 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
370 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
371 vnode = varpool_node::get (vtable);
372 return vnode && vnode->definition;
375 /* Hash used to unify ODR types based on their mangled name and for anonymous
376 namespace types. */
378 struct odr_name_hasher : pointer_hash <odr_type_d>
380 typedef union tree_node *compare_type;
381 static inline hashval_t hash (const odr_type_d *);
382 static inline bool equal (const odr_type_d *, const tree_node *);
383 static inline void remove (odr_type_d *);
386 /* Has used to unify ODR types based on their associated virtual table.
387 This hash is needed to keep -fno-lto-odr-type-merging to work and contains
388 only polymorphic types. Types with mangled names are inserted to both. */
390 struct odr_vtable_hasher:odr_name_hasher
392 static inline hashval_t hash (const odr_type_d *);
393 static inline bool equal (const odr_type_d *, const tree_node *);
396 /* Return type that was declared with T's name so that T is an
397 qualified variant of it. */
399 static inline tree
400 main_odr_variant (const_tree t)
402 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
403 return TREE_TYPE (TYPE_NAME (t));
404 /* Unnamed types and non-C++ produced types can be compared by variants. */
405 else
406 return TYPE_MAIN_VARIANT (t);
409 static bool
410 can_be_name_hashed_p (tree t)
412 return (!in_lto_p || odr_type_p (t));
415 /* Hash type by its ODR name. */
417 static hashval_t
418 hash_odr_name (const_tree t)
420 gcc_checking_assert (main_odr_variant (t) == t);
422 /* If not in LTO, all main variants are unique, so we can do
423 pointer hash. */
424 if (!in_lto_p)
425 return htab_hash_pointer (t);
427 /* Anonymous types are unique. */
428 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
429 return htab_hash_pointer (t);
431 gcc_checking_assert (TYPE_NAME (t)
432 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
433 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
436 /* Return the computed hashcode for ODR_TYPE. */
438 inline hashval_t
439 odr_name_hasher::hash (const odr_type_d *odr_type)
441 return hash_odr_name (odr_type->type);
444 static bool
445 can_be_vtable_hashed_p (tree t)
447 /* vtable hashing can distinguish only main variants. */
448 if (TYPE_MAIN_VARIANT (t) != t)
449 return false;
450 /* Anonymous namespace types are always handled by name hash. */
451 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
452 return false;
453 return (TREE_CODE (t) == RECORD_TYPE
454 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
457 /* Hash type by assembler name of its vtable. */
459 static hashval_t
460 hash_odr_vtable (const_tree t)
462 tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
463 inchash::hash hstate;
465 gcc_checking_assert (in_lto_p);
466 gcc_checking_assert (!type_in_anonymous_namespace_p (t));
467 gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
468 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
469 gcc_checking_assert (main_odr_variant (t) == t);
471 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
473 add_expr (TREE_OPERAND (v, 1), hstate);
474 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
477 hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
478 return hstate.end ();
481 /* Return the computed hashcode for ODR_TYPE. */
483 inline hashval_t
484 odr_vtable_hasher::hash (const odr_type_d *odr_type)
486 return hash_odr_vtable (odr_type->type);
489 /* For languages with One Definition Rule, work out if
490 types are the same based on their name.
492 This is non-trivial for LTO where minor differences in
493 the type representation may have prevented type merging
494 to merge two copies of otherwise equivalent type.
496 Until we start streaming mangled type names, this function works
497 only for polymorphic types.
499 When STRICT is true, we compare types by their names for purposes of
500 ODR violation warnings. When strict is false, we consider variants
501 equivalent, becuase it is all that matters for devirtualization machinery.
504 bool
505 types_same_for_odr (const_tree type1, const_tree type2, bool strict)
507 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
509 type1 = main_odr_variant (type1);
510 type2 = main_odr_variant (type2);
511 if (!strict)
513 type1 = TYPE_MAIN_VARIANT (type1);
514 type2 = TYPE_MAIN_VARIANT (type2);
517 if (type1 == type2)
518 return true;
520 if (!in_lto_p)
521 return false;
523 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
524 on the corresponding TYPE_STUB_DECL. */
525 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
526 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
527 return false;
530 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
532 Ideally we should never need types without ODR names here. It can however
533 happen in two cases:
535 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
536 Here testing for equivalence is safe, since their MAIN_VARIANTs are
537 unique.
538 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
539 establish precise ODR equivalency, but for correctness we care only
540 about equivalency on complete polymorphic types. For these we can
541 compare assembler names of their virtual tables. */
542 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
543 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
545 /* See if types are obviously different (i.e. different codes
546 or polymorphic wrt non-polymorphic). This is not strictly correct
547 for ODR violating programs, but we can't do better without streaming
548 ODR names. */
549 if (TREE_CODE (type1) != TREE_CODE (type2))
550 return false;
551 if (TREE_CODE (type1) == RECORD_TYPE
552 && (TYPE_BINFO (type1) == NULL_TREE)
553 != (TYPE_BINFO (type2) == NULL_TREE))
554 return false;
555 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
556 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
557 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
558 return false;
560 /* At the moment we have no way to establish ODR equivalence at LTO
561 other than comparing virtual table pointers of polymorphic types.
562 Eventually we should start saving mangled names in TYPE_NAME.
563 Then this condition will become non-trivial. */
565 if (TREE_CODE (type1) == RECORD_TYPE
566 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
567 && BINFO_VTABLE (TYPE_BINFO (type1))
568 && BINFO_VTABLE (TYPE_BINFO (type2)))
570 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
571 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
572 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
573 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
574 return (operand_equal_p (TREE_OPERAND (v1, 1),
575 TREE_OPERAND (v2, 1), 0)
576 && DECL_ASSEMBLER_NAME
577 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
578 == DECL_ASSEMBLER_NAME
579 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
581 gcc_unreachable ();
583 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
584 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
587 /* Return true if we can decide on ODR equivalency.
589 In non-LTO it is always decide, in LTO however it depends in the type has
590 ODR info attached.
592 When STRICT is false, compare main variants. */
594 bool
595 types_odr_comparable (tree t1, tree t2, bool strict)
597 return (!in_lto_p
598 || (strict ? (main_odr_variant (t1) == main_odr_variant (t2)
599 && main_odr_variant (t1))
600 : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
601 || (odr_type_p (t1) && odr_type_p (t2))
602 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
603 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
604 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
605 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
608 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
609 known, be conservative and return false. */
611 bool
612 types_must_be_same_for_odr (tree t1, tree t2)
614 if (types_odr_comparable (t1, t2))
615 return types_same_for_odr (t1, t2);
616 else
617 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
620 /* If T is compound type, return type it is based on. */
622 static tree
623 compound_type_base (const_tree t)
625 if (TREE_CODE (t) == ARRAY_TYPE
626 || POINTER_TYPE_P (t)
627 || TREE_CODE (t) == COMPLEX_TYPE
628 || VECTOR_TYPE_P (t))
629 return TREE_TYPE (t);
630 if (TREE_CODE (t) == METHOD_TYPE)
631 return TYPE_METHOD_BASETYPE (t);
632 if (TREE_CODE (t) == OFFSET_TYPE)
633 return TYPE_OFFSET_BASETYPE (t);
634 return NULL_TREE;
637 /* Return true if T is either ODR type or compound type based from it.
638 If the function return true, we know that T is a type originating from C++
639 source even at link-time. */
641 bool
642 odr_or_derived_type_p (const_tree t)
646 if (odr_type_p (t))
647 return true;
648 /* Function type is a tricky one. Basically we can consider it
649 ODR derived if return type or any of the parameters is.
650 We need to check all parameters because LTO streaming merges
651 common types (such as void) and they are not considered ODR then. */
652 if (TREE_CODE (t) == FUNCTION_TYPE)
654 if (TYPE_METHOD_BASETYPE (t))
655 t = TYPE_METHOD_BASETYPE (t);
656 else
658 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
659 return true;
660 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
661 if (odr_or_derived_type_p (TREE_VALUE (t)))
662 return true;
663 return false;
666 else
667 t = compound_type_base (t);
669 while (t);
670 return t;
673 /* Compare types T1 and T2 and return true if they are
674 equivalent. */
676 inline bool
677 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
679 tree t1 = o1->type;
681 gcc_checking_assert (main_odr_variant (t2) == t2);
682 gcc_checking_assert (main_odr_variant (t1) == t1);
683 if (t1 == t2)
684 return true;
685 if (!in_lto_p)
686 return false;
687 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
688 on the corresponding TYPE_STUB_DECL. */
689 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
690 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
691 return false;
692 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
693 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
694 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
695 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
698 /* Compare types T1 and T2 and return true if they are
699 equivalent. */
701 inline bool
702 odr_vtable_hasher::equal (const odr_type_d *o1, const tree_node *t2)
704 tree t1 = o1->type;
706 gcc_checking_assert (main_odr_variant (t2) == t2);
707 gcc_checking_assert (main_odr_variant (t1) == t1);
708 gcc_checking_assert (in_lto_p);
709 t1 = TYPE_MAIN_VARIANT (t1);
710 t2 = TYPE_MAIN_VARIANT (t2);
711 if (t1 == t2)
712 return true;
713 tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
714 tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
715 return (operand_equal_p (TREE_OPERAND (v1, 1),
716 TREE_OPERAND (v2, 1), 0)
717 && DECL_ASSEMBLER_NAME
718 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
719 == DECL_ASSEMBLER_NAME
720 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
723 /* Free ODR type V. */
725 inline void
726 odr_name_hasher::remove (odr_type_d *v)
728 v->bases.release ();
729 v->derived_types.release ();
730 if (v->types_set)
731 delete v->types_set;
732 ggc_free (v);
735 /* ODR type hash used to look up ODR type based on tree type node. */
737 typedef hash_table<odr_name_hasher> odr_hash_type;
738 static odr_hash_type *odr_hash;
739 typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
740 static odr_vtable_hash_type *odr_vtable_hash;
742 /* ODR types are also stored into ODR_TYPE vector to allow consistent
743 walking. Bases appear before derived types. Vector is garbage collected
744 so we won't end up visiting empty types. */
746 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
747 #define odr_types (*odr_types_ptr)
749 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
750 void
751 set_type_binfo (tree type, tree binfo)
753 for (; type; type = TYPE_NEXT_VARIANT (type))
754 if (COMPLETE_TYPE_P (type))
755 TYPE_BINFO (type) = binfo;
756 else
757 gcc_assert (!TYPE_BINFO (type));
760 /* Compare T2 and T2 based on name or structure. */
762 static bool
763 odr_subtypes_equivalent_p (tree t1, tree t2,
764 hash_set<type_pair> *visited,
765 location_t loc1, location_t loc2)
768 /* This can happen in incomplete types that should be handled earlier. */
769 gcc_assert (t1 && t2);
771 t1 = main_odr_variant (t1);
772 t2 = main_odr_variant (t2);
773 if (t1 == t2)
774 return true;
776 /* Anonymous namespace types must match exactly. */
777 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
778 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
779 return false;
781 /* For ODR types be sure to compare their names.
782 To support -wno-odr-type-merging we allow one type to be non-ODR
783 and other ODR even though it is a violation. */
784 if (types_odr_comparable (t1, t2, true))
786 if (!types_same_for_odr (t1, t2, true))
787 return false;
788 /* Limit recursion: If subtypes are ODR types and we know
789 that they are same, be happy. */
790 if (!odr_type_p (t1) || !get_odr_type (t1, true)->odr_violated)
791 return true;
794 /* Component types, builtins and possibly violating ODR types
795 have to be compared structurally. */
796 if (TREE_CODE (t1) != TREE_CODE (t2))
797 return false;
798 if (AGGREGATE_TYPE_P (t1)
799 && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
800 return false;
802 type_pair pair={t1,t2};
803 if (TYPE_UID (t1) > TYPE_UID (t2))
805 pair.first = t2;
806 pair.second = t1;
808 if (visited->add (pair))
809 return true;
810 return odr_types_equivalent_p (t1, t2, false, NULL, visited, loc1, loc2);
813 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
814 violation warnings. */
816 void
817 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
819 int n1, n2;
821 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
823 odr_violation_reported = true;
824 if (DECL_VIRTUAL_P (prevailing->decl))
826 varpool_node *tmp = prevailing;
827 prevailing = vtable;
828 vtable = tmp;
830 if (warning_at (DECL_SOURCE_LOCATION
831 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
832 OPT_Wodr,
833 "virtual table of type %qD violates one definition rule",
834 DECL_CONTEXT (vtable->decl)))
835 inform (DECL_SOURCE_LOCATION (prevailing->decl),
836 "variable of same assembler name as the virtual table is "
837 "defined in another translation unit");
838 return;
840 if (!prevailing->definition || !vtable->definition)
841 return;
843 /* If we do not stream ODR type info, do not bother to do useful compare. */
844 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
845 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
846 return;
848 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
850 if (class_type->odr_violated)
851 return;
853 for (n1 = 0, n2 = 0; true; n1++, n2++)
855 struct ipa_ref *ref1, *ref2;
856 bool end1, end2;
858 end1 = !prevailing->iterate_reference (n1, ref1);
859 end2 = !vtable->iterate_reference (n2, ref2);
861 /* !DECL_VIRTUAL_P means RTTI entry;
862 We warn when RTTI is lost because non-RTTI previals; we silently
863 accept the other case. */
864 while (!end2
865 && (end1
866 || (DECL_ASSEMBLER_NAME (ref1->referred->decl)
867 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
868 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
869 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
871 if (!class_type->rtti_broken
872 && warning_at (DECL_SOURCE_LOCATION
873 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
874 OPT_Wodr,
875 "virtual table of type %qD contains RTTI "
876 "information",
877 DECL_CONTEXT (vtable->decl)))
879 inform (DECL_SOURCE_LOCATION
880 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
881 "but is prevailed by one without from other translation "
882 "unit");
883 inform (DECL_SOURCE_LOCATION
884 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
885 "RTTI will not work on this type");
886 class_type->rtti_broken = true;
888 n2++;
889 end2 = !vtable->iterate_reference (n2, ref2);
891 while (!end1
892 && (end2
893 || (DECL_ASSEMBLER_NAME (ref2->referred->decl)
894 != DECL_ASSEMBLER_NAME (ref1->referred->decl)
895 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
896 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
898 n1++;
899 end1 = !prevailing->iterate_reference (n1, ref1);
902 /* Finished? */
903 if (end1 && end2)
905 /* Extra paranoia; compare the sizes. We do not have information
906 about virtual inheritance offsets, so just be sure that these
907 match.
908 Do this as very last check so the not very informative error
909 is not output too often. */
910 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
912 class_type->odr_violated = true;
913 if (warning_at (DECL_SOURCE_LOCATION
914 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
915 OPT_Wodr,
916 "virtual table of type %qD violates "
917 "one definition rule ",
918 DECL_CONTEXT (vtable->decl)))
920 inform (DECL_SOURCE_LOCATION
921 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
922 "the conflicting type defined in another translation "
923 "unit has virtual table of different size");
926 return;
929 if (!end1 && !end2)
931 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
932 == DECL_ASSEMBLER_NAME (ref2->referred->decl))
933 continue;
935 class_type->odr_violated = true;
937 /* If the loops above stopped on non-virtual pointer, we have
938 mismatch in RTTI information mangling. */
939 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
940 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
942 if (warning_at (DECL_SOURCE_LOCATION
943 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
944 OPT_Wodr,
945 "virtual table of type %qD violates "
946 "one definition rule ",
947 DECL_CONTEXT (vtable->decl)))
949 inform (DECL_SOURCE_LOCATION
950 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
951 "the conflicting type defined in another translation "
952 "unit with different RTTI information");
954 return;
956 /* At this point both REF1 and REF2 points either to virtual table
957 or virtual method. If one points to virtual table and other to
958 method we can complain the same way as if one table was shorter
959 than other pointing out the extra method. */
960 if (TREE_CODE (ref1->referred->decl)
961 != TREE_CODE (ref2->referred->decl))
963 if (TREE_CODE (ref1->referred->decl) == VAR_DECL)
964 end1 = true;
965 else if (TREE_CODE (ref2->referred->decl) == VAR_DECL)
966 end2 = true;
970 class_type->odr_violated = true;
972 /* Complain about size mismatch. Either we have too many virutal
973 functions or too many virtual table pointers. */
974 if (end1 || end2)
976 if (end1)
978 varpool_node *tmp = prevailing;
979 prevailing = vtable;
980 vtable = tmp;
981 ref1 = ref2;
983 if (warning_at (DECL_SOURCE_LOCATION
984 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
985 OPT_Wodr,
986 "virtual table of type %qD violates "
987 "one definition rule",
988 DECL_CONTEXT (vtable->decl)))
990 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
992 inform (DECL_SOURCE_LOCATION
993 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
994 "the conflicting type defined in another translation "
995 "unit");
996 inform (DECL_SOURCE_LOCATION
997 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
998 "contains additional virtual method %qD",
999 ref1->referred->decl);
1001 else
1003 inform (DECL_SOURCE_LOCATION
1004 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1005 "the conflicting type defined in another translation "
1006 "unit has virtual table with more entries");
1009 return;
1012 /* And in the last case we have either mistmatch in between two virtual
1013 methods or two virtual table pointers. */
1014 if (warning_at (DECL_SOURCE_LOCATION
1015 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
1016 "virtual table of type %qD violates "
1017 "one definition rule ",
1018 DECL_CONTEXT (vtable->decl)))
1020 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
1022 inform (DECL_SOURCE_LOCATION
1023 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1024 "the conflicting type defined in another translation "
1025 "unit");
1026 gcc_assert (TREE_CODE (ref2->referred->decl)
1027 == FUNCTION_DECL);
1028 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
1029 "virtual method %qD", ref1->referred->decl);
1030 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
1031 "ought to match virtual method %qD but does not",
1032 ref2->referred->decl);
1034 else
1035 inform (DECL_SOURCE_LOCATION
1036 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1037 "the conflicting type defined in another translation "
1038 "unit has virtual table with different contents");
1039 return;
1044 /* Output ODR violation warning about T1 and T2 with REASON.
1045 Display location of ST1 and ST2 if REASON speaks about field or
1046 method of the type.
1047 If WARN is false, do nothing. Set WARNED if warning was indeed
1048 output. */
1050 void
1051 warn_odr (tree t1, tree t2, tree st1, tree st2,
1052 bool warn, bool *warned, const char *reason)
1054 tree decl2 = TYPE_NAME (t2);
1055 if (warned)
1056 *warned = false;
1058 if (!warn || !TYPE_NAME(t1))
1059 return;
1061 /* ODR warnings are output druing LTO streaming; we must apply location
1062 cache for potential warnings to be output correctly. */
1063 if (lto_location_cache::current_cache)
1064 lto_location_cache::current_cache->apply_location_cache ();
1066 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
1067 "type %qT violates the C++ One Definition Rule",
1068 t1))
1069 return;
1070 if (!st1 && !st2)
1072 /* For FIELD_DECL support also case where one of fields is
1073 NULL - this is used when the structures have mismatching number of
1074 elements. */
1075 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
1077 inform (DECL_SOURCE_LOCATION (decl2),
1078 "a different type is defined in another translation unit");
1079 if (!st1)
1081 st1 = st2;
1082 st2 = NULL;
1084 inform (DECL_SOURCE_LOCATION (st1),
1085 "the first difference of corresponding definitions is field %qD",
1086 st1);
1087 if (st2)
1088 decl2 = st2;
1090 else if (TREE_CODE (st1) == FUNCTION_DECL)
1092 inform (DECL_SOURCE_LOCATION (decl2),
1093 "a different type is defined in another translation unit");
1094 inform (DECL_SOURCE_LOCATION (st1),
1095 "the first difference of corresponding definitions is method %qD",
1096 st1);
1097 decl2 = st2;
1099 else
1100 return;
1101 inform (DECL_SOURCE_LOCATION (decl2), reason);
1103 if (warned)
1104 *warned = true;
1107 /* Return ture if T1 and T2 are incompatible and we want to recusively
1108 dive into them from warn_type_mismatch to give sensible answer. */
1110 static bool
1111 type_mismatch_p (tree t1, tree t2)
1113 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
1114 && !odr_types_equivalent_p (t1, t2))
1115 return true;
1116 return !types_compatible_p (t1, t2);
1120 /* Types T1 and T2 was found to be incompatible in a context they can't
1121 (either used to declare a symbol of same assembler name or unified by
1122 ODR rule). We already output warning about this, but if possible, output
1123 extra information on how the types mismatch.
1125 This is hard to do in general. We basically handle the common cases.
1127 If LOC1 and LOC2 are meaningful locations, use it in the case the types
1128 themselves do no thave one.*/
1130 void
1131 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
1133 /* Location of type is known only if it has TYPE_NAME and the name is
1134 TYPE_DECL. */
1135 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1136 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1137 : UNKNOWN_LOCATION;
1138 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1139 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1140 : UNKNOWN_LOCATION;
1141 bool loc_t2_useful = false;
1143 /* With LTO it is a common case that the location of both types match.
1144 See if T2 has a location that is different from T1. If so, we will
1145 inform user about the location.
1146 Do not consider the location passed to us in LOC1/LOC2 as those are
1147 already output. */
1148 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1150 if (loc_t1 <= BUILTINS_LOCATION)
1151 loc_t2_useful = true;
1152 else
1154 expanded_location xloc1 = expand_location (loc_t1);
1155 expanded_location xloc2 = expand_location (loc_t2);
1157 if (strcmp (xloc1.file, xloc2.file)
1158 || xloc1.line != xloc2.line
1159 || xloc1.column != xloc2.column)
1160 loc_t2_useful = true;
1164 if (loc_t1 <= BUILTINS_LOCATION)
1165 loc_t1 = loc1;
1166 if (loc_t2 <= BUILTINS_LOCATION)
1167 loc_t2 = loc2;
1169 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1171 /* It is a quite common bug to reference anonymous namespace type in
1172 non-anonymous namespace class. */
1173 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1174 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1176 if (type_with_linkage_p (t1) && !type_in_anonymous_namespace_p (t1))
1178 std::swap (t1, t2);
1179 std::swap (loc_t1, loc_t2);
1181 gcc_assert (TYPE_NAME (t1) && TYPE_NAME (t2)
1182 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1183 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL);
1184 /* Most of the time, the type names will match, do not be unnecesarily
1185 verbose. */
1186 if (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t1)))
1187 != IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t2))))
1188 inform (loc_t1,
1189 "type %qT defined in anonymous namespace can not match "
1190 "type %qT across the translation unit boundary",
1191 t1, t2);
1192 else
1193 inform (loc_t1,
1194 "type %qT defined in anonymous namespace can not match "
1195 "across the translation unit boundary",
1196 t1);
1197 if (loc_t2_useful)
1198 inform (loc_t2,
1199 "the incompatible type defined in another translation unit");
1200 return;
1202 /* If types have mangled ODR names and they are different, it is most
1203 informative to output those.
1204 This also covers types defined in different namespaces. */
1205 if (TYPE_NAME (t1) && TYPE_NAME (t2)
1206 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1207 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1208 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
1209 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
1210 && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
1211 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1213 char *name1 = xstrdup (cplus_demangle
1214 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1215 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1216 char *name2 = cplus_demangle
1217 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1218 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1219 if (name1 && name2 && strcmp (name1, name2))
1221 inform (loc_t1,
1222 "type name %<%s%> should match type name %<%s%>",
1223 name1, name2);
1224 if (loc_t2_useful)
1225 inform (loc_t2,
1226 "the incompatible type is defined here");
1227 free (name1);
1228 return;
1230 free (name1);
1232 /* A tricky case are compound types. Often they appear the same in source
1233 code and the mismatch is dragged in by type they are build from.
1234 Look for those differences in subtypes and try to be informative. In other
1235 cases just output nothing because the source code is probably different
1236 and in this case we already output a all necessary info. */
1237 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1239 if (TREE_CODE (t1) == TREE_CODE (t2))
1241 if (TREE_CODE (t1) == ARRAY_TYPE
1242 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1244 tree i1 = TYPE_DOMAIN (t1);
1245 tree i2 = TYPE_DOMAIN (t2);
1247 if (i1 && i2
1248 && TYPE_MAX_VALUE (i1)
1249 && TYPE_MAX_VALUE (i2)
1250 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1251 TYPE_MAX_VALUE (i2), 0))
1253 inform (loc,
1254 "array types have different bounds");
1255 return;
1258 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1259 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1260 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1261 else if (TREE_CODE (t1) == METHOD_TYPE
1262 || TREE_CODE (t1) == FUNCTION_TYPE)
1264 tree parms1 = NULL, parms2 = NULL;
1265 int count = 1;
1267 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1269 inform (loc, "return value type mismatch");
1270 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1271 loc_t2);
1272 return;
1274 if (prototype_p (t1) && prototype_p (t2))
1275 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1276 parms1 && parms2;
1277 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1278 count++)
1280 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1282 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1283 inform (loc,
1284 "implicit this pointer type mismatch");
1285 else
1286 inform (loc,
1287 "type mismatch in parameter %i",
1288 count - (TREE_CODE (t1) == METHOD_TYPE));
1289 warn_types_mismatch (TREE_VALUE (parms1),
1290 TREE_VALUE (parms2),
1291 loc_t1, loc_t2);
1292 return;
1295 if (parms1 || parms2)
1297 inform (loc,
1298 "types have different parameter counts");
1299 return;
1303 return;
1306 if (types_odr_comparable (t1, t2, true)
1307 && types_same_for_odr (t1, t2, true))
1308 inform (loc_t1,
1309 "type %qT itself violate the C++ One Definition Rule", t1);
1310 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1311 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1312 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1313 return;
1314 else
1315 inform (loc_t1, "type %qT should match type %qT",
1316 t1, t2);
1317 if (loc_t2_useful)
1318 inform (loc_t2, "the incompatible type is defined here");
1321 /* Compare T1 and T2, report ODR violations if WARN is true and set
1322 WARNED to true if anything is reported. Return true if types match.
1323 If true is returned, the types are also compatible in the sense of
1324 gimple_canonical_types_compatible_p.
1325 If LOC1 and LOC2 is not UNKNOWN_LOCATION it may be used to output a warning
1326 about the type if the type itself do not have location. */
1328 static bool
1329 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1330 hash_set<type_pair> *visited,
1331 location_t loc1, location_t loc2)
1333 /* Check first for the obvious case of pointer identity. */
1334 if (t1 == t2)
1335 return true;
1336 gcc_assert (!type_with_linkage_p (t1) || !type_in_anonymous_namespace_p (t1));
1337 gcc_assert (!type_with_linkage_p (t2) || !type_in_anonymous_namespace_p (t2));
1339 /* Can't be the same type if the types don't have the same code. */
1340 if (TREE_CODE (t1) != TREE_CODE (t2))
1342 warn_odr (t1, t2, NULL, NULL, warn, warned,
1343 G_("a different type is defined in another translation unit"));
1344 return false;
1347 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1349 warn_odr (t1, t2, NULL, NULL, warn, warned,
1350 G_("a type with different qualifiers is defined in another "
1351 "translation unit"));
1352 return false;
1355 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1356 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1358 /* We can not trip this when comparing ODR types, only when trying to
1359 match different ODR derivations from different declarations.
1360 So WARN should be always false. */
1361 gcc_assert (!warn);
1362 return false;
1365 if (comp_type_attributes (t1, t2) != 1)
1367 warn_odr (t1, t2, NULL, NULL, warn, warned,
1368 G_("a type with different attributes "
1369 "is defined in another translation unit"));
1370 return false;
1373 if (TREE_CODE (t1) == ENUMERAL_TYPE
1374 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1376 tree v1, v2;
1377 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1378 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1380 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1382 warn_odr (t1, t2, NULL, NULL, warn, warned,
1383 G_("an enum with different value name"
1384 " is defined in another translation unit"));
1385 return false;
1387 if (TREE_VALUE (v1) != TREE_VALUE (v2)
1388 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1389 DECL_INITIAL (TREE_VALUE (v2)), 0))
1391 warn_odr (t1, t2, NULL, NULL, warn, warned,
1392 G_("an enum with different values is defined"
1393 " in another translation unit"));
1394 return false;
1397 if (v1 || v2)
1399 warn_odr (t1, t2, NULL, NULL, warn, warned,
1400 G_("an enum with mismatching number of values "
1401 "is defined in another translation unit"));
1402 return false;
1406 /* Non-aggregate types can be handled cheaply. */
1407 if (INTEGRAL_TYPE_P (t1)
1408 || SCALAR_FLOAT_TYPE_P (t1)
1409 || FIXED_POINT_TYPE_P (t1)
1410 || TREE_CODE (t1) == VECTOR_TYPE
1411 || TREE_CODE (t1) == COMPLEX_TYPE
1412 || TREE_CODE (t1) == OFFSET_TYPE
1413 || POINTER_TYPE_P (t1))
1415 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1417 warn_odr (t1, t2, NULL, NULL, warn, warned,
1418 G_("a type with different precision is defined "
1419 "in another translation unit"));
1420 return false;
1422 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1424 warn_odr (t1, t2, NULL, NULL, warn, warned,
1425 G_("a type with different signedness is defined "
1426 "in another translation unit"));
1427 return false;
1430 if (TREE_CODE (t1) == INTEGER_TYPE
1431 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1433 /* char WRT uint_8? */
1434 warn_odr (t1, t2, NULL, NULL, warn, warned,
1435 G_("a different type is defined in another "
1436 "translation unit"));
1437 return false;
1440 /* For canonical type comparisons we do not want to build SCCs
1441 so we cannot compare pointed-to types. But we can, for now,
1442 require the same pointed-to type kind and match what
1443 useless_type_conversion_p would do. */
1444 if (POINTER_TYPE_P (t1))
1446 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1447 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1449 warn_odr (t1, t2, NULL, NULL, warn, warned,
1450 G_("it is defined as a pointer in different address "
1451 "space in another translation unit"));
1452 return false;
1455 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1456 visited, loc1, loc2))
1458 warn_odr (t1, t2, NULL, NULL, warn, warned,
1459 G_("it is defined as a pointer to different type "
1460 "in another translation unit"));
1461 if (warn && warned)
1462 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
1463 loc1, loc2);
1464 return false;
1468 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1469 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1470 visited, loc1, loc2))
1472 /* Probably specific enough. */
1473 warn_odr (t1, t2, NULL, NULL, warn, warned,
1474 G_("a different type is defined "
1475 "in another translation unit"));
1476 if (warn && warned)
1477 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1478 return false;
1481 /* Do type-specific comparisons. */
1482 else switch (TREE_CODE (t1))
1484 case ARRAY_TYPE:
1486 /* Array types are the same if the element types are the same and
1487 the number of elements are the same. */
1488 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1489 visited, loc1, loc2))
1491 warn_odr (t1, t2, NULL, NULL, warn, warned,
1492 G_("a different type is defined in another "
1493 "translation unit"));
1494 if (warn && warned)
1495 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1497 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1498 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1499 == TYPE_NONALIASED_COMPONENT (t2));
1501 tree i1 = TYPE_DOMAIN (t1);
1502 tree i2 = TYPE_DOMAIN (t2);
1504 /* For an incomplete external array, the type domain can be
1505 NULL_TREE. Check this condition also. */
1506 if (i1 == NULL_TREE || i2 == NULL_TREE)
1507 return true;
1509 tree min1 = TYPE_MIN_VALUE (i1);
1510 tree min2 = TYPE_MIN_VALUE (i2);
1511 tree max1 = TYPE_MAX_VALUE (i1);
1512 tree max2 = TYPE_MAX_VALUE (i2);
1514 /* In C++, minimums should be always 0. */
1515 gcc_assert (min1 == min2);
1516 if (!operand_equal_p (max1, max2, 0))
1518 warn_odr (t1, t2, NULL, NULL, warn, warned,
1519 G_("an array of different size is defined "
1520 "in another translation unit"));
1521 return false;
1524 break;
1526 case METHOD_TYPE:
1527 case FUNCTION_TYPE:
1528 /* Function types are the same if the return type and arguments types
1529 are the same. */
1530 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1531 visited, loc1, loc2))
1533 warn_odr (t1, t2, NULL, NULL, warn, warned,
1534 G_("has different return value "
1535 "in another translation unit"));
1536 if (warn && warned)
1537 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1538 return false;
1541 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1542 || !prototype_p (t1) || !prototype_p (t2))
1543 return true;
1544 else
1546 tree parms1, parms2;
1548 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1549 parms1 && parms2;
1550 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1552 if (!odr_subtypes_equivalent_p
1553 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited,
1554 loc1, loc2))
1556 warn_odr (t1, t2, NULL, NULL, warn, warned,
1557 G_("has different parameters in another "
1558 "translation unit"));
1559 if (warn && warned)
1560 warn_types_mismatch (TREE_VALUE (parms1),
1561 TREE_VALUE (parms2), loc1, loc2);
1562 return false;
1566 if (parms1 || parms2)
1568 warn_odr (t1, t2, NULL, NULL, warn, warned,
1569 G_("has different parameters "
1570 "in another translation unit"));
1571 return false;
1574 return true;
1577 case RECORD_TYPE:
1578 case UNION_TYPE:
1579 case QUAL_UNION_TYPE:
1581 tree f1, f2;
1583 /* For aggregate types, all the fields must be the same. */
1584 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1586 if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1587 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1588 != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1590 if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1591 warn_odr (t1, t2, NULL, NULL, warn, warned,
1592 G_("a type defined in another translation unit "
1593 "is not polymorphic"));
1594 else
1595 warn_odr (t1, t2, NULL, NULL, warn, warned,
1596 G_("a type defined in another translation unit "
1597 "is polymorphic"));
1598 return false;
1600 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1601 f1 || f2;
1602 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1604 /* Skip non-fields. */
1605 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1606 f1 = TREE_CHAIN (f1);
1607 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1608 f2 = TREE_CHAIN (f2);
1609 if (!f1 || !f2)
1610 break;
1611 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1613 warn_odr (t1, t2, NULL, NULL, warn, warned,
1614 G_("a type with different virtual table pointers"
1615 " is defined in another translation unit"));
1616 return false;
1618 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1620 warn_odr (t1, t2, NULL, NULL, warn, warned,
1621 G_("a type with different bases is defined "
1622 "in another translation unit"));
1623 return false;
1625 if (DECL_NAME (f1) != DECL_NAME (f2)
1626 && !DECL_ARTIFICIAL (f1))
1628 warn_odr (t1, t2, f1, f2, warn, warned,
1629 G_("a field with different name is defined "
1630 "in another translation unit"));
1631 return false;
1633 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1634 TREE_TYPE (f2), visited,
1635 loc1, loc2))
1637 /* Do not warn about artificial fields and just go into
1638 generic field mismatch warning. */
1639 if (DECL_ARTIFICIAL (f1))
1640 break;
1642 warn_odr (t1, t2, f1, f2, warn, warned,
1643 G_("a field of same name but different type "
1644 "is defined in another translation unit"));
1645 if (warn && warned)
1646 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
1647 return false;
1649 if (!gimple_compare_field_offset (f1, f2))
1651 /* Do not warn about artificial fields and just go into
1652 generic field mismatch warning. */
1653 if (DECL_ARTIFICIAL (f1))
1654 break;
1655 warn_odr (t1, t2, f1, f2, warn, warned,
1656 G_("fields has different layout "
1657 "in another translation unit"));
1658 return false;
1660 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1661 == DECL_NONADDRESSABLE_P (f2));
1664 /* If one aggregate has more fields than the other, they
1665 are not the same. */
1666 if (f1 || f2)
1668 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1669 warn_odr (t1, t2, NULL, NULL, warn, warned,
1670 G_("a type with different virtual table pointers"
1671 " is defined in another translation unit"));
1672 else if ((f1 && DECL_ARTIFICIAL (f1))
1673 || (f2 && DECL_ARTIFICIAL (f2)))
1674 warn_odr (t1, t2, NULL, NULL, warn, warned,
1675 G_("a type with different bases is defined "
1676 "in another translation unit"));
1677 else
1678 warn_odr (t1, t2, f1, f2, warn, warned,
1679 G_("a type with different number of fields "
1680 "is defined in another translation unit"));
1682 return false;
1684 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1685 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t1))
1686 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t2))
1687 && odr_type_p (TYPE_MAIN_VARIANT (t1))
1688 && odr_type_p (TYPE_MAIN_VARIANT (t2))
1689 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1690 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1692 /* Currently free_lang_data sets TYPE_METHODS to error_mark_node
1693 if it is non-NULL so this loop will never realy execute. */
1694 if (TYPE_METHODS (TYPE_MAIN_VARIANT (t1)) != error_mark_node
1695 && TYPE_METHODS (TYPE_MAIN_VARIANT (t2)) != error_mark_node)
1696 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1697 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1698 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1700 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1702 warn_odr (t1, t2, f1, f2, warn, warned,
1703 G_("a different method of same type "
1704 "is defined in another "
1705 "translation unit"));
1706 return false;
1708 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1710 warn_odr (t1, t2, f1, f2, warn, warned,
1711 G_("s definition that differs by virtual "
1712 "keyword in another translation unit"));
1713 return false;
1715 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1717 warn_odr (t1, t2, f1, f2, warn, warned,
1718 G_("virtual table layout differs "
1719 "in another translation unit"));
1720 return false;
1722 if (odr_subtypes_equivalent_p (TREE_TYPE (f1),
1723 TREE_TYPE (f2), visited,
1724 loc1, loc2))
1726 warn_odr (t1, t2, f1, f2, warn, warned,
1727 G_("method with incompatible type is "
1728 "defined in another translation unit"));
1729 return false;
1732 if ((f1 == NULL) != (f2 == NULL))
1734 warn_odr (t1, t2, NULL, NULL, warn, warned,
1735 G_("a type with different number of methods "
1736 "is defined in another translation unit"));
1737 return false;
1741 break;
1743 case VOID_TYPE:
1744 case NULLPTR_TYPE:
1745 break;
1747 default:
1748 debug_tree (t1);
1749 gcc_unreachable ();
1752 /* Those are better to come last as they are utterly uninformative. */
1753 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1754 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1756 warn_odr (t1, t2, NULL, NULL, warn, warned,
1757 G_("a type with different size "
1758 "is defined in another translation unit"));
1759 return false;
1761 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1762 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1764 warn_odr (t1, t2, NULL, NULL, warn, warned,
1765 G_("a type with different alignment "
1766 "is defined in another translation unit"));
1767 return false;
1769 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1770 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1771 TYPE_SIZE_UNIT (t2), 0));
1772 return true;
1775 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1777 bool
1778 odr_types_equivalent_p (tree type1, tree type2)
1780 hash_set<type_pair> visited;
1782 #ifdef ENABLE_CHECKING
1783 gcc_assert (odr_or_derived_type_p (type1) && odr_or_derived_type_p (type2));
1784 #endif
1785 return odr_types_equivalent_p (type1, type2, false, NULL,
1786 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1789 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1790 from VAL->type. This may happen in LTO where tree merging did not merge
1791 all variants of the same type or due to ODR violation.
1793 Analyze and report ODR violations and add type to duplicate list.
1794 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1795 this is first time we see definition of a class return true so the
1796 base types are analyzed. */
1798 static bool
1799 add_type_duplicate (odr_type val, tree type)
1801 bool build_bases = false;
1802 bool prevail = false;
1803 bool odr_must_violate = false;
1805 if (!val->types_set)
1806 val->types_set = new hash_set<tree>;
1808 /* Chose polymorphic type as leader (this happens only in case of ODR
1809 violations. */
1810 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1811 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1812 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1813 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1815 prevail = true;
1816 build_bases = true;
1818 /* Always prefer complete type to be the leader. */
1819 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1821 prevail = true;
1822 build_bases = TYPE_BINFO (type);
1824 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1826 else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1827 && TREE_CODE (type) == ENUMERAL_TYPE
1828 && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1829 prevail = true;
1830 else if (TREE_CODE (val->type) == RECORD_TYPE
1831 && TREE_CODE (type) == RECORD_TYPE
1832 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1834 gcc_assert (!val->bases.length ());
1835 build_bases = true;
1836 prevail = true;
1839 if (prevail)
1840 std::swap (val->type, type);
1842 val->types_set->add (type);
1844 /* If we now have a mangled name, be sure to record it to val->type
1845 so ODR hash can work. */
1847 if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
1848 SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
1849 DECL_ASSEMBLER_NAME (TYPE_NAME (type)));
1851 bool merge = true;
1852 bool base_mismatch = false;
1853 unsigned int i;
1854 bool warned = false;
1855 hash_set<type_pair> visited;
1857 gcc_assert (in_lto_p);
1858 vec_safe_push (val->types, type);
1860 /* If both are class types, compare the bases. */
1861 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1862 && TREE_CODE (val->type) == RECORD_TYPE
1863 && TREE_CODE (type) == RECORD_TYPE
1864 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1866 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1867 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1869 if (!flag_ltrans && !warned && !val->odr_violated)
1871 tree extra_base;
1872 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1873 "a type with the same name but different "
1874 "number of polymorphic bases is "
1875 "defined in another translation unit");
1876 if (warned)
1878 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1879 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1880 extra_base = BINFO_BASE_BINFO
1881 (TYPE_BINFO (type),
1882 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1883 else
1884 extra_base = BINFO_BASE_BINFO
1885 (TYPE_BINFO (val->type),
1886 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1887 tree extra_base_type = BINFO_TYPE (extra_base);
1888 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1889 "the extra base is defined here");
1892 base_mismatch = true;
1894 else
1895 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1897 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1898 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1899 tree type1 = BINFO_TYPE (base1);
1900 tree type2 = BINFO_TYPE (base2);
1902 if (types_odr_comparable (type1, type2))
1904 if (!types_same_for_odr (type1, type2))
1905 base_mismatch = true;
1907 else
1908 if (!odr_types_equivalent_p (type1, type2))
1909 base_mismatch = true;
1910 if (base_mismatch)
1912 if (!warned && !val->odr_violated)
1914 warn_odr (type, val->type, NULL, NULL,
1915 !warned, &warned,
1916 "a type with the same name but different base "
1917 "type is defined in another translation unit");
1918 if (warned)
1919 warn_types_mismatch (type1, type2,
1920 UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1922 break;
1924 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1926 base_mismatch = true;
1927 if (!warned && !val->odr_violated)
1928 warn_odr (type, val->type, NULL, NULL,
1929 !warned, &warned,
1930 "a type with the same name but different base "
1931 "layout is defined in another translation unit");
1932 break;
1934 /* One of bases is not of complete type. */
1935 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1937 /* If we have a polymorphic type info specified for TYPE1
1938 but not for TYPE2 we possibly missed a base when recording
1939 VAL->type earlier.
1940 Be sure this does not happen. */
1941 if (TYPE_BINFO (type1)
1942 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1943 && !build_bases)
1944 odr_must_violate = true;
1945 break;
1947 /* One base is polymorphic and the other not.
1948 This ought to be diagnosed earlier, but do not ICE in the
1949 checking bellow. */
1950 else if (TYPE_BINFO (type1)
1951 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1952 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1954 if (!warned && !val->odr_violated)
1955 warn_odr (type, val->type, NULL, NULL,
1956 !warned, &warned,
1957 "a base of the type is polymorphic only in one "
1958 "translation unit");
1959 base_mismatch = true;
1960 break;
1963 if (base_mismatch)
1965 merge = false;
1966 odr_violation_reported = true;
1967 val->odr_violated = true;
1969 if (symtab->dump_file)
1971 fprintf (symtab->dump_file, "ODR base violation\n");
1973 print_node (symtab->dump_file, "", val->type, 0);
1974 putc ('\n',symtab->dump_file);
1975 print_node (symtab->dump_file, "", type, 0);
1976 putc ('\n',symtab->dump_file);
1981 /* Next compare memory layout. */
1982 if (!odr_types_equivalent_p (val->type, type,
1983 !flag_ltrans && !val->odr_violated && !warned,
1984 &warned, &visited,
1985 DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1986 DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1988 merge = false;
1989 odr_violation_reported = true;
1990 val->odr_violated = true;
1991 if (symtab->dump_file)
1993 fprintf (symtab->dump_file, "ODR violation\n");
1995 print_node (symtab->dump_file, "", val->type, 0);
1996 putc ('\n',symtab->dump_file);
1997 print_node (symtab->dump_file, "", type, 0);
1998 putc ('\n',symtab->dump_file);
2001 gcc_assert (val->odr_violated || !odr_must_violate);
2002 /* Sanity check that all bases will be build same way again. */
2003 #ifdef ENABLE_CHECKING
2004 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
2005 && TREE_CODE (val->type) == RECORD_TYPE
2006 && TREE_CODE (type) == RECORD_TYPE
2007 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
2008 && !val->odr_violated
2009 && !base_mismatch && val->bases.length ())
2011 unsigned int num_poly_bases = 0;
2012 unsigned int j;
2014 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
2015 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
2016 (TYPE_BINFO (type), i)))
2017 num_poly_bases++;
2018 gcc_assert (num_poly_bases == val->bases.length ());
2019 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
2020 i++)
2021 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
2022 (TYPE_BINFO (type), i)))
2024 odr_type base = get_odr_type
2025 (BINFO_TYPE
2026 (BINFO_BASE_BINFO (TYPE_BINFO (type),
2027 i)),
2028 true);
2029 gcc_assert (val->bases[j] == base);
2030 j++;
2033 #endif
2036 /* Regularize things a little. During LTO same types may come with
2037 different BINFOs. Either because their virtual table was
2038 not merged by tree merging and only later at decl merging or
2039 because one type comes with external vtable, while other
2040 with internal. We want to merge equivalent binfos to conserve
2041 memory and streaming overhead.
2043 The external vtables are more harmful: they contain references
2044 to external declarations of methods that may be defined in the
2045 merged LTO unit. For this reason we absolutely need to remove
2046 them and replace by internal variants. Not doing so will lead
2047 to incomplete answers from possible_polymorphic_call_targets.
2049 FIXME: disable for now; because ODR types are now build during
2050 streaming in, the variants do not need to be linked to the type,
2051 yet. We need to do the merging in cleanup pass to be implemented
2052 soon. */
2053 if (!flag_ltrans && merge
2054 && 0
2055 && TREE_CODE (val->type) == RECORD_TYPE
2056 && TREE_CODE (type) == RECORD_TYPE
2057 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
2058 && TYPE_MAIN_VARIANT (type) == type
2059 && TYPE_MAIN_VARIANT (val->type) == val->type
2060 && BINFO_VTABLE (TYPE_BINFO (val->type))
2061 && BINFO_VTABLE (TYPE_BINFO (type)))
2063 tree master_binfo = TYPE_BINFO (val->type);
2064 tree v1 = BINFO_VTABLE (master_binfo);
2065 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
2067 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
2069 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
2070 && operand_equal_p (TREE_OPERAND (v1, 1),
2071 TREE_OPERAND (v2, 1), 0));
2072 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
2073 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
2075 gcc_assert (DECL_ASSEMBLER_NAME (v1)
2076 == DECL_ASSEMBLER_NAME (v2));
2078 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
2080 unsigned int i;
2082 set_type_binfo (val->type, TYPE_BINFO (type));
2083 for (i = 0; i < val->types->length (); i++)
2085 if (TYPE_BINFO ((*val->types)[i])
2086 == master_binfo)
2087 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
2089 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
2091 else
2092 set_type_binfo (type, master_binfo);
2094 return build_bases;
2097 /* Get ODR type hash entry for TYPE. If INSERT is true, create
2098 possibly new entry. */
2100 odr_type
2101 get_odr_type (tree type, bool insert)
2103 odr_type_d **slot = NULL;
2104 odr_type_d **vtable_slot = NULL;
2105 odr_type val = NULL;
2106 hashval_t hash;
2107 bool build_bases = false;
2108 bool insert_to_odr_array = false;
2109 int base_id = -1;
2111 type = main_odr_variant (type);
2113 gcc_checking_assert (can_be_name_hashed_p (type)
2114 || can_be_vtable_hashed_p (type));
2116 /* Lookup entry, first try name hash, fallback to vtable hash. */
2117 if (can_be_name_hashed_p (type))
2119 hash = hash_odr_name (type);
2120 slot = odr_hash->find_slot_with_hash (type, hash,
2121 insert ? INSERT : NO_INSERT);
2123 if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
2125 hash = hash_odr_vtable (type);
2126 vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
2127 insert ? INSERT : NO_INSERT);
2130 if (!slot && !vtable_slot)
2131 return NULL;
2133 /* See if we already have entry for type. */
2134 if ((slot && *slot) || (vtable_slot && *vtable_slot))
2136 if (slot && *slot)
2138 val = *slot;
2139 #ifdef ENABLE_CHECKING
2140 if (in_lto_p && can_be_vtable_hashed_p (type))
2142 hash = hash_odr_vtable (type);
2143 vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
2144 NO_INSERT);
2145 gcc_assert (!vtable_slot || *vtable_slot == *slot);
2146 vtable_slot = NULL;
2148 #endif
2150 else if (*vtable_slot)
2151 val = *vtable_slot;
2153 if (val->type != type
2154 && (!val->types_set || !val->types_set->add (type)))
2156 gcc_assert (insert);
2157 /* We have type duplicate, but it may introduce vtable name or
2158 mangled name; be sure to keep hashes in sync. */
2159 if (in_lto_p && can_be_vtable_hashed_p (type)
2160 && (!vtable_slot || !*vtable_slot))
2162 if (!vtable_slot)
2164 hash = hash_odr_vtable (type);
2165 vtable_slot = odr_vtable_hash->find_slot_with_hash
2166 (type, hash, INSERT);
2167 gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
2169 *vtable_slot = val;
2171 if (slot && !*slot)
2172 *slot = val;
2173 build_bases = add_type_duplicate (val, type);
2176 else
2178 val = ggc_cleared_alloc<odr_type_d> ();
2179 val->type = type;
2180 val->bases = vNULL;
2181 val->derived_types = vNULL;
2182 if (type_with_linkage_p (type))
2183 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
2184 else
2185 val->anonymous_namespace = 0;
2186 build_bases = COMPLETE_TYPE_P (val->type);
2187 insert_to_odr_array = true;
2188 if (slot)
2189 *slot = val;
2190 if (vtable_slot)
2191 *vtable_slot = val;
2194 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
2195 && type_with_linkage_p (type)
2196 && type == TYPE_MAIN_VARIANT (type))
2198 tree binfo = TYPE_BINFO (type);
2199 unsigned int i;
2201 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
2203 val->all_derivations_known = type_all_derivations_known_p (type);
2204 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
2205 /* For now record only polymorphic types. other are
2206 pointless for devirtualization and we can not precisely
2207 determine ODR equivalency of these during LTO. */
2208 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
2210 tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
2211 odr_type base = get_odr_type (base_type, true);
2212 gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
2213 base->derived_types.safe_push (val);
2214 val->bases.safe_push (base);
2215 if (base->id > base_id)
2216 base_id = base->id;
2219 /* Ensure that type always appears after bases. */
2220 if (insert_to_odr_array)
2222 if (odr_types_ptr)
2223 val->id = odr_types.length ();
2224 vec_safe_push (odr_types_ptr, val);
2226 else if (base_id > val->id)
2228 odr_types[val->id] = 0;
2229 /* Be sure we did not recorded any derived types; these may need
2230 renumbering too. */
2231 gcc_assert (val->derived_types.length() == 0);
2232 if (odr_types_ptr)
2233 val->id = odr_types.length ();
2234 vec_safe_push (odr_types_ptr, val);
2236 return val;
2239 /* Add TYPE od ODR type hash. */
2241 void
2242 register_odr_type (tree type)
2244 if (!odr_hash)
2246 odr_hash = new odr_hash_type (23);
2247 if (in_lto_p)
2248 odr_vtable_hash = new odr_vtable_hash_type (23);
2250 /* Arrange things to be nicer and insert main variants first.
2251 ??? fundamental prerecorded types do not have mangled names; this
2252 makes it possible that non-ODR type is main_odr_variant of ODR type.
2253 Things may get smoother if LTO FE set mangled name of those types same
2254 way as C++ FE does. */
2255 if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type)))
2256 && odr_type_p (TYPE_MAIN_VARIANT (type)))
2257 get_odr_type (TYPE_MAIN_VARIANT (type), true);
2258 if (TYPE_MAIN_VARIANT (type) != type && odr_type_p (main_odr_variant (type)))
2259 get_odr_type (type, true);
2262 /* Return true if type is known to have no derivations. */
2264 bool
2265 type_known_to_have_no_derivations_p (tree t)
2267 return (type_all_derivations_known_p (t)
2268 && (TYPE_FINAL_P (t)
2269 || (odr_hash
2270 && !get_odr_type (t, true)->derived_types.length())));
2273 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2274 recursive printing. */
2276 static void
2277 dump_odr_type (FILE *f, odr_type t, int indent=0)
2279 unsigned int i;
2280 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2281 print_generic_expr (f, t->type, TDF_SLIM);
2282 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2283 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2284 if (TYPE_NAME (t->type))
2286 /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2287 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2288 DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2289 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2290 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2291 IDENTIFIER_POINTER
2292 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2294 if (t->bases.length ())
2296 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2297 for (i = 0; i < t->bases.length (); i++)
2298 fprintf (f, " %i", t->bases[i]->id);
2299 fprintf (f, "\n");
2301 if (t->derived_types.length ())
2303 fprintf (f, "%*s derived types:\n", indent * 2, "");
2304 for (i = 0; i < t->derived_types.length (); i++)
2305 dump_odr_type (f, t->derived_types[i], indent + 1);
2307 fprintf (f, "\n");
2310 /* Dump the type inheritance graph. */
2312 static void
2313 dump_type_inheritance_graph (FILE *f)
2315 unsigned int i;
2316 if (!odr_types_ptr)
2317 return;
2318 fprintf (f, "\n\nType inheritance graph:\n");
2319 for (i = 0; i < odr_types.length (); i++)
2321 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2322 dump_odr_type (f, odr_types[i]);
2324 for (i = 0; i < odr_types.length (); i++)
2326 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2328 unsigned int j;
2329 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2330 print_node (f, "", odr_types[i]->type, 0);
2331 for (j = 0; j < odr_types[i]->types->length (); j++)
2333 tree t;
2334 fprintf (f, "duplicate #%i\n", j);
2335 print_node (f, "", (*odr_types[i]->types)[j], 0);
2336 t = (*odr_types[i]->types)[j];
2337 while (TYPE_P (t) && TYPE_CONTEXT (t))
2339 t = TYPE_CONTEXT (t);
2340 print_node (f, "", t, 0);
2342 putc ('\n',f);
2348 /* Initialize IPA devirt and build inheritance tree graph. */
2350 void
2351 build_type_inheritance_graph (void)
2353 struct symtab_node *n;
2354 FILE *inheritance_dump_file;
2355 int flags;
2357 if (odr_hash)
2358 return;
2359 timevar_push (TV_IPA_INHERITANCE);
2360 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
2361 odr_hash = new odr_hash_type (23);
2362 if (in_lto_p)
2363 odr_vtable_hash = new odr_vtable_hash_type (23);
2365 /* We reconstruct the graph starting of types of all methods seen in the
2366 the unit. */
2367 FOR_EACH_SYMBOL (n)
2368 if (is_a <cgraph_node *> (n)
2369 && DECL_VIRTUAL_P (n->decl)
2370 && n->real_symbol_p ())
2371 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
2373 /* Look also for virtual tables of types that do not define any methods.
2375 We need it in a case where class B has virtual base of class A
2376 re-defining its virtual method and there is class C with no virtual
2377 methods with B as virtual base.
2379 Here we output B's virtual method in two variant - for non-virtual
2380 and virtual inheritance. B's virtual table has non-virtual version,
2381 while C's has virtual.
2383 For this reason we need to know about C in order to include both
2384 variants of B. More correctly, record_target_from_binfo should
2385 add both variants of the method when walking B, but we have no
2386 link in between them.
2388 We rely on fact that either the method is exported and thus we
2389 assume it is called externally or C is in anonymous namespace and
2390 thus we will see the vtable. */
2392 else if (is_a <varpool_node *> (n)
2393 && DECL_VIRTUAL_P (n->decl)
2394 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
2395 && TYPE_BINFO (DECL_CONTEXT (n->decl))
2396 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
2397 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
2398 if (inheritance_dump_file)
2400 dump_type_inheritance_graph (inheritance_dump_file);
2401 dump_end (TDI_inheritance, inheritance_dump_file);
2403 timevar_pop (TV_IPA_INHERITANCE);
2406 /* Return true if N has reference from live virtual table
2407 (and thus can be a destination of polymorphic call).
2408 Be conservatively correct when callgraph is not built or
2409 if the method may be referred externally. */
2411 static bool
2412 referenced_from_vtable_p (struct cgraph_node *node)
2414 int i;
2415 struct ipa_ref *ref;
2416 bool found = false;
2418 if (node->externally_visible
2419 || DECL_EXTERNAL (node->decl)
2420 || node->used_from_other_partition)
2421 return true;
2423 /* Keep this test constant time.
2424 It is unlikely this can happen except for the case where speculative
2425 devirtualization introduced many speculative edges to this node.
2426 In this case the target is very likely alive anyway. */
2427 if (node->ref_list.referring.length () > 100)
2428 return true;
2430 /* We need references built. */
2431 if (symtab->state <= CONSTRUCTION)
2432 return true;
2434 for (i = 0; node->iterate_referring (i, ref); i++)
2435 if ((ref->use == IPA_REF_ALIAS
2436 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2437 || (ref->use == IPA_REF_ADDR
2438 && TREE_CODE (ref->referring->decl) == VAR_DECL
2439 && DECL_VIRTUAL_P (ref->referring->decl)))
2441 found = true;
2442 break;
2444 return found;
2447 /* If TARGET has associated node, record it in the NODES array.
2448 CAN_REFER specify if program can refer to the target directly.
2449 if TARGET is unknown (NULL) or it can not be inserted (for example because
2450 its body was already removed and there is no way to refer to it), clear
2451 COMPLETEP. */
2453 static void
2454 maybe_record_node (vec <cgraph_node *> &nodes,
2455 tree target, hash_set<tree> *inserted,
2456 bool can_refer,
2457 bool *completep)
2459 struct cgraph_node *target_node, *alias_target;
2460 enum availability avail;
2462 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
2463 list of targets; the runtime effect of calling them is undefined.
2464 Only "real" virtual methods should be accounted. */
2465 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
2466 return;
2468 if (!can_refer)
2470 /* The only case when method of anonymous namespace becomes unreferable
2471 is when we completely optimized it out. */
2472 if (flag_ltrans
2473 || !target
2474 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2475 *completep = false;
2476 return;
2479 if (!target)
2480 return;
2482 target_node = cgraph_node::get (target);
2484 /* Prefer alias target over aliases, so we do not get confused by
2485 fake duplicates. */
2486 if (target_node)
2488 alias_target = target_node->ultimate_alias_target (&avail);
2489 if (target_node != alias_target
2490 && avail >= AVAIL_AVAILABLE
2491 && target_node->get_availability ())
2492 target_node = alias_target;
2495 /* Method can only be called by polymorphic call if any
2496 of vtables referring to it are alive.
2498 While this holds for non-anonymous functions, too, there are
2499 cases where we want to keep them in the list; for example
2500 inline functions with -fno-weak are static, but we still
2501 may devirtualize them when instance comes from other unit.
2502 The same holds for LTO.
2504 Currently we ignore these functions in speculative devirtualization.
2505 ??? Maybe it would make sense to be more aggressive for LTO even
2506 elsewhere. */
2507 if (!flag_ltrans
2508 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
2509 && (!target_node
2510 || !referenced_from_vtable_p (target_node)))
2512 /* See if TARGET is useful function we can deal with. */
2513 else if (target_node != NULL
2514 && (TREE_PUBLIC (target)
2515 || DECL_EXTERNAL (target)
2516 || target_node->definition)
2517 && target_node->real_symbol_p ())
2519 gcc_assert (!target_node->global.inlined_to);
2520 gcc_assert (target_node->real_symbol_p ());
2521 if (!inserted->add (target))
2523 cached_polymorphic_call_targets->add (target_node);
2524 nodes.safe_push (target_node);
2527 else if (completep
2528 && (!type_in_anonymous_namespace_p
2529 (DECL_CONTEXT (target))
2530 || flag_ltrans))
2531 *completep = false;
2534 /* See if BINFO's type matches OUTER_TYPE. If so, look up
2535 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2536 method in vtable and insert method to NODES array
2537 or BASES_TO_CONSIDER if this array is non-NULL.
2538 Otherwise recurse to base BINFOs.
2539 This matches what get_binfo_at_offset does, but with offset
2540 being unknown.
2542 TYPE_BINFOS is a stack of BINFOS of types with defined
2543 virtual table seen on way from class type to BINFO.
2545 MATCHED_VTABLES tracks virtual tables we already did lookup
2546 for virtual function in. INSERTED tracks nodes we already
2547 inserted.
2549 ANONYMOUS is true if BINFO is part of anonymous namespace.
2551 Clear COMPLETEP when we hit unreferable target.
2554 static void
2555 record_target_from_binfo (vec <cgraph_node *> &nodes,
2556 vec <tree> *bases_to_consider,
2557 tree binfo,
2558 tree otr_type,
2559 vec <tree> &type_binfos,
2560 HOST_WIDE_INT otr_token,
2561 tree outer_type,
2562 HOST_WIDE_INT offset,
2563 hash_set<tree> *inserted,
2564 hash_set<tree> *matched_vtables,
2565 bool anonymous,
2566 bool *completep)
2568 tree type = BINFO_TYPE (binfo);
2569 int i;
2570 tree base_binfo;
2573 if (BINFO_VTABLE (binfo))
2574 type_binfos.safe_push (binfo);
2575 if (types_same_for_odr (type, outer_type))
2577 int i;
2578 tree type_binfo = NULL;
2580 /* Look up BINFO with virtual table. For normal types it is always last
2581 binfo on stack. */
2582 for (i = type_binfos.length () - 1; i >= 0; i--)
2583 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
2585 type_binfo = type_binfos[i];
2586 break;
2588 if (BINFO_VTABLE (binfo))
2589 type_binfos.pop ();
2590 /* If this is duplicated BINFO for base shared by virtual inheritance,
2591 we may not have its associated vtable. This is not a problem, since
2592 we will walk it on the other path. */
2593 if (!type_binfo)
2594 return;
2595 tree inner_binfo = get_binfo_at_offset (type_binfo,
2596 offset, otr_type);
2597 if (!inner_binfo)
2599 gcc_assert (odr_violation_reported);
2600 return;
2602 /* For types in anonymous namespace first check if the respective vtable
2603 is alive. If not, we know the type can't be called. */
2604 if (!flag_ltrans && anonymous)
2606 tree vtable = BINFO_VTABLE (inner_binfo);
2607 varpool_node *vnode;
2609 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
2610 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
2611 vnode = varpool_node::get (vtable);
2612 if (!vnode || !vnode->definition)
2613 return;
2615 gcc_assert (inner_binfo);
2616 if (bases_to_consider
2617 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
2618 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
2620 bool can_refer;
2621 tree target = gimple_get_virt_method_for_binfo (otr_token,
2622 inner_binfo,
2623 &can_refer);
2624 if (!bases_to_consider)
2625 maybe_record_node (nodes, target, inserted, can_refer, completep);
2626 /* Destructors are never called via construction vtables. */
2627 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2628 bases_to_consider->safe_push (target);
2630 return;
2633 /* Walk bases. */
2634 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2635 /* Walking bases that have no virtual method is pointless exercise. */
2636 if (polymorphic_type_binfo_p (base_binfo))
2637 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
2638 type_binfos,
2639 otr_token, outer_type, offset, inserted,
2640 matched_vtables, anonymous, completep);
2641 if (BINFO_VTABLE (binfo))
2642 type_binfos.pop ();
2645 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2646 of TYPE, insert them to NODES, recurse into derived nodes.
2647 INSERTED is used to avoid duplicate insertions of methods into NODES.
2648 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2649 Clear COMPLETEP if unreferable target is found.
2651 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2652 all cases where BASE_SKIPPED is true (because the base is abstract
2653 class). */
2655 static void
2656 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2657 hash_set<tree> *inserted,
2658 hash_set<tree> *matched_vtables,
2659 tree otr_type,
2660 odr_type type,
2661 HOST_WIDE_INT otr_token,
2662 tree outer_type,
2663 HOST_WIDE_INT offset,
2664 bool *completep,
2665 vec <tree> &bases_to_consider,
2666 bool consider_construction)
2668 tree binfo = TYPE_BINFO (type->type);
2669 unsigned int i;
2670 auto_vec <tree, 8> type_binfos;
2671 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2673 /* We may need to consider types w/o instances because of possible derived
2674 types using their methods either directly or via construction vtables.
2675 We are safe to skip them when all derivations are known, since we will
2676 handle them later.
2677 This is done by recording them to BASES_TO_CONSIDER array. */
2678 if (possibly_instantiated || consider_construction)
2680 record_target_from_binfo (nodes,
2681 (!possibly_instantiated
2682 && type_all_derivations_known_p (type->type))
2683 ? &bases_to_consider : NULL,
2684 binfo, otr_type, type_binfos, otr_token,
2685 outer_type, offset,
2686 inserted, matched_vtables,
2687 type->anonymous_namespace, completep);
2689 for (i = 0; i < type->derived_types.length (); i++)
2690 possible_polymorphic_call_targets_1 (nodes, inserted,
2691 matched_vtables,
2692 otr_type,
2693 type->derived_types[i],
2694 otr_token, outer_type, offset, completep,
2695 bases_to_consider, consider_construction);
2698 /* Cache of queries for polymorphic call targets.
2700 Enumerating all call targets may get expensive when there are many
2701 polymorphic calls in the program, so we memoize all the previous
2702 queries and avoid duplicated work. */
2704 struct polymorphic_call_target_d
2706 HOST_WIDE_INT otr_token;
2707 ipa_polymorphic_call_context context;
2708 odr_type type;
2709 vec <cgraph_node *> targets;
2710 tree decl_warning;
2711 int type_warning;
2712 bool complete;
2713 bool speculative;
2716 /* Polymorphic call target cache helpers. */
2718 struct polymorphic_call_target_hasher
2719 : pointer_hash <polymorphic_call_target_d>
2721 static inline hashval_t hash (const polymorphic_call_target_d *);
2722 static inline bool equal (const polymorphic_call_target_d *,
2723 const polymorphic_call_target_d *);
2724 static inline void remove (polymorphic_call_target_d *);
2727 /* Return the computed hashcode for ODR_QUERY. */
2729 inline hashval_t
2730 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2732 inchash::hash hstate (odr_query->otr_token);
2734 hstate.add_wide_int (odr_query->type->id);
2735 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2736 hstate.add_wide_int (odr_query->context.offset);
2738 if (odr_query->context.speculative_outer_type)
2740 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2741 hstate.add_wide_int (odr_query->context.speculative_offset);
2743 hstate.add_flag (odr_query->speculative);
2744 hstate.add_flag (odr_query->context.maybe_in_construction);
2745 hstate.add_flag (odr_query->context.maybe_derived_type);
2746 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2747 hstate.commit_flag ();
2748 return hstate.end ();
2751 /* Compare cache entries T1 and T2. */
2753 inline bool
2754 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2755 const polymorphic_call_target_d *t2)
2757 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2758 && t1->speculative == t2->speculative
2759 && t1->context.offset == t2->context.offset
2760 && t1->context.speculative_offset == t2->context.speculative_offset
2761 && t1->context.outer_type == t2->context.outer_type
2762 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2763 && t1->context.maybe_in_construction
2764 == t2->context.maybe_in_construction
2765 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2766 && (t1->context.speculative_maybe_derived_type
2767 == t2->context.speculative_maybe_derived_type));
2770 /* Remove entry in polymorphic call target cache hash. */
2772 inline void
2773 polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
2775 v->targets.release ();
2776 free (v);
2779 /* Polymorphic call target query cache. */
2781 typedef hash_table<polymorphic_call_target_hasher>
2782 polymorphic_call_target_hash_type;
2783 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2785 /* Destroy polymorphic call target query cache. */
2787 static void
2788 free_polymorphic_call_targets_hash ()
2790 if (cached_polymorphic_call_targets)
2792 delete polymorphic_call_target_hash;
2793 polymorphic_call_target_hash = NULL;
2794 delete cached_polymorphic_call_targets;
2795 cached_polymorphic_call_targets = NULL;
2799 /* When virtual function is removed, we may need to flush the cache. */
2801 static void
2802 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2804 if (cached_polymorphic_call_targets
2805 && cached_polymorphic_call_targets->contains (n))
2806 free_polymorphic_call_targets_hash ();
2809 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2811 tree
2812 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2813 tree vtable)
2815 tree v = BINFO_VTABLE (binfo);
2816 int i;
2817 tree base_binfo;
2818 unsigned HOST_WIDE_INT this_offset;
2820 if (v)
2822 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2823 gcc_unreachable ();
2825 if (offset == this_offset
2826 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2827 return binfo;
2830 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2831 if (polymorphic_type_binfo_p (base_binfo))
2833 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2834 if (base_binfo)
2835 return base_binfo;
2837 return NULL;
2840 /* T is known constant value of virtual table pointer.
2841 Store virtual table to V and its offset to OFFSET.
2842 Return false if T does not look like virtual table reference. */
2844 bool
2845 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2846 unsigned HOST_WIDE_INT *offset)
2848 /* We expect &MEM[(void *)&virtual_table + 16B].
2849 We obtain object's BINFO from the context of the virtual table.
2850 This one contains pointer to virtual table represented via
2851 POINTER_PLUS_EXPR. Verify that this pointer matches what
2852 we propagated through.
2854 In the case of virtual inheritance, the virtual tables may
2855 be nested, i.e. the offset may be different from 16 and we may
2856 need to dive into the type representation. */
2857 if (TREE_CODE (t) == ADDR_EXPR
2858 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2859 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2860 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2861 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2862 == VAR_DECL)
2863 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2864 (TREE_OPERAND (t, 0), 0), 0)))
2866 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2867 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2868 return true;
2871 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2872 We need to handle it when T comes from static variable initializer or
2873 BINFO. */
2874 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2876 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2877 t = TREE_OPERAND (t, 0);
2879 else
2880 *offset = 0;
2882 if (TREE_CODE (t) != ADDR_EXPR)
2883 return false;
2884 *v = TREE_OPERAND (t, 0);
2885 return true;
2888 /* T is known constant value of virtual table pointer. Return BINFO of the
2889 instance type. */
2891 tree
2892 vtable_pointer_value_to_binfo (const_tree t)
2894 tree vtable;
2895 unsigned HOST_WIDE_INT offset;
2897 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2898 return NULL_TREE;
2900 /* FIXME: for stores of construction vtables we return NULL,
2901 because we do not have BINFO for those. Eventually we should fix
2902 our representation to allow this case to be handled, too.
2903 In the case we see store of BINFO we however may assume
2904 that standard folding will be able to cope with it. */
2905 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2906 offset, vtable);
2909 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2910 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2911 and insert them in NODES.
2913 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2915 static void
2916 record_targets_from_bases (tree otr_type,
2917 HOST_WIDE_INT otr_token,
2918 tree outer_type,
2919 HOST_WIDE_INT offset,
2920 vec <cgraph_node *> &nodes,
2921 hash_set<tree> *inserted,
2922 hash_set<tree> *matched_vtables,
2923 bool *completep)
2925 while (true)
2927 HOST_WIDE_INT pos, size;
2928 tree base_binfo;
2929 tree fld;
2931 if (types_same_for_odr (outer_type, otr_type))
2932 return;
2934 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2936 if (TREE_CODE (fld) != FIELD_DECL)
2937 continue;
2939 pos = int_bit_position (fld);
2940 size = tree_to_shwi (DECL_SIZE (fld));
2941 if (pos <= offset && (pos + size) > offset
2942 /* Do not get confused by zero sized bases. */
2943 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2944 break;
2946 /* Within a class type we should always find corresponding fields. */
2947 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2949 /* Nonbase types should have been stripped by outer_class_type. */
2950 gcc_assert (DECL_ARTIFICIAL (fld));
2952 outer_type = TREE_TYPE (fld);
2953 offset -= pos;
2955 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2956 offset, otr_type);
2957 if (!base_binfo)
2959 gcc_assert (odr_violation_reported);
2960 return;
2962 gcc_assert (base_binfo);
2963 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2965 bool can_refer;
2966 tree target = gimple_get_virt_method_for_binfo (otr_token,
2967 base_binfo,
2968 &can_refer);
2969 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2970 maybe_record_node (nodes, target, inserted, can_refer, completep);
2971 matched_vtables->add (BINFO_VTABLE (base_binfo));
2976 /* When virtual table is removed, we may need to flush the cache. */
2978 static void
2979 devirt_variable_node_removal_hook (varpool_node *n,
2980 void *d ATTRIBUTE_UNUSED)
2982 if (cached_polymorphic_call_targets
2983 && DECL_VIRTUAL_P (n->decl)
2984 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2985 free_polymorphic_call_targets_hash ();
2988 /* Record about how many calls would benefit from given type to be final. */
2990 struct odr_type_warn_count
2992 tree type;
2993 int count;
2994 gcov_type dyn_count;
2997 /* Record about how many calls would benefit from given method to be final. */
2999 struct decl_warn_count
3001 tree decl;
3002 int count;
3003 gcov_type dyn_count;
3006 /* Information about type and decl warnings. */
3008 struct final_warning_record
3010 gcov_type dyn_count;
3011 vec<odr_type_warn_count> type_warnings;
3012 hash_map<tree, decl_warn_count> decl_warnings;
3014 struct final_warning_record *final_warning_records;
3016 /* Return vector containing possible targets of polymorphic call of type
3017 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3018 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
3019 OTR_TYPE and include their virtual method. This is useful for types
3020 possibly in construction or destruction where the virtual table may
3021 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
3022 us to walk the inheritance graph for all derivations.
3024 If COMPLETEP is non-NULL, store true if the list is complete.
3025 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
3026 in the target cache. If user needs to visit every target list
3027 just once, it can memoize them.
3029 If SPECULATIVE is set, the list will not contain targets that
3030 are not speculatively taken.
3032 Returned vector is placed into cache. It is NOT caller's responsibility
3033 to free it. The vector can be freed on cgraph_remove_node call if
3034 the particular node is a virtual function present in the cache. */
3036 vec <cgraph_node *>
3037 possible_polymorphic_call_targets (tree otr_type,
3038 HOST_WIDE_INT otr_token,
3039 ipa_polymorphic_call_context context,
3040 bool *completep,
3041 void **cache_token,
3042 bool speculative)
3044 static struct cgraph_node_hook_list *node_removal_hook_holder;
3045 vec <cgraph_node *> nodes = vNULL;
3046 auto_vec <tree, 8> bases_to_consider;
3047 odr_type type, outer_type;
3048 polymorphic_call_target_d key;
3049 polymorphic_call_target_d **slot;
3050 unsigned int i;
3051 tree binfo, target;
3052 bool complete;
3053 bool can_refer = false;
3054 bool skipped = false;
3056 otr_type = TYPE_MAIN_VARIANT (otr_type);
3058 /* If ODR is not initialized or the context is invalid, return empty
3059 incomplete list. */
3060 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
3062 if (completep)
3063 *completep = context.invalid;
3064 if (cache_token)
3065 *cache_token = NULL;
3066 return nodes;
3069 /* Do not bother to compute speculative info when user do not asks for it. */
3070 if (!speculative || !context.speculative_outer_type)
3071 context.clear_speculation ();
3073 type = get_odr_type (otr_type, true);
3075 /* Recording type variants would waste results cache. */
3076 gcc_assert (!context.outer_type
3077 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3079 /* Look up the outer class type we want to walk.
3080 If we fail to do so, the context is invalid. */
3081 if ((context.outer_type || context.speculative_outer_type)
3082 && !context.restrict_to_inner_class (otr_type))
3084 if (completep)
3085 *completep = true;
3086 if (cache_token)
3087 *cache_token = NULL;
3088 return nodes;
3090 gcc_assert (!context.invalid);
3092 /* Check that restrict_to_inner_class kept the main variant. */
3093 gcc_assert (!context.outer_type
3094 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3096 /* We canonicalize our query, so we do not need extra hashtable entries. */
3098 /* Without outer type, we have no use for offset. Just do the
3099 basic search from inner type. */
3100 if (!context.outer_type)
3101 context.clear_outer_type (otr_type);
3102 /* We need to update our hierarchy if the type does not exist. */
3103 outer_type = get_odr_type (context.outer_type, true);
3104 /* If the type is complete, there are no derivations. */
3105 if (TYPE_FINAL_P (outer_type->type))
3106 context.maybe_derived_type = false;
3108 /* Initialize query cache. */
3109 if (!cached_polymorphic_call_targets)
3111 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3112 polymorphic_call_target_hash
3113 = new polymorphic_call_target_hash_type (23);
3114 if (!node_removal_hook_holder)
3116 node_removal_hook_holder =
3117 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3118 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3119 NULL);
3123 if (in_lto_p)
3125 if (context.outer_type != otr_type)
3126 context.outer_type
3127 = get_odr_type (context.outer_type, true)->type;
3128 if (context.speculative_outer_type)
3129 context.speculative_outer_type
3130 = get_odr_type (context.speculative_outer_type, true)->type;
3133 /* Look up cached answer. */
3134 key.type = type;
3135 key.otr_token = otr_token;
3136 key.speculative = speculative;
3137 key.context = context;
3138 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3139 if (cache_token)
3140 *cache_token = (void *)*slot;
3141 if (*slot)
3143 if (completep)
3144 *completep = (*slot)->complete;
3145 if ((*slot)->type_warning && final_warning_records)
3147 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3148 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3149 += final_warning_records->dyn_count;
3151 if (!speculative && (*slot)->decl_warning && final_warning_records)
3153 struct decl_warn_count *c =
3154 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3155 c->count++;
3156 c->dyn_count += final_warning_records->dyn_count;
3158 return (*slot)->targets;
3161 complete = true;
3163 /* Do actual search. */
3164 timevar_push (TV_IPA_VIRTUAL_CALL);
3165 *slot = XCNEW (polymorphic_call_target_d);
3166 if (cache_token)
3167 *cache_token = (void *)*slot;
3168 (*slot)->type = type;
3169 (*slot)->otr_token = otr_token;
3170 (*slot)->context = context;
3171 (*slot)->speculative = speculative;
3173 hash_set<tree> inserted;
3174 hash_set<tree> matched_vtables;
3176 /* First insert targets we speculatively identified as likely. */
3177 if (context.speculative_outer_type)
3179 odr_type speculative_outer_type;
3180 bool speculation_complete = true;
3182 /* First insert target from type itself and check if it may have
3183 derived types. */
3184 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3185 if (TYPE_FINAL_P (speculative_outer_type->type))
3186 context.speculative_maybe_derived_type = false;
3187 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3188 context.speculative_offset, otr_type);
3189 if (binfo)
3190 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3191 &can_refer);
3192 else
3193 target = NULL;
3195 /* In the case we get complete method, we don't need
3196 to walk derivations. */
3197 if (target && DECL_FINAL_P (target))
3198 context.speculative_maybe_derived_type = false;
3199 if (type_possibly_instantiated_p (speculative_outer_type->type))
3200 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3201 if (binfo)
3202 matched_vtables.add (BINFO_VTABLE (binfo));
3205 /* Next walk recursively all derived types. */
3206 if (context.speculative_maybe_derived_type)
3207 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3208 possible_polymorphic_call_targets_1 (nodes, &inserted,
3209 &matched_vtables,
3210 otr_type,
3211 speculative_outer_type->derived_types[i],
3212 otr_token, speculative_outer_type->type,
3213 context.speculative_offset,
3214 &speculation_complete,
3215 bases_to_consider,
3216 false);
3219 if (!speculative || !nodes.length ())
3221 /* First see virtual method of type itself. */
3222 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3223 context.offset, otr_type);
3224 if (binfo)
3225 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3226 &can_refer);
3227 else
3229 gcc_assert (odr_violation_reported);
3230 target = NULL;
3233 /* Destructors are never called through construction virtual tables,
3234 because the type is always known. */
3235 if (target && DECL_CXX_DESTRUCTOR_P (target))
3236 context.maybe_in_construction = false;
3238 if (target)
3240 /* In the case we get complete method, we don't need
3241 to walk derivations. */
3242 if (DECL_FINAL_P (target))
3243 context.maybe_derived_type = false;
3246 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3247 if (type_possibly_instantiated_p (outer_type->type))
3248 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3249 else
3250 skipped = true;
3252 if (binfo)
3253 matched_vtables.add (BINFO_VTABLE (binfo));
3255 /* Next walk recursively all derived types. */
3256 if (context.maybe_derived_type)
3258 for (i = 0; i < outer_type->derived_types.length(); i++)
3259 possible_polymorphic_call_targets_1 (nodes, &inserted,
3260 &matched_vtables,
3261 otr_type,
3262 outer_type->derived_types[i],
3263 otr_token, outer_type->type,
3264 context.offset, &complete,
3265 bases_to_consider,
3266 context.maybe_in_construction);
3268 if (!outer_type->all_derivations_known)
3270 if (!speculative && final_warning_records)
3272 if (complete
3273 && nodes.length () == 1
3274 && warn_suggest_final_types
3275 && !outer_type->derived_types.length ())
3277 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3278 final_warning_records->type_warnings.safe_grow_cleared
3279 (odr_types.length ());
3280 final_warning_records->type_warnings[outer_type->id].count++;
3281 final_warning_records->type_warnings[outer_type->id].dyn_count
3282 += final_warning_records->dyn_count;
3283 final_warning_records->type_warnings[outer_type->id].type
3284 = outer_type->type;
3285 (*slot)->type_warning = outer_type->id + 1;
3287 if (complete
3288 && warn_suggest_final_methods
3289 && nodes.length () == 1
3290 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3291 outer_type->type))
3293 bool existed;
3294 struct decl_warn_count &c =
3295 final_warning_records->decl_warnings.get_or_insert
3296 (nodes[0]->decl, &existed);
3298 if (existed)
3300 c.count++;
3301 c.dyn_count += final_warning_records->dyn_count;
3303 else
3305 c.count = 1;
3306 c.dyn_count = final_warning_records->dyn_count;
3307 c.decl = nodes[0]->decl;
3309 (*slot)->decl_warning = nodes[0]->decl;
3312 complete = false;
3316 if (!speculative)
3318 /* Destructors are never called through construction virtual tables,
3319 because the type is always known. One of entries may be
3320 cxa_pure_virtual so look to at least two of them. */
3321 if (context.maybe_in_construction)
3322 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3323 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3324 context.maybe_in_construction = false;
3325 if (context.maybe_in_construction)
3327 if (type != outer_type
3328 && (!skipped
3329 || (context.maybe_derived_type
3330 && !type_all_derivations_known_p (outer_type->type))))
3331 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3332 context.offset, nodes, &inserted,
3333 &matched_vtables, &complete);
3334 if (skipped)
3335 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3336 for (i = 0; i < bases_to_consider.length(); i++)
3337 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3342 (*slot)->targets = nodes;
3343 (*slot)->complete = complete;
3344 if (completep)
3345 *completep = complete;
3347 timevar_pop (TV_IPA_VIRTUAL_CALL);
3348 return nodes;
3351 bool
3352 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3353 vec<const decl_warn_count*> *vec)
3355 vec->safe_push (&value);
3356 return true;
3359 /* Dump target list TARGETS into FILE. */
3361 static void
3362 dump_targets (FILE *f, vec <cgraph_node *> targets)
3364 unsigned int i;
3366 for (i = 0; i < targets.length (); i++)
3368 char *name = NULL;
3369 if (in_lto_p)
3370 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3371 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3372 if (in_lto_p)
3373 free (name);
3374 if (!targets[i]->definition)
3375 fprintf (f, " (no definition%s)",
3376 DECL_DECLARED_INLINE_P (targets[i]->decl)
3377 ? " inline" : "");
3379 fprintf (f, "\n");
3382 /* Dump all possible targets of a polymorphic call. */
3384 void
3385 dump_possible_polymorphic_call_targets (FILE *f,
3386 tree otr_type,
3387 HOST_WIDE_INT otr_token,
3388 const ipa_polymorphic_call_context &ctx)
3390 vec <cgraph_node *> targets;
3391 bool final;
3392 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3393 unsigned int len;
3395 if (!type)
3396 return;
3397 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3398 ctx,
3399 &final, NULL, false);
3400 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3401 print_generic_expr (f, type->type, TDF_SLIM);
3402 fprintf (f, " token %i\n", (int)otr_token);
3404 ctx.dump (f);
3406 fprintf (f, " %s%s%s%s\n ",
3407 final ? "This is a complete list." :
3408 "This is partial list; extra targets may be defined in other units.",
3409 ctx.maybe_in_construction ? " (base types included)" : "",
3410 ctx.maybe_derived_type ? " (derived types included)" : "",
3411 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3412 len = targets.length ();
3413 dump_targets (f, targets);
3415 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3416 ctx,
3417 &final, NULL, true);
3418 if (targets.length () != len)
3420 fprintf (f, " Speculative targets:");
3421 dump_targets (f, targets);
3423 gcc_assert (targets.length () <= len);
3424 fprintf (f, "\n");
3428 /* Return true if N can be possibly target of a polymorphic call of
3429 OTR_TYPE/OTR_TOKEN. */
3431 bool
3432 possible_polymorphic_call_target_p (tree otr_type,
3433 HOST_WIDE_INT otr_token,
3434 const ipa_polymorphic_call_context &ctx,
3435 struct cgraph_node *n)
3437 vec <cgraph_node *> targets;
3438 unsigned int i;
3439 enum built_in_function fcode;
3440 bool final;
3442 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3443 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3444 == BUILT_IN_UNREACHABLE
3445 || fcode == BUILT_IN_TRAP))
3446 return true;
3448 if (!odr_hash)
3449 return true;
3450 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3451 for (i = 0; i < targets.length (); i++)
3452 if (n->semantically_equivalent_p (targets[i]))
3453 return true;
3455 /* At a moment we allow middle end to dig out new external declarations
3456 as a targets of polymorphic calls. */
3457 if (!final && !n->definition)
3458 return true;
3459 return false;
3464 /* Return true if N can be possibly target of a polymorphic call of
3465 OBJ_TYPE_REF expression REF in STMT. */
3467 bool
3468 possible_polymorphic_call_target_p (tree ref,
3469 gimple *stmt,
3470 struct cgraph_node *n)
3472 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3473 tree call_fn = gimple_call_fn (stmt);
3475 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3476 tree_to_uhwi
3477 (OBJ_TYPE_REF_TOKEN (call_fn)),
3478 context,
3483 /* After callgraph construction new external nodes may appear.
3484 Add them into the graph. */
3486 void
3487 update_type_inheritance_graph (void)
3489 struct cgraph_node *n;
3491 if (!odr_hash)
3492 return;
3493 free_polymorphic_call_targets_hash ();
3494 timevar_push (TV_IPA_INHERITANCE);
3495 /* We reconstruct the graph starting from types of all methods seen in the
3496 the unit. */
3497 FOR_EACH_FUNCTION (n)
3498 if (DECL_VIRTUAL_P (n->decl)
3499 && !n->definition
3500 && n->real_symbol_p ())
3501 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3502 timevar_pop (TV_IPA_INHERITANCE);
3506 /* Return true if N looks like likely target of a polymorphic call.
3507 Rule out cxa_pure_virtual, noreturns, function declared cold and
3508 other obvious cases. */
3510 bool
3511 likely_target_p (struct cgraph_node *n)
3513 int flags;
3514 /* cxa_pure_virtual and similar things are not likely. */
3515 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3516 return false;
3517 flags = flags_from_decl_or_type (n->decl);
3518 if (flags & ECF_NORETURN)
3519 return false;
3520 if (lookup_attribute ("cold",
3521 DECL_ATTRIBUTES (n->decl)))
3522 return false;
3523 if (n->frequency < NODE_FREQUENCY_NORMAL)
3524 return false;
3525 /* If there are no live virtual tables referring the target,
3526 the only way the target can be called is an instance coming from other
3527 compilation unit; speculative devirtualization is built around an
3528 assumption that won't happen. */
3529 if (!referenced_from_vtable_p (n))
3530 return false;
3531 return true;
3534 /* Compare type warning records P1 and P2 and choose one with larger count;
3535 helper for qsort. */
3538 type_warning_cmp (const void *p1, const void *p2)
3540 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3541 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3543 if (t1->dyn_count < t2->dyn_count)
3544 return 1;
3545 if (t1->dyn_count > t2->dyn_count)
3546 return -1;
3547 return t2->count - t1->count;
3550 /* Compare decl warning records P1 and P2 and choose one with larger count;
3551 helper for qsort. */
3554 decl_warning_cmp (const void *p1, const void *p2)
3556 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3557 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3559 if (t1->dyn_count < t2->dyn_count)
3560 return 1;
3561 if (t1->dyn_count > t2->dyn_count)
3562 return -1;
3563 return t2->count - t1->count;
3567 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3568 context CTX. */
3570 struct cgraph_node *
3571 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3572 ipa_polymorphic_call_context ctx)
3574 vec <cgraph_node *>targets
3575 = possible_polymorphic_call_targets
3576 (otr_type, otr_token, ctx, NULL, NULL, true);
3577 unsigned int i;
3578 struct cgraph_node *likely_target = NULL;
3580 for (i = 0; i < targets.length (); i++)
3581 if (likely_target_p (targets[i]))
3583 if (likely_target)
3584 return NULL;
3585 likely_target = targets[i];
3587 if (!likely_target
3588 ||!likely_target->definition
3589 || DECL_EXTERNAL (likely_target->decl))
3590 return NULL;
3592 /* Don't use an implicitly-declared destructor (c++/58678). */
3593 struct cgraph_node *non_thunk_target
3594 = likely_target->function_symbol ();
3595 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3596 return NULL;
3597 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3598 && likely_target->can_be_discarded_p ())
3599 return NULL;
3600 return likely_target;
3603 /* The ipa-devirt pass.
3604 When polymorphic call has only one likely target in the unit,
3605 turn it into a speculative call. */
3607 static unsigned int
3608 ipa_devirt (void)
3610 struct cgraph_node *n;
3611 hash_set<void *> bad_call_targets;
3612 struct cgraph_edge *e;
3614 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3615 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3616 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3617 int ndropped = 0;
3619 if (!odr_types_ptr)
3620 return 0;
3622 if (dump_file)
3623 dump_type_inheritance_graph (dump_file);
3625 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3626 This is implemented by setting up final_warning_records that are updated
3627 by get_polymorphic_call_targets.
3628 We need to clear cache in this case to trigger recomputation of all
3629 entries. */
3630 if (warn_suggest_final_methods || warn_suggest_final_types)
3632 final_warning_records = new (final_warning_record);
3633 final_warning_records->type_warnings = vNULL;
3634 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3635 free_polymorphic_call_targets_hash ();
3638 FOR_EACH_DEFINED_FUNCTION (n)
3640 bool update = false;
3641 if (!opt_for_fn (n->decl, flag_devirtualize))
3642 continue;
3643 if (dump_file && n->indirect_calls)
3644 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
3645 n->name (), n->order);
3646 for (e = n->indirect_calls; e; e = e->next_callee)
3647 if (e->indirect_info->polymorphic)
3649 struct cgraph_node *likely_target = NULL;
3650 void *cache_token;
3651 bool final;
3653 if (final_warning_records)
3654 final_warning_records->dyn_count = e->count;
3656 vec <cgraph_node *>targets
3657 = possible_polymorphic_call_targets
3658 (e, &final, &cache_token, true);
3659 unsigned int i;
3661 /* Trigger warnings by calculating non-speculative targets. */
3662 if (warn_suggest_final_methods || warn_suggest_final_types)
3663 possible_polymorphic_call_targets (e);
3665 if (dump_file)
3666 dump_possible_polymorphic_call_targets
3667 (dump_file, e);
3669 npolymorphic++;
3671 /* See if the call can be devirtualized by means of ipa-prop's
3672 polymorphic call context propagation. If not, we can just
3673 forget about this call being polymorphic and avoid some heavy
3674 lifting in remove_unreachable_nodes that will otherwise try to
3675 keep all possible targets alive until inlining and in the inliner
3676 itself.
3678 This may need to be revisited once we add further ways to use
3679 the may edges, but it is a resonable thing to do right now. */
3681 if ((e->indirect_info->param_index == -1
3682 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3683 && e->indirect_info->vptr_changed))
3684 && !flag_ltrans_devirtualize)
3686 e->indirect_info->polymorphic = false;
3687 ndropped++;
3688 if (dump_file)
3689 fprintf (dump_file, "Dropping polymorphic call info;"
3690 " it can not be used by ipa-prop\n");
3693 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3694 continue;
3696 if (!e->maybe_hot_p ())
3698 if (dump_file)
3699 fprintf (dump_file, "Call is cold\n\n");
3700 ncold++;
3701 continue;
3703 if (e->speculative)
3705 if (dump_file)
3706 fprintf (dump_file, "Call is already speculated\n\n");
3707 nspeculated++;
3709 /* When dumping see if we agree with speculation. */
3710 if (!dump_file)
3711 continue;
3713 if (bad_call_targets.contains (cache_token))
3715 if (dump_file)
3716 fprintf (dump_file, "Target list is known to be useless\n\n");
3717 nmultiple++;
3718 continue;
3720 for (i = 0; i < targets.length (); i++)
3721 if (likely_target_p (targets[i]))
3723 if (likely_target)
3725 likely_target = NULL;
3726 if (dump_file)
3727 fprintf (dump_file, "More than one likely target\n\n");
3728 nmultiple++;
3729 break;
3731 likely_target = targets[i];
3733 if (!likely_target)
3735 bad_call_targets.add (cache_token);
3736 continue;
3738 /* This is reached only when dumping; check if we agree or disagree
3739 with the speculation. */
3740 if (e->speculative)
3742 struct cgraph_edge *e2;
3743 struct ipa_ref *ref;
3744 e->speculative_call_info (e2, e, ref);
3745 if (e2->callee->ultimate_alias_target ()
3746 == likely_target->ultimate_alias_target ())
3748 fprintf (dump_file, "We agree with speculation\n\n");
3749 nok++;
3751 else
3753 fprintf (dump_file, "We disagree with speculation\n\n");
3754 nwrong++;
3756 continue;
3758 if (!likely_target->definition)
3760 if (dump_file)
3761 fprintf (dump_file, "Target is not a definition\n\n");
3762 nnotdefined++;
3763 continue;
3765 /* Do not introduce new references to external symbols. While we
3766 can handle these just well, it is common for programs to
3767 incorrectly with headers defining methods they are linked
3768 with. */
3769 if (DECL_EXTERNAL (likely_target->decl))
3771 if (dump_file)
3772 fprintf (dump_file, "Target is external\n\n");
3773 nexternal++;
3774 continue;
3776 /* Don't use an implicitly-declared destructor (c++/58678). */
3777 struct cgraph_node *non_thunk_target
3778 = likely_target->function_symbol ();
3779 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3781 if (dump_file)
3782 fprintf (dump_file, "Target is artificial\n\n");
3783 nartificial++;
3784 continue;
3786 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3787 && likely_target->can_be_discarded_p ())
3789 if (dump_file)
3790 fprintf (dump_file, "Target is overwritable\n\n");
3791 noverwritable++;
3792 continue;
3794 else if (dbg_cnt (devirt))
3796 if (dump_enabled_p ())
3798 location_t locus = gimple_location_safe (e->call_stmt);
3799 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3800 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3801 n->name (), n->order,
3802 likely_target->name (),
3803 likely_target->order);
3805 if (!likely_target->can_be_discarded_p ())
3807 cgraph_node *alias;
3808 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3809 if (alias)
3810 likely_target = alias;
3812 nconverted++;
3813 update = true;
3814 e->make_speculative
3815 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3818 if (update)
3819 inline_update_overall_summary (n);
3821 if (warn_suggest_final_methods || warn_suggest_final_types)
3823 if (warn_suggest_final_types)
3825 final_warning_records->type_warnings.qsort (type_warning_cmp);
3826 for (unsigned int i = 0;
3827 i < final_warning_records->type_warnings.length (); i++)
3828 if (final_warning_records->type_warnings[i].count)
3830 tree type = final_warning_records->type_warnings[i].type;
3831 int count = final_warning_records->type_warnings[i].count;
3832 long long dyn_count
3833 = final_warning_records->type_warnings[i].dyn_count;
3835 if (!dyn_count)
3836 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3837 OPT_Wsuggest_final_types, count,
3838 "Declaring type %qD final "
3839 "would enable devirtualization of %i call",
3840 "Declaring type %qD final "
3841 "would enable devirtualization of %i calls",
3842 type,
3843 count);
3844 else
3845 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3846 OPT_Wsuggest_final_types, count,
3847 "Declaring type %qD final "
3848 "would enable devirtualization of %i call "
3849 "executed %lli times",
3850 "Declaring type %qD final "
3851 "would enable devirtualization of %i calls "
3852 "executed %lli times",
3853 type,
3854 count,
3855 dyn_count);
3859 if (warn_suggest_final_methods)
3861 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3863 final_warning_records->decl_warnings.traverse
3864 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3865 decl_warnings_vec.qsort (decl_warning_cmp);
3866 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3868 tree decl = decl_warnings_vec[i]->decl;
3869 int count = decl_warnings_vec[i]->count;
3870 long long dyn_count = decl_warnings_vec[i]->dyn_count;
3872 if (!dyn_count)
3873 if (DECL_CXX_DESTRUCTOR_P (decl))
3874 warning_n (DECL_SOURCE_LOCATION (decl),
3875 OPT_Wsuggest_final_methods, count,
3876 "Declaring virtual destructor of %qD final "
3877 "would enable devirtualization of %i call",
3878 "Declaring virtual destructor of %qD final "
3879 "would enable devirtualization of %i calls",
3880 DECL_CONTEXT (decl), count);
3881 else
3882 warning_n (DECL_SOURCE_LOCATION (decl),
3883 OPT_Wsuggest_final_methods, count,
3884 "Declaring method %qD final "
3885 "would enable devirtualization of %i call",
3886 "Declaring method %qD final "
3887 "would enable devirtualization of %i calls",
3888 decl, count);
3889 else if (DECL_CXX_DESTRUCTOR_P (decl))
3890 warning_n (DECL_SOURCE_LOCATION (decl),
3891 OPT_Wsuggest_final_methods, count,
3892 "Declaring virtual destructor of %qD final "
3893 "would enable devirtualization of %i call "
3894 "executed %lli times",
3895 "Declaring virtual destructor of %qD final "
3896 "would enable devirtualization of %i calls "
3897 "executed %lli times",
3898 DECL_CONTEXT (decl), count, dyn_count);
3899 else
3900 warning_n (DECL_SOURCE_LOCATION (decl),
3901 OPT_Wsuggest_final_methods, count,
3902 "Declaring method %qD final "
3903 "would enable devirtualization of %i call "
3904 "executed %lli times",
3905 "Declaring method %qD final "
3906 "would enable devirtualization of %i calls "
3907 "executed %lli times",
3908 decl, count, dyn_count);
3912 delete (final_warning_records);
3913 final_warning_records = 0;
3916 if (dump_file)
3917 fprintf (dump_file,
3918 "%i polymorphic calls, %i devirtualized,"
3919 " %i speculatively devirtualized, %i cold\n"
3920 "%i have multiple targets, %i overwritable,"
3921 " %i already speculated (%i agree, %i disagree),"
3922 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3923 npolymorphic, ndevirtualized, nconverted, ncold,
3924 nmultiple, noverwritable, nspeculated, nok, nwrong,
3925 nexternal, nnotdefined, nartificial, ndropped);
3926 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3929 namespace {
3931 const pass_data pass_data_ipa_devirt =
3933 IPA_PASS, /* type */
3934 "devirt", /* name */
3935 OPTGROUP_NONE, /* optinfo_flags */
3936 TV_IPA_DEVIRT, /* tv_id */
3937 0, /* properties_required */
3938 0, /* properties_provided */
3939 0, /* properties_destroyed */
3940 0, /* todo_flags_start */
3941 ( TODO_dump_symtab ), /* todo_flags_finish */
3944 class pass_ipa_devirt : public ipa_opt_pass_d
3946 public:
3947 pass_ipa_devirt (gcc::context *ctxt)
3948 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3949 NULL, /* generate_summary */
3950 NULL, /* write_summary */
3951 NULL, /* read_summary */
3952 NULL, /* write_optimization_summary */
3953 NULL, /* read_optimization_summary */
3954 NULL, /* stmt_fixup */
3955 0, /* function_transform_todo_flags_start */
3956 NULL, /* function_transform */
3957 NULL) /* variable_transform */
3960 /* opt_pass methods: */
3961 virtual bool gate (function *)
3963 /* In LTO, always run the IPA passes and decide on function basis if the
3964 pass is enabled. */
3965 if (in_lto_p)
3966 return true;
3967 return (flag_devirtualize
3968 && (flag_devirtualize_speculatively
3969 || (warn_suggest_final_methods
3970 || warn_suggest_final_types))
3971 && optimize);
3974 virtual unsigned int execute (function *) { return ipa_devirt (); }
3976 }; // class pass_ipa_devirt
3978 } // anon namespace
3980 ipa_opt_pass_d *
3981 make_pass_ipa_devirt (gcc::context *ctxt)
3983 return new pass_ipa_devirt (ctxt);
3986 #include "gt-ipa-devirt.h"