* config/sparc/sparc.c (sparc_option_override): Set MASK_FSMULD flag
[official-gcc.git] / gcc / ipa-devirt.c
blob9781acd0766334a644a133c2f94b09b48fbe7f9d
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2017 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
 63      inheritance the individual virtual tables are pointed to by
 64      BINFO_VTABLE of base binfos (which differ from BINFO_VTABLE of the
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102     possible_polymorphic_call_targets returns, given the parameters found on an
103     indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "rtl.h"
113 #include "tree.h"
114 #include "gimple.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
117 #include "cgraph.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
121 #include "calls.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
129 #include "dbgcnt.h"
130 #include "gimple-pretty-print.h"
131 #include "intl.h"
133 /* Hash based set of pairs of types. */
134 struct type_pair
136 tree first;
137 tree second;
140 template <>
141 struct default_hash_traits <type_pair>
142 : typed_noop_remove <type_pair>
144 GTY((skip)) typedef type_pair value_type;
145 GTY((skip)) typedef type_pair compare_type;
146 static hashval_t
147 hash (type_pair p)
149 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
151 static bool
152 is_empty (type_pair p)
154 return p.first == NULL;
156 static bool
157 is_deleted (type_pair p ATTRIBUTE_UNUSED)
159 return false;
161 static bool
162 equal (const type_pair &a, const type_pair &b)
164 return a.first==b.first && a.second == b.second;
166 static void
167 mark_empty (type_pair &e)
169 e.first = NULL;
173 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
174 hash_set<type_pair> *,
175 location_t, location_t);
177 static bool odr_violation_reported = false;
180 /* Pointer set of all call targets appearing in the cache. */
181 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
183 /* The node of type inheritance graph. For each type unique in
184 One Definition Rule (ODR) sense, we produce one node linking all
185 main variants of types equivalent to it, bases and derived types. */
187 struct GTY(()) odr_type_d
189 /* leader type. */
190 tree type;
191 /* All bases; built only for main variants of types. */
192 vec<odr_type> GTY((skip)) bases;
193 /* All derived types with virtual methods seen in unit;
194 built only for main variants of types. */
195 vec<odr_type> GTY((skip)) derived_types;
197 /* All equivalent types, if more than one. */
198 vec<tree, va_gc> *types;
199 /* Set of all equivalent types, if NON-NULL. */
200 hash_set<tree> * GTY((skip)) types_set;
202 /* Unique ID indexing the type in odr_types array. */
203 int id;
204 /* Is it in anonymous namespace? */
205 bool anonymous_namespace;
206 /* Do we know about all derivations of given type? */
207 bool all_derivations_known;
208 /* Did we report ODR violation here? */
209 bool odr_violated;
210 /* Set when virtual table without RTTI previaled table with. */
211 bool rtti_broken;
214 /* Return TRUE if all derived types of T are known and thus
215 we may consider the walk of derived type complete.
217 This is typically true only for final anonymous namespace types and types
218 defined within functions (that may be COMDAT and thus shared across units,
219 but with the same set of derived types). */
221 bool
222 type_all_derivations_known_p (const_tree t)
224 if (TYPE_FINAL_P (t))
225 return true;
226 if (flag_ltrans)
227 return false;
228 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
229 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
230 return true;
231 if (type_in_anonymous_namespace_p (t))
232 return true;
233 return (decl_function_context (TYPE_NAME (t)) != NULL);
236 /* Return TRUE if type's constructors are all visible. */
238 static bool
239 type_all_ctors_visible_p (tree t)
241 return !flag_ltrans
242 && symtab->state >= CONSTRUCTION
243 /* We can not always use type_all_derivations_known_p.
244 For function local types we must assume case where
245 the function is COMDAT and shared in between units.
247 TODO: These cases are quite easy to get, but we need
248 to keep track of C++ privatizing via -Wno-weak
249 as well as the IPA privatizing. */
250 && type_in_anonymous_namespace_p (t);
253 /* Return TRUE if type may have instance. */
255 static bool
256 type_possibly_instantiated_p (tree t)
258 tree vtable;
259 varpool_node *vnode;
261 /* TODO: Add abstract types here. */
262 if (!type_all_ctors_visible_p (t))
263 return true;
265 vtable = BINFO_VTABLE (TYPE_BINFO (t));
266 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
267 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
268 vnode = varpool_node::get (vtable);
269 return vnode && vnode->definition;
272 /* Hash used to unify ODR types based on their mangled name and for anonymous
273 namespace types. */
275 struct odr_name_hasher : pointer_hash <odr_type_d>
277 typedef union tree_node *compare_type;
278 static inline hashval_t hash (const odr_type_d *);
279 static inline bool equal (const odr_type_d *, const tree_node *);
280 static inline void remove (odr_type_d *);
283 /* Has used to unify ODR types based on their associated virtual table.
284 This hash is needed to keep -fno-lto-odr-type-merging to work and contains
285 only polymorphic types. Types with mangled names are inserted to both. */
287 struct odr_vtable_hasher:odr_name_hasher
289 static inline hashval_t hash (const odr_type_d *);
290 static inline bool equal (const odr_type_d *, const tree_node *);
293 /* Return type that was declared with T's name so that T is an
294 qualified variant of it. */
296 static inline tree
297 main_odr_variant (const_tree t)
299 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
300 return TREE_TYPE (TYPE_NAME (t));
301 /* Unnamed types and non-C++ produced types can be compared by variants. */
302 else
303 return TYPE_MAIN_VARIANT (t);
306 static bool
307 can_be_name_hashed_p (tree t)
309 return (!in_lto_p || odr_type_p (t));
312 /* Hash type by its ODR name. */
314 static hashval_t
315 hash_odr_name (const_tree t)
317 gcc_checking_assert (main_odr_variant (t) == t);
319 /* If not in LTO, all main variants are unique, so we can do
320 pointer hash. */
321 if (!in_lto_p)
322 return htab_hash_pointer (t);
324 /* Anonymous types are unique. */
325 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
326 return htab_hash_pointer (t);
328 gcc_checking_assert (TYPE_NAME (t)
329 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
330 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
333 /* Return the computed hashcode for ODR_TYPE. */
335 inline hashval_t
336 odr_name_hasher::hash (const odr_type_d *odr_type)
338 return hash_odr_name (odr_type->type);
341 static bool
342 can_be_vtable_hashed_p (tree t)
344 /* vtable hashing can distinguish only main variants. */
345 if (TYPE_MAIN_VARIANT (t) != t)
346 return false;
347 /* Anonymous namespace types are always handled by name hash. */
348 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
349 return false;
350 return (TREE_CODE (t) == RECORD_TYPE
351 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
354 /* Hash type by assembler name of its vtable. */
356 static hashval_t
357 hash_odr_vtable (const_tree t)
359 tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
360 inchash::hash hstate;
362 gcc_checking_assert (in_lto_p);
363 gcc_checking_assert (!type_in_anonymous_namespace_p (t));
364 gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
365 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
366 gcc_checking_assert (main_odr_variant (t) == t);
368 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
370 add_expr (TREE_OPERAND (v, 1), hstate);
371 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
374 hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
375 return hstate.end ();
378 /* Return the computed hashcode for ODR_TYPE. */
380 inline hashval_t
381 odr_vtable_hasher::hash (const odr_type_d *odr_type)
383 return hash_odr_vtable (odr_type->type);
386 /* For languages with One Definition Rule, work out if
387 types are the same based on their name.
389 This is non-trivial for LTO where minor differences in
390 the type representation may have prevented type merging
391 to merge two copies of otherwise equivalent type.
393 Until we start streaming mangled type names, this function works
394 only for polymorphic types.
396 When STRICT is true, we compare types by their names for purposes of
397 ODR violation warnings. When strict is false, we consider variants
398 equivalent, because it is all that matters for devirtualization machinery.
401 bool
402 types_same_for_odr (const_tree type1, const_tree type2, bool strict)
404 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
406 type1 = main_odr_variant (type1);
407 type2 = main_odr_variant (type2);
408 if (!strict)
410 type1 = TYPE_MAIN_VARIANT (type1);
411 type2 = TYPE_MAIN_VARIANT (type2);
414 if (type1 == type2)
415 return true;
417 if (!in_lto_p)
418 return false;
420 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
421 on the corresponding TYPE_STUB_DECL. */
422 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
423 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
424 return false;
427 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
429 Ideally we should never need types without ODR names here. It can however
430 happen in two cases:
432 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
433 Here testing for equivalence is safe, since their MAIN_VARIANTs are
434 unique.
435 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
436 establish precise ODR equivalency, but for correctness we care only
437 about equivalency on complete polymorphic types. For these we can
438 compare assembler names of their virtual tables. */
439 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
440 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
442 /* See if types are obviously different (i.e. different codes
443 or polymorphic wrt non-polymorphic). This is not strictly correct
444 for ODR violating programs, but we can't do better without streaming
445 ODR names. */
446 if (TREE_CODE (type1) != TREE_CODE (type2))
447 return false;
448 if (TREE_CODE (type1) == RECORD_TYPE
449 && (TYPE_BINFO (type1) == NULL_TREE)
450 != (TYPE_BINFO (type2) == NULL_TREE))
451 return false;
452 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
453 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
454 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
455 return false;
457 /* At the moment we have no way to establish ODR equivalence at LTO
458 other than comparing virtual table pointers of polymorphic types.
459 Eventually we should start saving mangled names in TYPE_NAME.
460 Then this condition will become non-trivial. */
462 if (TREE_CODE (type1) == RECORD_TYPE
463 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
464 && BINFO_VTABLE (TYPE_BINFO (type1))
465 && BINFO_VTABLE (TYPE_BINFO (type2)))
467 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
468 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
469 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
470 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
471 return (operand_equal_p (TREE_OPERAND (v1, 1),
472 TREE_OPERAND (v2, 1), 0)
473 && DECL_ASSEMBLER_NAME
474 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
475 == DECL_ASSEMBLER_NAME
476 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
478 gcc_unreachable ();
480 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
481 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
484 /* Return true if we can decide on ODR equivalency.
486 In non-LTO it is always decide, in LTO however it depends in the type has
487 ODR info attached.
489 When STRICT is false, compare main variants. */
491 bool
492 types_odr_comparable (tree t1, tree t2, bool strict)
494 return (!in_lto_p
495 || (strict ? (main_odr_variant (t1) == main_odr_variant (t2)
496 && main_odr_variant (t1))
497 : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
498 || (odr_type_p (t1) && odr_type_p (t2))
499 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
500 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
501 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
502 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
505 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
506 known, be conservative and return false. */
508 bool
509 types_must_be_same_for_odr (tree t1, tree t2)
511 if (types_odr_comparable (t1, t2))
512 return types_same_for_odr (t1, t2);
513 else
514 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
517 /* If T is compound type, return type it is based on. */
519 static tree
520 compound_type_base (const_tree t)
522 if (TREE_CODE (t) == ARRAY_TYPE
523 || POINTER_TYPE_P (t)
524 || TREE_CODE (t) == COMPLEX_TYPE
525 || VECTOR_TYPE_P (t))
526 return TREE_TYPE (t);
527 if (TREE_CODE (t) == METHOD_TYPE)
528 return TYPE_METHOD_BASETYPE (t);
529 if (TREE_CODE (t) == OFFSET_TYPE)
530 return TYPE_OFFSET_BASETYPE (t);
531 return NULL_TREE;
534 /* Return true if T is either ODR type or compound type based from it.
535 If the function return true, we know that T is a type originating from C++
536 source even at link-time. */
538 bool
539 odr_or_derived_type_p (const_tree t)
543 if (odr_type_p (t))
544 return true;
545 /* Function type is a tricky one. Basically we can consider it
546 ODR derived if return type or any of the parameters is.
547 We need to check all parameters because LTO streaming merges
548 common types (such as void) and they are not considered ODR then. */
549 if (TREE_CODE (t) == FUNCTION_TYPE)
551 if (TYPE_METHOD_BASETYPE (t))
552 t = TYPE_METHOD_BASETYPE (t);
553 else
555 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
556 return true;
557 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
558 if (odr_or_derived_type_p (TREE_VALUE (t)))
559 return true;
560 return false;
563 else
564 t = compound_type_base (t);
566 while (t);
567 return t;
570 /* Compare types T1 and T2 and return true if they are
571 equivalent. */
573 inline bool
574 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
576 tree t1 = o1->type;
578 gcc_checking_assert (main_odr_variant (t2) == t2);
579 gcc_checking_assert (main_odr_variant (t1) == t1);
580 if (t1 == t2)
581 return true;
582 if (!in_lto_p)
583 return false;
584 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
585 on the corresponding TYPE_STUB_DECL. */
586 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
587 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
588 return false;
589 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
590 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
591 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
592 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
595 /* Compare types T1 and T2 and return true if they are
596 equivalent. */
598 inline bool
599 odr_vtable_hasher::equal (const odr_type_d *o1, const tree_node *t2)
601 tree t1 = o1->type;
603 gcc_checking_assert (main_odr_variant (t2) == t2);
604 gcc_checking_assert (main_odr_variant (t1) == t1);
605 gcc_checking_assert (in_lto_p);
606 t1 = TYPE_MAIN_VARIANT (t1);
607 t2 = TYPE_MAIN_VARIANT (t2);
608 if (t1 == t2)
609 return true;
610 tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
611 tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
612 return (operand_equal_p (TREE_OPERAND (v1, 1),
613 TREE_OPERAND (v2, 1), 0)
614 && DECL_ASSEMBLER_NAME
615 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
616 == DECL_ASSEMBLER_NAME
617 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
620 /* Free ODR type V. */
622 inline void
623 odr_name_hasher::remove (odr_type_d *v)
625 v->bases.release ();
626 v->derived_types.release ();
627 if (v->types_set)
628 delete v->types_set;
629 ggc_free (v);
632 /* ODR type hash used to look up ODR type based on tree type node. */
634 typedef hash_table<odr_name_hasher> odr_hash_type;
635 static odr_hash_type *odr_hash;
636 typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
637 static odr_vtable_hash_type *odr_vtable_hash;
639 /* ODR types are also stored into ODR_TYPE vector to allow consistent
640 walking. Bases appear before derived types. Vector is garbage collected
641 so we won't end up visiting empty types. */
643 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
644 #define odr_types (*odr_types_ptr)
646 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
647 void
648 set_type_binfo (tree type, tree binfo)
650 for (; type; type = TYPE_NEXT_VARIANT (type))
651 if (COMPLETE_TYPE_P (type))
652 TYPE_BINFO (type) = binfo;
653 else
654 gcc_assert (!TYPE_BINFO (type));
657 /* Compare T2 and T2 based on name or structure. */
659 static bool
660 odr_subtypes_equivalent_p (tree t1, tree t2,
661 hash_set<type_pair> *visited,
662 location_t loc1, location_t loc2)
665 /* This can happen in incomplete types that should be handled earlier. */
666 gcc_assert (t1 && t2);
668 t1 = main_odr_variant (t1);
669 t2 = main_odr_variant (t2);
670 if (t1 == t2)
671 return true;
673 /* Anonymous namespace types must match exactly. */
674 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
675 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
676 return false;
678 /* For ODR types be sure to compare their names.
679 To support -wno-odr-type-merging we allow one type to be non-ODR
680 and other ODR even though it is a violation. */
681 if (types_odr_comparable (t1, t2, true))
683 if (!types_same_for_odr (t1, t2, true))
684 return false;
685 /* Limit recursion: If subtypes are ODR types and we know
686 that they are same, be happy. */
687 if (!odr_type_p (t1) || !get_odr_type (t1, true)->odr_violated)
688 return true;
691 /* Component types, builtins and possibly violating ODR types
692 have to be compared structurally. */
693 if (TREE_CODE (t1) != TREE_CODE (t2))
694 return false;
695 if (AGGREGATE_TYPE_P (t1)
696 && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
697 return false;
699 type_pair pair={t1,t2};
700 if (TYPE_UID (t1) > TYPE_UID (t2))
702 pair.first = t2;
703 pair.second = t1;
705 if (visited->add (pair))
706 return true;
707 return odr_types_equivalent_p (t1, t2, false, NULL, visited, loc1, loc2);
710 /* Return true if DECL1 and DECL2 are identical methods. Consider
711 name equivalent to name.localalias.xyz. */
713 static bool
714 methods_equal_p (tree decl1, tree decl2)
716 if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
717 return true;
718 const char sep = symbol_table::symbol_suffix_separator ();
720 const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
721 const char *ptr1 = strchr (name1, sep);
722 int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
724 const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
725 const char *ptr2 = strchr (name2, sep);
726 int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
728 if (len1 != len2)
729 return false;
730 return !strncmp (name1, name2, len1);
733 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
734 violation warnings. */
736 void
737 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
739 int n1, n2;
741 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
743 odr_violation_reported = true;
744 if (DECL_VIRTUAL_P (prevailing->decl))
746 varpool_node *tmp = prevailing;
747 prevailing = vtable;
748 vtable = tmp;
750 if (warning_at (DECL_SOURCE_LOCATION
751 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
752 OPT_Wodr,
753 "virtual table of type %qD violates one definition rule",
754 DECL_CONTEXT (vtable->decl)))
755 inform (DECL_SOURCE_LOCATION (prevailing->decl),
756 "variable of same assembler name as the virtual table is "
757 "defined in another translation unit");
758 return;
760 if (!prevailing->definition || !vtable->definition)
761 return;
763 /* If we do not stream ODR type info, do not bother to do useful compare. */
764 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
765 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
766 return;
768 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
770 if (class_type->odr_violated)
771 return;
773 for (n1 = 0, n2 = 0; true; n1++, n2++)
775 struct ipa_ref *ref1, *ref2;
776 bool end1, end2;
778 end1 = !prevailing->iterate_reference (n1, ref1);
779 end2 = !vtable->iterate_reference (n2, ref2);
781 /* !DECL_VIRTUAL_P means RTTI entry;
782 We warn when RTTI is lost because non-RTTI previals; we silently
783 accept the other case. */
784 while (!end2
785 && (end1
786 || (methods_equal_p (ref1->referred->decl,
787 ref2->referred->decl)
788 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
789 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
791 if (!class_type->rtti_broken
792 && warning_at (DECL_SOURCE_LOCATION
793 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
794 OPT_Wodr,
795 "virtual table of type %qD contains RTTI "
796 "information",
797 DECL_CONTEXT (vtable->decl)))
799 inform (DECL_SOURCE_LOCATION
800 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
801 "but is prevailed by one without from other translation "
802 "unit");
803 inform (DECL_SOURCE_LOCATION
804 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
805 "RTTI will not work on this type");
806 class_type->rtti_broken = true;
808 n2++;
809 end2 = !vtable->iterate_reference (n2, ref2);
811 while (!end1
812 && (end2
813 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
814 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
815 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
817 n1++;
818 end1 = !prevailing->iterate_reference (n1, ref1);
821 /* Finished? */
822 if (end1 && end2)
824 /* Extra paranoia; compare the sizes. We do not have information
825 about virtual inheritance offsets, so just be sure that these
826 match.
827 Do this as very last check so the not very informative error
828 is not output too often. */
829 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
831 class_type->odr_violated = true;
832 if (warning_at (DECL_SOURCE_LOCATION
833 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
834 OPT_Wodr,
835 "virtual table of type %qD violates "
836 "one definition rule ",
837 DECL_CONTEXT (vtable->decl)))
839 inform (DECL_SOURCE_LOCATION
840 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
841 "the conflicting type defined in another translation "
842 "unit has virtual table of different size");
845 return;
848 if (!end1 && !end2)
850 if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
851 continue;
853 class_type->odr_violated = true;
855 /* If the loops above stopped on non-virtual pointer, we have
856 mismatch in RTTI information mangling. */
857 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
858 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
860 if (warning_at (DECL_SOURCE_LOCATION
861 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
862 OPT_Wodr,
863 "virtual table of type %qD violates "
864 "one definition rule ",
865 DECL_CONTEXT (vtable->decl)))
867 inform (DECL_SOURCE_LOCATION
868 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
869 "the conflicting type defined in another translation "
870 "unit with different RTTI information");
872 return;
874 /* At this point both REF1 and REF2 points either to virtual table
875 or virtual method. If one points to virtual table and other to
876 method we can complain the same way as if one table was shorter
877 than other pointing out the extra method. */
878 if (TREE_CODE (ref1->referred->decl)
879 != TREE_CODE (ref2->referred->decl))
881 if (VAR_P (ref1->referred->decl))
882 end1 = true;
883 else if (VAR_P (ref2->referred->decl))
884 end2 = true;
888 class_type->odr_violated = true;
890 /* Complain about size mismatch. Either we have too many virutal
891 functions or too many virtual table pointers. */
892 if (end1 || end2)
894 if (end1)
896 varpool_node *tmp = prevailing;
897 prevailing = vtable;
898 vtable = tmp;
899 ref1 = ref2;
901 if (warning_at (DECL_SOURCE_LOCATION
902 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
903 OPT_Wodr,
904 "virtual table of type %qD violates "
905 "one definition rule",
906 DECL_CONTEXT (vtable->decl)))
908 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
910 inform (DECL_SOURCE_LOCATION
911 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
912 "the conflicting type defined in another translation "
913 "unit");
914 inform (DECL_SOURCE_LOCATION
915 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
916 "contains additional virtual method %qD",
917 ref1->referred->decl);
919 else
921 inform (DECL_SOURCE_LOCATION
922 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
923 "the conflicting type defined in another translation "
924 "unit has virtual table with more entries");
927 return;
930 /* And in the last case we have either mistmatch in between two virtual
931 methods or two virtual table pointers. */
932 if (warning_at (DECL_SOURCE_LOCATION
933 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
934 "virtual table of type %qD violates "
935 "one definition rule ",
936 DECL_CONTEXT (vtable->decl)))
938 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
940 inform (DECL_SOURCE_LOCATION
941 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
942 "the conflicting type defined in another translation "
943 "unit");
944 gcc_assert (TREE_CODE (ref2->referred->decl)
945 == FUNCTION_DECL);
946 inform (DECL_SOURCE_LOCATION
947 (ref1->referred->ultimate_alias_target ()->decl),
948 "virtual method %qD",
949 ref1->referred->ultimate_alias_target ()->decl);
950 inform (DECL_SOURCE_LOCATION
951 (ref2->referred->ultimate_alias_target ()->decl),
952 "ought to match virtual method %qD but does not",
953 ref2->referred->ultimate_alias_target ()->decl);
955 else
956 inform (DECL_SOURCE_LOCATION
957 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
958 "the conflicting type defined in another translation "
959 "unit has virtual table with different contents");
960 return;
965 /* Output ODR violation warning about T1 and T2 with REASON.
966 Display location of ST1 and ST2 if REASON speaks about field or
967 method of the type.
968 If WARN is false, do nothing. Set WARNED if warning was indeed
969 output. */
971 void
972 warn_odr (tree t1, tree t2, tree st1, tree st2,
973 bool warn, bool *warned, const char *reason)
975 tree decl2 = TYPE_NAME (t2);
976 if (warned)
977 *warned = false;
979 if (!warn || !TYPE_NAME(t1))
980 return;
982 /* ODR warnings are output druing LTO streaming; we must apply location
983 cache for potential warnings to be output correctly. */
984 if (lto_location_cache::current_cache)
985 lto_location_cache::current_cache->apply_location_cache ();
987 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
988 "type %qT violates the C++ One Definition Rule",
989 t1))
990 return;
991 if (!st1 && !st2)
993 /* For FIELD_DECL support also case where one of fields is
994 NULL - this is used when the structures have mismatching number of
995 elements. */
996 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
998 inform (DECL_SOURCE_LOCATION (decl2),
999 "a different type is defined in another translation unit");
1000 if (!st1)
1002 st1 = st2;
1003 st2 = NULL;
1005 inform (DECL_SOURCE_LOCATION (st1),
1006 "the first difference of corresponding definitions is field %qD",
1007 st1);
1008 if (st2)
1009 decl2 = st2;
1011 else if (TREE_CODE (st1) == FUNCTION_DECL)
1013 inform (DECL_SOURCE_LOCATION (decl2),
1014 "a different type is defined in another translation unit");
1015 inform (DECL_SOURCE_LOCATION (st1),
1016 "the first difference of corresponding definitions is method %qD",
1017 st1);
1018 decl2 = st2;
1020 else
1021 return;
1022 inform (DECL_SOURCE_LOCATION (decl2), reason);
1024 if (warned)
1025 *warned = true;
1028 /* Return ture if T1 and T2 are incompatible and we want to recusively
1029 dive into them from warn_type_mismatch to give sensible answer. */
1031 static bool
1032 type_mismatch_p (tree t1, tree t2)
1034 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
1035 && !odr_types_equivalent_p (t1, t2))
1036 return true;
1037 return !types_compatible_p (t1, t2);
1041 /* Types T1 and T2 was found to be incompatible in a context they can't
1042 (either used to declare a symbol of same assembler name or unified by
1043 ODR rule). We already output warning about this, but if possible, output
1044 extra information on how the types mismatch.
1046 This is hard to do in general. We basically handle the common cases.
1048 If LOC1 and LOC2 are meaningful locations, use it in the case the types
1049 themselves do no thave one.*/
1051 void
1052 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
1054 /* Location of type is known only if it has TYPE_NAME and the name is
1055 TYPE_DECL. */
1056 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1057 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1058 : UNKNOWN_LOCATION;
1059 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1060 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1061 : UNKNOWN_LOCATION;
1062 bool loc_t2_useful = false;
1064 /* With LTO it is a common case that the location of both types match.
1065 See if T2 has a location that is different from T1. If so, we will
1066 inform user about the location.
1067 Do not consider the location passed to us in LOC1/LOC2 as those are
1068 already output. */
1069 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1071 if (loc_t1 <= BUILTINS_LOCATION)
1072 loc_t2_useful = true;
1073 else
1075 expanded_location xloc1 = expand_location (loc_t1);
1076 expanded_location xloc2 = expand_location (loc_t2);
1078 if (strcmp (xloc1.file, xloc2.file)
1079 || xloc1.line != xloc2.line
1080 || xloc1.column != xloc2.column)
1081 loc_t2_useful = true;
1085 if (loc_t1 <= BUILTINS_LOCATION)
1086 loc_t1 = loc1;
1087 if (loc_t2 <= BUILTINS_LOCATION)
1088 loc_t2 = loc2;
1090 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1092 /* It is a quite common bug to reference anonymous namespace type in
1093 non-anonymous namespace class. */
1094 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1095 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1097 if (type_with_linkage_p (t1) && !type_in_anonymous_namespace_p (t1))
1099 std::swap (t1, t2);
1100 std::swap (loc_t1, loc_t2);
1102 gcc_assert (TYPE_NAME (t1) && TYPE_NAME (t2)
1103 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1104 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL);
1105 /* Most of the time, the type names will match, do not be unnecesarily
1106 verbose. */
1107 if (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t1)))
1108 != IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t2))))
1109 inform (loc_t1,
1110 "type %qT defined in anonymous namespace can not match "
1111 "type %qT across the translation unit boundary",
1112 t1, t2);
1113 else
1114 inform (loc_t1,
1115 "type %qT defined in anonymous namespace can not match "
1116 "across the translation unit boundary",
1117 t1);
1118 if (loc_t2_useful)
1119 inform (loc_t2,
1120 "the incompatible type defined in another translation unit");
1121 return;
1123 /* If types have mangled ODR names and they are different, it is most
1124 informative to output those.
1125 This also covers types defined in different namespaces. */
1126 if (TYPE_NAME (t1) && TYPE_NAME (t2)
1127 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1128 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1129 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
1130 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
1131 && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
1132 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1134 char *name1 = xstrdup (cplus_demangle
1135 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1136 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1137 char *name2 = cplus_demangle
1138 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1139 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1140 if (name1 && name2 && strcmp (name1, name2))
1142 inform (loc_t1,
1143 "type name %qs should match type name %qs",
1144 name1, name2);
1145 if (loc_t2_useful)
1146 inform (loc_t2,
1147 "the incompatible type is defined here");
1148 free (name1);
1149 return;
1151 free (name1);
1153 /* A tricky case are compound types. Often they appear the same in source
1154 code and the mismatch is dragged in by type they are build from.
1155 Look for those differences in subtypes and try to be informative. In other
1156 cases just output nothing because the source code is probably different
1157 and in this case we already output a all necessary info. */
1158 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1160 if (TREE_CODE (t1) == TREE_CODE (t2))
1162 if (TREE_CODE (t1) == ARRAY_TYPE
1163 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1165 tree i1 = TYPE_DOMAIN (t1);
1166 tree i2 = TYPE_DOMAIN (t2);
1168 if (i1 && i2
1169 && TYPE_MAX_VALUE (i1)
1170 && TYPE_MAX_VALUE (i2)
1171 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1172 TYPE_MAX_VALUE (i2), 0))
1174 inform (loc,
1175 "array types have different bounds");
1176 return;
1179 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1180 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1181 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1182 else if (TREE_CODE (t1) == METHOD_TYPE
1183 || TREE_CODE (t1) == FUNCTION_TYPE)
1185 tree parms1 = NULL, parms2 = NULL;
1186 int count = 1;
1188 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1190 inform (loc, "return value type mismatch");
1191 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1192 loc_t2);
1193 return;
1195 if (prototype_p (t1) && prototype_p (t2))
1196 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1197 parms1 && parms2;
1198 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1199 count++)
1201 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1203 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1204 inform (loc,
1205 "implicit this pointer type mismatch");
1206 else
1207 inform (loc,
1208 "type mismatch in parameter %i",
1209 count - (TREE_CODE (t1) == METHOD_TYPE));
1210 warn_types_mismatch (TREE_VALUE (parms1),
1211 TREE_VALUE (parms2),
1212 loc_t1, loc_t2);
1213 return;
1216 if (parms1 || parms2)
1218 inform (loc,
1219 "types have different parameter counts");
1220 return;
1224 return;
1227 if (types_odr_comparable (t1, t2, true)
1228 && types_same_for_odr (t1, t2, true))
1229 inform (loc_t1,
1230 "type %qT itself violates the C++ One Definition Rule", t1);
1231 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1232 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1233 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1234 return;
1235 else
1236 inform (loc_t1, "type %qT should match type %qT",
1237 t1, t2);
1238 if (loc_t2_useful)
1239 inform (loc_t2, "the incompatible type is defined here");
1242 /* Compare T1 and T2, report ODR violations if WARN is true and set
1243 WARNED to true if anything is reported. Return true if types match.
1244 If true is returned, the types are also compatible in the sense of
1245 gimple_canonical_types_compatible_p.
1246 If LOC1 and LOC2 is not UNKNOWN_LOCATION it may be used to output a warning
1247 about the type if the type itself do not have location. */
1249 static bool
1250 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1251 hash_set<type_pair> *visited,
1252 location_t loc1, location_t loc2)
1254 /* Check first for the obvious case of pointer identity. */
1255 if (t1 == t2)
1256 return true;
1257 gcc_assert (!type_with_linkage_p (t1) || !type_in_anonymous_namespace_p (t1));
1258 gcc_assert (!type_with_linkage_p (t2) || !type_in_anonymous_namespace_p (t2));
1260 /* Can't be the same type if the types don't have the same code. */
1261 if (TREE_CODE (t1) != TREE_CODE (t2))
1263 warn_odr (t1, t2, NULL, NULL, warn, warned,
1264 G_("a different type is defined in another translation unit"));
1265 return false;
1268 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1270 warn_odr (t1, t2, NULL, NULL, warn, warned,
1271 G_("a type with different qualifiers is defined in another "
1272 "translation unit"));
1273 return false;
1276 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1277 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1279 /* We can not trip this when comparing ODR types, only when trying to
1280 match different ODR derivations from different declarations.
1281 So WARN should be always false. */
1282 gcc_assert (!warn);
1283 return false;
1286 if (comp_type_attributes (t1, t2) != 1)
1288 warn_odr (t1, t2, NULL, NULL, warn, warned,
1289 G_("a type with different attributes "
1290 "is defined in another translation unit"));
1291 return false;
1294 if (TREE_CODE (t1) == ENUMERAL_TYPE
1295 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1297 tree v1, v2;
1298 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1299 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1301 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1303 warn_odr (t1, t2, NULL, NULL, warn, warned,
1304 G_("an enum with different value name"
1305 " is defined in another translation unit"));
1306 return false;
1308 if (TREE_VALUE (v1) != TREE_VALUE (v2)
1309 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1310 DECL_INITIAL (TREE_VALUE (v2)), 0))
1312 warn_odr (t1, t2, NULL, NULL, warn, warned,
1313 G_("an enum with different values is defined"
1314 " in another translation unit"));
1315 return false;
1318 if (v1 || v2)
1320 warn_odr (t1, t2, NULL, NULL, warn, warned,
1321 G_("an enum with mismatching number of values "
1322 "is defined in another translation unit"));
1323 return false;
1327 /* Non-aggregate types can be handled cheaply. */
1328 if (INTEGRAL_TYPE_P (t1)
1329 || SCALAR_FLOAT_TYPE_P (t1)
1330 || FIXED_POINT_TYPE_P (t1)
1331 || TREE_CODE (t1) == VECTOR_TYPE
1332 || TREE_CODE (t1) == COMPLEX_TYPE
1333 || TREE_CODE (t1) == OFFSET_TYPE
1334 || POINTER_TYPE_P (t1))
1336 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1338 warn_odr (t1, t2, NULL, NULL, warn, warned,
1339 G_("a type with different precision is defined "
1340 "in another translation unit"));
1341 return false;
1343 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1345 warn_odr (t1, t2, NULL, NULL, warn, warned,
1346 G_("a type with different signedness is defined "
1347 "in another translation unit"));
1348 return false;
1351 if (TREE_CODE (t1) == INTEGER_TYPE
1352 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1354 /* char WRT uint_8? */
1355 warn_odr (t1, t2, NULL, NULL, warn, warned,
1356 G_("a different type is defined in another "
1357 "translation unit"));
1358 return false;
1361 /* For canonical type comparisons we do not want to build SCCs
1362 so we cannot compare pointed-to types. But we can, for now,
1363 require the same pointed-to type kind and match what
1364 useless_type_conversion_p would do. */
1365 if (POINTER_TYPE_P (t1))
1367 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1368 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1370 warn_odr (t1, t2, NULL, NULL, warn, warned,
1371 G_("it is defined as a pointer in different address "
1372 "space in another translation unit"));
1373 return false;
1376 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1377 visited, loc1, loc2))
1379 warn_odr (t1, t2, NULL, NULL, warn, warned,
1380 G_("it is defined as a pointer to different type "
1381 "in another translation unit"));
1382 if (warn && warned)
1383 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
1384 loc1, loc2);
1385 return false;
1389 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1390 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1391 visited, loc1, loc2))
1393 /* Probably specific enough. */
1394 warn_odr (t1, t2, NULL, NULL, warn, warned,
1395 G_("a different type is defined "
1396 "in another translation unit"));
1397 if (warn && warned)
1398 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1399 return false;
1402 /* Do type-specific comparisons. */
1403 else switch (TREE_CODE (t1))
1405 case ARRAY_TYPE:
1407 /* Array types are the same if the element types are the same and
1408 the number of elements are the same. */
1409 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1410 visited, loc1, loc2))
1412 warn_odr (t1, t2, NULL, NULL, warn, warned,
1413 G_("a different type is defined in another "
1414 "translation unit"));
1415 if (warn && warned)
1416 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1418 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1419 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1420 == TYPE_NONALIASED_COMPONENT (t2));
1422 tree i1 = TYPE_DOMAIN (t1);
1423 tree i2 = TYPE_DOMAIN (t2);
1425 /* For an incomplete external array, the type domain can be
1426 NULL_TREE. Check this condition also. */
1427 if (i1 == NULL_TREE || i2 == NULL_TREE)
1428 return true;
1430 tree min1 = TYPE_MIN_VALUE (i1);
1431 tree min2 = TYPE_MIN_VALUE (i2);
1432 tree max1 = TYPE_MAX_VALUE (i1);
1433 tree max2 = TYPE_MAX_VALUE (i2);
1435 /* In C++, minimums should be always 0. */
1436 gcc_assert (min1 == min2);
1437 if (!operand_equal_p (max1, max2, 0))
1439 warn_odr (t1, t2, NULL, NULL, warn, warned,
1440 G_("an array of different size is defined "
1441 "in another translation unit"));
1442 return false;
1445 break;
1447 case METHOD_TYPE:
1448 case FUNCTION_TYPE:
1449 /* Function types are the same if the return type and arguments types
1450 are the same. */
1451 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1452 visited, loc1, loc2))
1454 warn_odr (t1, t2, NULL, NULL, warn, warned,
1455 G_("has different return value "
1456 "in another translation unit"));
1457 if (warn && warned)
1458 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1459 return false;
1462 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1463 || !prototype_p (t1) || !prototype_p (t2))
1464 return true;
1465 else
1467 tree parms1, parms2;
1469 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1470 parms1 && parms2;
1471 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1473 if (!odr_subtypes_equivalent_p
1474 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited,
1475 loc1, loc2))
1477 warn_odr (t1, t2, NULL, NULL, warn, warned,
1478 G_("has different parameters in another "
1479 "translation unit"));
1480 if (warn && warned)
1481 warn_types_mismatch (TREE_VALUE (parms1),
1482 TREE_VALUE (parms2), loc1, loc2);
1483 return false;
1487 if (parms1 || parms2)
1489 warn_odr (t1, t2, NULL, NULL, warn, warned,
1490 G_("has different parameters "
1491 "in another translation unit"));
1492 return false;
1495 return true;
1498 case RECORD_TYPE:
1499 case UNION_TYPE:
1500 case QUAL_UNION_TYPE:
1502 tree f1, f2;
1504 /* For aggregate types, all the fields must be the same. */
1505 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1507 if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1508 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1509 != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1511 if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1512 warn_odr (t1, t2, NULL, NULL, warn, warned,
1513 G_("a type defined in another translation unit "
1514 "is not polymorphic"));
1515 else
1516 warn_odr (t1, t2, NULL, NULL, warn, warned,
1517 G_("a type defined in another translation unit "
1518 "is polymorphic"));
1519 return false;
1521 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1522 f1 || f2;
1523 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1525 /* Skip non-fields. */
1526 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1527 f1 = TREE_CHAIN (f1);
1528 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1529 f2 = TREE_CHAIN (f2);
1530 if (!f1 || !f2)
1531 break;
1532 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1534 warn_odr (t1, t2, NULL, NULL, warn, warned,
1535 G_("a type with different virtual table pointers"
1536 " is defined in another translation unit"));
1537 return false;
1539 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1541 warn_odr (t1, t2, NULL, NULL, warn, warned,
1542 G_("a type with different bases is defined "
1543 "in another translation unit"));
1544 return false;
1546 if (DECL_NAME (f1) != DECL_NAME (f2)
1547 && !DECL_ARTIFICIAL (f1))
1549 warn_odr (t1, t2, f1, f2, warn, warned,
1550 G_("a field with different name is defined "
1551 "in another translation unit"));
1552 return false;
1554 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1555 TREE_TYPE (f2), visited,
1556 loc1, loc2))
1558 /* Do not warn about artificial fields and just go into
1559 generic field mismatch warning. */
1560 if (DECL_ARTIFICIAL (f1))
1561 break;
1563 warn_odr (t1, t2, f1, f2, warn, warned,
1564 G_("a field of same name but different type "
1565 "is defined in another translation unit"));
1566 if (warn && warned)
1567 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
1568 return false;
1570 if (!gimple_compare_field_offset (f1, f2))
1572 /* Do not warn about artificial fields and just go into
1573 generic field mismatch warning. */
1574 if (DECL_ARTIFICIAL (f1))
1575 break;
1576 warn_odr (t1, t2, f1, f2, warn, warned,
1577 G_("fields have different layout "
1578 "in another translation unit"));
1579 return false;
1581 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1582 == DECL_NONADDRESSABLE_P (f2));
1585 /* If one aggregate has more fields than the other, they
1586 are not the same. */
1587 if (f1 || f2)
1589 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1590 warn_odr (t1, t2, NULL, NULL, warn, warned,
1591 G_("a type with different virtual table pointers"
1592 " is defined in another translation unit"));
1593 else if ((f1 && DECL_ARTIFICIAL (f1))
1594 || (f2 && DECL_ARTIFICIAL (f2)))
1595 warn_odr (t1, t2, NULL, NULL, warn, warned,
1596 G_("a type with different bases is defined "
1597 "in another translation unit"));
1598 else
1599 warn_odr (t1, t2, f1, f2, warn, warned,
1600 G_("a type with different number of fields "
1601 "is defined in another translation unit"));
1603 return false;
1606 break;
1608 case VOID_TYPE:
1609 case NULLPTR_TYPE:
1610 break;
1612 default:
1613 debug_tree (t1);
1614 gcc_unreachable ();
1617 /* Those are better to come last as they are utterly uninformative. */
1618 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1619 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1621 warn_odr (t1, t2, NULL, NULL, warn, warned,
1622 G_("a type with different size "
1623 "is defined in another translation unit"));
1624 return false;
1626 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1627 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1629 warn_odr (t1, t2, NULL, NULL, warn, warned,
1630 G_("a type with different alignment "
1631 "is defined in another translation unit"));
1632 return false;
1634 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1635 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1636 TYPE_SIZE_UNIT (t2), 0));
1637 return true;
1640 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1642 bool
1643 odr_types_equivalent_p (tree type1, tree type2)
1645 gcc_checking_assert (odr_or_derived_type_p (type1)
1646 && odr_or_derived_type_p (type2));
1648 hash_set<type_pair> visited;
1649 return odr_types_equivalent_p (type1, type2, false, NULL,
1650 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1653 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1654 from VAL->type. This may happen in LTO where tree merging did not merge
1655 all variants of the same type or due to ODR violation.
1657 Analyze and report ODR violations and add type to duplicate list.
1658 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1659 this is first time we see definition of a class return true so the
1660 base types are analyzed. */
1662 static bool
1663 add_type_duplicate (odr_type val, tree type)
1665 bool build_bases = false;
1666 bool prevail = false;
1667 bool odr_must_violate = false;
1669 if (!val->types_set)
1670 val->types_set = new hash_set<tree>;
1672 /* Chose polymorphic type as leader (this happens only in case of ODR
1673 violations. */
1674 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1675 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1676 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1677 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1679 prevail = true;
1680 build_bases = true;
1682 /* Always prefer complete type to be the leader. */
1683 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1685 prevail = true;
1686 build_bases = TYPE_BINFO (type);
1688 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1690 else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1691 && TREE_CODE (type) == ENUMERAL_TYPE
1692 && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1693 prevail = true;
1694 else if (TREE_CODE (val->type) == RECORD_TYPE
1695 && TREE_CODE (type) == RECORD_TYPE
1696 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1698 gcc_assert (!val->bases.length ());
1699 build_bases = true;
1700 prevail = true;
1703 if (prevail)
1704 std::swap (val->type, type);
1706 val->types_set->add (type);
1708 /* If we now have a mangled name, be sure to record it to val->type
1709 so ODR hash can work. */
1711 if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
1712 SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
1713 DECL_ASSEMBLER_NAME (TYPE_NAME (type)));
1715 bool merge = true;
1716 bool base_mismatch = false;
1717 unsigned int i;
1718 bool warned = false;
1719 hash_set<type_pair> visited;
1721 gcc_assert (in_lto_p);
1722 vec_safe_push (val->types, type);
1724 /* If both are class types, compare the bases. */
1725 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1726 && TREE_CODE (val->type) == RECORD_TYPE
1727 && TREE_CODE (type) == RECORD_TYPE
1728 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1730 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1731 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1733 if (!flag_ltrans && !warned && !val->odr_violated)
1735 tree extra_base;
1736 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1737 "a type with the same name but different "
1738 "number of polymorphic bases is "
1739 "defined in another translation unit");
1740 if (warned)
1742 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1743 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1744 extra_base = BINFO_BASE_BINFO
1745 (TYPE_BINFO (type),
1746 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1747 else
1748 extra_base = BINFO_BASE_BINFO
1749 (TYPE_BINFO (val->type),
1750 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1751 tree extra_base_type = BINFO_TYPE (extra_base);
1752 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1753 "the extra base is defined here");
1756 base_mismatch = true;
1758 else
1759 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1761 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1762 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1763 tree type1 = BINFO_TYPE (base1);
1764 tree type2 = BINFO_TYPE (base2);
1766 if (types_odr_comparable (type1, type2))
1768 if (!types_same_for_odr (type1, type2))
1769 base_mismatch = true;
1771 else
1772 if (!odr_types_equivalent_p (type1, type2))
1773 base_mismatch = true;
1774 if (base_mismatch)
1776 if (!warned && !val->odr_violated)
1778 warn_odr (type, val->type, NULL, NULL,
1779 !warned, &warned,
1780 "a type with the same name but different base "
1781 "type is defined in another translation unit");
1782 if (warned)
1783 warn_types_mismatch (type1, type2,
1784 UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1786 break;
1788 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1790 base_mismatch = true;
1791 if (!warned && !val->odr_violated)
1792 warn_odr (type, val->type, NULL, NULL,
1793 !warned, &warned,
1794 "a type with the same name but different base "
1795 "layout is defined in another translation unit");
1796 break;
1798 /* One of bases is not of complete type. */
1799 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1801 /* If we have a polymorphic type info specified for TYPE1
1802 but not for TYPE2 we possibly missed a base when recording
1803 VAL->type earlier.
1804 Be sure this does not happen. */
1805 if (TYPE_BINFO (type1)
1806 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1807 && !build_bases)
1808 odr_must_violate = true;
1809 break;
1811 /* One base is polymorphic and the other not.
1812 This ought to be diagnosed earlier, but do not ICE in the
1813 checking bellow. */
1814 else if (TYPE_BINFO (type1)
1815 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1816 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1818 if (!warned && !val->odr_violated)
1819 warn_odr (type, val->type, NULL, NULL,
1820 !warned, &warned,
1821 "a base of the type is polymorphic only in one "
1822 "translation unit");
1823 base_mismatch = true;
1824 break;
1827 if (base_mismatch)
1829 merge = false;
1830 odr_violation_reported = true;
1831 val->odr_violated = true;
1833 if (symtab->dump_file)
1835 fprintf (symtab->dump_file, "ODR base violation\n");
1837 print_node (symtab->dump_file, "", val->type, 0);
1838 putc ('\n',symtab->dump_file);
1839 print_node (symtab->dump_file, "", type, 0);
1840 putc ('\n',symtab->dump_file);
1845 /* Next compare memory layout. */
1846 if (!odr_types_equivalent_p (val->type, type,
1847 !flag_ltrans && !val->odr_violated && !warned,
1848 &warned, &visited,
1849 DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1850 DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1852 merge = false;
1853 odr_violation_reported = true;
1854 val->odr_violated = true;
1856 gcc_assert (val->odr_violated || !odr_must_violate);
1857 /* Sanity check that all bases will be build same way again. */
1858 if (flag_checking
1859 && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1860 && TREE_CODE (val->type) == RECORD_TYPE
1861 && TREE_CODE (type) == RECORD_TYPE
1862 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1863 && !val->odr_violated
1864 && !base_mismatch && val->bases.length ())
1866 unsigned int num_poly_bases = 0;
1867 unsigned int j;
1869 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1870 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1871 (TYPE_BINFO (type), i)))
1872 num_poly_bases++;
1873 gcc_assert (num_poly_bases == val->bases.length ());
1874 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1875 i++)
1876 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1877 (TYPE_BINFO (type), i)))
1879 odr_type base = get_odr_type
1880 (BINFO_TYPE
1881 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1882 i)),
1883 true);
1884 gcc_assert (val->bases[j] == base);
1885 j++;
1890 /* Regularize things a little. During LTO same types may come with
1891 different BINFOs. Either because their virtual table was
1892 not merged by tree merging and only later at decl merging or
1893 because one type comes with external vtable, while other
1894 with internal. We want to merge equivalent binfos to conserve
1895 memory and streaming overhead.
1897 The external vtables are more harmful: they contain references
1898 to external declarations of methods that may be defined in the
1899 merged LTO unit. For this reason we absolutely need to remove
1900 them and replace by internal variants. Not doing so will lead
1901 to incomplete answers from possible_polymorphic_call_targets.
1903 FIXME: disable for now; because ODR types are now build during
1904 streaming in, the variants do not need to be linked to the type,
1905 yet. We need to do the merging in cleanup pass to be implemented
1906 soon. */
1907 if (!flag_ltrans && merge
1908 && 0
1909 && TREE_CODE (val->type) == RECORD_TYPE
1910 && TREE_CODE (type) == RECORD_TYPE
1911 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1912 && TYPE_MAIN_VARIANT (type) == type
1913 && TYPE_MAIN_VARIANT (val->type) == val->type
1914 && BINFO_VTABLE (TYPE_BINFO (val->type))
1915 && BINFO_VTABLE (TYPE_BINFO (type)))
1917 tree master_binfo = TYPE_BINFO (val->type);
1918 tree v1 = BINFO_VTABLE (master_binfo);
1919 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1921 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1923 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1924 && operand_equal_p (TREE_OPERAND (v1, 1),
1925 TREE_OPERAND (v2, 1), 0));
1926 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1927 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1929 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1930 == DECL_ASSEMBLER_NAME (v2));
1932 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1934 unsigned int i;
1936 set_type_binfo (val->type, TYPE_BINFO (type));
1937 for (i = 0; i < val->types->length (); i++)
1939 if (TYPE_BINFO ((*val->types)[i])
1940 == master_binfo)
1941 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1943 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1945 else
1946 set_type_binfo (type, master_binfo);
1948 return build_bases;
1951 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1952 possibly new entry. */
1954 odr_type
1955 get_odr_type (tree type, bool insert)
1957 odr_type_d **slot = NULL;
1958 odr_type_d **vtable_slot = NULL;
1959 odr_type val = NULL;
1960 hashval_t hash;
1961 bool build_bases = false;
1962 bool insert_to_odr_array = false;
1963 int base_id = -1;
1965 type = main_odr_variant (type);
1967 gcc_checking_assert (can_be_name_hashed_p (type)
1968 || can_be_vtable_hashed_p (type));
1970 /* Lookup entry, first try name hash, fallback to vtable hash. */
1971 if (can_be_name_hashed_p (type))
1973 hash = hash_odr_name (type);
1974 slot = odr_hash->find_slot_with_hash (type, hash,
1975 insert ? INSERT : NO_INSERT);
1977 if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
1979 hash = hash_odr_vtable (type);
1980 vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
1981 insert ? INSERT : NO_INSERT);
1984 if (!slot && !vtable_slot)
1985 return NULL;
1987 /* See if we already have entry for type. */
1988 if ((slot && *slot) || (vtable_slot && *vtable_slot))
1990 if (slot && *slot)
1992 val = *slot;
1993 if (flag_checking
1994 && in_lto_p && can_be_vtable_hashed_p (type))
1996 hash = hash_odr_vtable (type);
1997 vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
1998 NO_INSERT);
1999 gcc_assert (!vtable_slot || *vtable_slot == *slot);
2000 vtable_slot = NULL;
2003 else if (*vtable_slot)
2004 val = *vtable_slot;
2006 if (val->type != type
2007 && (!val->types_set || !val->types_set->add (type)))
2009 gcc_assert (insert);
2010 /* We have type duplicate, but it may introduce vtable name or
2011 mangled name; be sure to keep hashes in sync. */
2012 if (in_lto_p && can_be_vtable_hashed_p (type)
2013 && (!vtable_slot || !*vtable_slot))
2015 if (!vtable_slot)
2017 hash = hash_odr_vtable (type);
2018 vtable_slot = odr_vtable_hash->find_slot_with_hash
2019 (type, hash, INSERT);
2020 gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
2022 *vtable_slot = val;
2024 if (slot && !*slot)
2025 *slot = val;
2026 build_bases = add_type_duplicate (val, type);
2029 else
2031 val = ggc_cleared_alloc<odr_type_d> ();
2032 val->type = type;
2033 val->bases = vNULL;
2034 val->derived_types = vNULL;
2035 if (type_with_linkage_p (type))
2036 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
2037 else
2038 val->anonymous_namespace = 0;
2039 build_bases = COMPLETE_TYPE_P (val->type);
2040 insert_to_odr_array = true;
2041 if (slot)
2042 *slot = val;
2043 if (vtable_slot)
2044 *vtable_slot = val;
2047 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
2048 && type_with_linkage_p (type)
2049 && type == TYPE_MAIN_VARIANT (type))
2051 tree binfo = TYPE_BINFO (type);
2052 unsigned int i;
2054 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
2056 val->all_derivations_known = type_all_derivations_known_p (type);
2057 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
2058 /* For now record only polymorphic types. other are
2059 pointless for devirtualization and we can not precisely
2060 determine ODR equivalency of these during LTO. */
2061 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
2063 tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
2064 odr_type base = get_odr_type (base_type, true);
2065 gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
2066 base->derived_types.safe_push (val);
2067 val->bases.safe_push (base);
2068 if (base->id > base_id)
2069 base_id = base->id;
2072 /* Ensure that type always appears after bases. */
2073 if (insert_to_odr_array)
2075 if (odr_types_ptr)
2076 val->id = odr_types.length ();
2077 vec_safe_push (odr_types_ptr, val);
2079 else if (base_id > val->id)
2081 odr_types[val->id] = 0;
2082 /* Be sure we did not recorded any derived types; these may need
2083 renumbering too. */
2084 gcc_assert (val->derived_types.length() == 0);
2085 val->id = odr_types.length ();
2086 vec_safe_push (odr_types_ptr, val);
2088 return val;
2091 /* Add TYPE od ODR type hash. */
2093 void
2094 register_odr_type (tree type)
2096 if (!odr_hash)
2098 odr_hash = new odr_hash_type (23);
2099 if (in_lto_p)
2100 odr_vtable_hash = new odr_vtable_hash_type (23);
2102 /* Arrange things to be nicer and insert main variants first.
2103 ??? fundamental prerecorded types do not have mangled names; this
2104 makes it possible that non-ODR type is main_odr_variant of ODR type.
2105 Things may get smoother if LTO FE set mangled name of those types same
2106 way as C++ FE does. */
2107 if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type)))
2108 && odr_type_p (TYPE_MAIN_VARIANT (type)))
2109 get_odr_type (TYPE_MAIN_VARIANT (type), true);
2110 if (TYPE_MAIN_VARIANT (type) != type && odr_type_p (main_odr_variant (type)))
2111 get_odr_type (type, true);
2114 /* Return true if type is known to have no derivations. */
2116 bool
2117 type_known_to_have_no_derivations_p (tree t)
2119 return (type_all_derivations_known_p (t)
2120 && (TYPE_FINAL_P (t)
2121 || (odr_hash
2122 && !get_odr_type (t, true)->derived_types.length())));
2125 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2126 recursive printing. */
2128 static void
2129 dump_odr_type (FILE *f, odr_type t, int indent=0)
2131 unsigned int i;
2132 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2133 print_generic_expr (f, t->type, TDF_SLIM);
2134 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2135 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2136 if (TYPE_NAME (t->type))
2138 /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2139 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2140 DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2141 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2142 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2143 IDENTIFIER_POINTER
2144 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2146 if (t->bases.length ())
2148 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2149 for (i = 0; i < t->bases.length (); i++)
2150 fprintf (f, " %i", t->bases[i]->id);
2151 fprintf (f, "\n");
2153 if (t->derived_types.length ())
2155 fprintf (f, "%*s derived types:\n", indent * 2, "");
2156 for (i = 0; i < t->derived_types.length (); i++)
2157 dump_odr_type (f, t->derived_types[i], indent + 1);
2159 fprintf (f, "\n");
2162 /* Dump the type inheritance graph. */
2164 static void
2165 dump_type_inheritance_graph (FILE *f)
2167 unsigned int i;
2168 if (!odr_types_ptr)
2169 return;
2170 fprintf (f, "\n\nType inheritance graph:\n");
2171 for (i = 0; i < odr_types.length (); i++)
2173 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2174 dump_odr_type (f, odr_types[i]);
2176 for (i = 0; i < odr_types.length (); i++)
2178 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2180 unsigned int j;
2181 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2182 print_node (f, "", odr_types[i]->type, 0);
2183 for (j = 0; j < odr_types[i]->types->length (); j++)
2185 tree t;
2186 fprintf (f, "duplicate #%i\n", j);
2187 print_node (f, "", (*odr_types[i]->types)[j], 0);
2188 t = (*odr_types[i]->types)[j];
2189 while (TYPE_P (t) && TYPE_CONTEXT (t))
2191 t = TYPE_CONTEXT (t);
2192 print_node (f, "", t, 0);
2194 putc ('\n',f);
2200 /* Initialize IPA devirt and build inheritance tree graph. */
2202 void
2203 build_type_inheritance_graph (void)
2205 struct symtab_node *n;
2206 FILE *inheritance_dump_file;
2207 dump_flags_t flags;
2209 if (odr_hash)
2210 return;
2211 timevar_push (TV_IPA_INHERITANCE);
2212 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
2213 odr_hash = new odr_hash_type (23);
2214 if (in_lto_p)
2215 odr_vtable_hash = new odr_vtable_hash_type (23);
2217 /* We reconstruct the graph starting of types of all methods seen in the
2218 unit. */
2219 FOR_EACH_SYMBOL (n)
2220 if (is_a <cgraph_node *> (n)
2221 && DECL_VIRTUAL_P (n->decl)
2222 && n->real_symbol_p ())
2223 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
2225 /* Look also for virtual tables of types that do not define any methods.
2227 We need it in a case where class B has virtual base of class A
2228 re-defining its virtual method and there is class C with no virtual
2229 methods with B as virtual base.
2231 Here we output B's virtual method in two variant - for non-virtual
2232 and virtual inheritance. B's virtual table has non-virtual version,
2233 while C's has virtual.
2235 For this reason we need to know about C in order to include both
2236 variants of B. More correctly, record_target_from_binfo should
2237 add both variants of the method when walking B, but we have no
2238 link in between them.
2240 We rely on fact that either the method is exported and thus we
2241 assume it is called externally or C is in anonymous namespace and
2242 thus we will see the vtable. */
2244 else if (is_a <varpool_node *> (n)
2245 && DECL_VIRTUAL_P (n->decl)
2246 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
2247 && TYPE_BINFO (DECL_CONTEXT (n->decl))
2248 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
2249 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
2250 if (inheritance_dump_file)
2252 dump_type_inheritance_graph (inheritance_dump_file);
2253 dump_end (TDI_inheritance, inheritance_dump_file);
2255 timevar_pop (TV_IPA_INHERITANCE);
2258 /* Return true if N has reference from live virtual table
2259 (and thus can be a destination of polymorphic call).
2260 Be conservatively correct when callgraph is not built or
2261 if the method may be referred externally. */
2263 static bool
2264 referenced_from_vtable_p (struct cgraph_node *node)
2266 int i;
2267 struct ipa_ref *ref;
2268 bool found = false;
2270 if (node->externally_visible
2271 || DECL_EXTERNAL (node->decl)
2272 || node->used_from_other_partition)
2273 return true;
2275 /* Keep this test constant time.
2276 It is unlikely this can happen except for the case where speculative
2277 devirtualization introduced many speculative edges to this node.
2278 In this case the target is very likely alive anyway. */
2279 if (node->ref_list.referring.length () > 100)
2280 return true;
2282 /* We need references built. */
2283 if (symtab->state <= CONSTRUCTION)
2284 return true;
2286 for (i = 0; node->iterate_referring (i, ref); i++)
2287 if ((ref->use == IPA_REF_ALIAS
2288 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2289 || (ref->use == IPA_REF_ADDR
2290 && VAR_P (ref->referring->decl)
2291 && DECL_VIRTUAL_P (ref->referring->decl)))
2293 found = true;
2294 break;
2296 return found;
2299 /* Return if TARGET is cxa_pure_virtual. */
2301 static bool
2302 is_cxa_pure_virtual_p (tree target)
2304 return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2305 && DECL_NAME (target)
2306 && id_equal (DECL_NAME (target),
2307 "__cxa_pure_virtual");
2310 /* If TARGET has associated node, record it in the NODES array.
2311 CAN_REFER specify if program can refer to the target directly.
2312 if TARGET is unknown (NULL) or it can not be inserted (for example because
2313 its body was already removed and there is no way to refer to it), clear
2314 COMPLETEP. */
2316 static void
2317 maybe_record_node (vec <cgraph_node *> &nodes,
2318 tree target, hash_set<tree> *inserted,
2319 bool can_refer,
2320 bool *completep)
2322 struct cgraph_node *target_node, *alias_target;
2323 enum availability avail;
2324 bool pure_virtual = is_cxa_pure_virtual_p (target);
2326 /* __builtin_unreachable do not need to be added into
2327 list of targets; the runtime effect of calling them is undefined.
2328 Only "real" virtual methods should be accounted. */
2329 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
2330 return;
2332 if (!can_refer)
2334 /* The only case when method of anonymous namespace becomes unreferable
2335 is when we completely optimized it out. */
2336 if (flag_ltrans
2337 || !target
2338 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2339 *completep = false;
2340 return;
2343 if (!target)
2344 return;
2346 target_node = cgraph_node::get (target);
2348 /* Prefer alias target over aliases, so we do not get confused by
2349 fake duplicates. */
2350 if (target_node)
2352 alias_target = target_node->ultimate_alias_target (&avail);
2353 if (target_node != alias_target
2354 && avail >= AVAIL_AVAILABLE
2355 && target_node->get_availability ())
2356 target_node = alias_target;
2359 /* Method can only be called by polymorphic call if any
2360 of vtables referring to it are alive.
2362 While this holds for non-anonymous functions, too, there are
2363 cases where we want to keep them in the list; for example
2364 inline functions with -fno-weak are static, but we still
2365 may devirtualize them when instance comes from other unit.
2366 The same holds for LTO.
2368 Currently we ignore these functions in speculative devirtualization.
2369 ??? Maybe it would make sense to be more aggressive for LTO even
2370 elsewhere. */
2371 if (!flag_ltrans
2372 && !pure_virtual
2373 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
2374 && (!target_node
2375 || !referenced_from_vtable_p (target_node)))
2377 /* See if TARGET is useful function we can deal with. */
2378 else if (target_node != NULL
2379 && (TREE_PUBLIC (target)
2380 || DECL_EXTERNAL (target)
2381 || target_node->definition)
2382 && target_node->real_symbol_p ())
2384 gcc_assert (!target_node->global.inlined_to);
2385 gcc_assert (target_node->real_symbol_p ());
2386 /* When sanitizing, do not assume that __cxa_pure_virtual is not called
2387 by valid program. */
2388 if (flag_sanitize & SANITIZE_UNREACHABLE)
2390 /* Only add pure virtual if it is the only possible target. This way
2391 we will preserve the diagnostics about pure virtual called in many
2392 cases without disabling optimization in other. */
2393 else if (pure_virtual)
2395 if (nodes.length ())
2396 return;
2398 /* If we found a real target, take away cxa_pure_virtual. */
2399 else if (!pure_virtual && nodes.length () == 1
2400 && is_cxa_pure_virtual_p (nodes[0]->decl))
2401 nodes.pop ();
2402 if (pure_virtual && nodes.length ())
2403 return;
2404 if (!inserted->add (target))
2406 cached_polymorphic_call_targets->add (target_node);
2407 nodes.safe_push (target_node);
2410 else if (!completep)
2412 /* We have definition of __cxa_pure_virtual that is not accessible (it is
2413 optimized out or partitioned to other unit) so we can not add it. When
2414 not sanitizing, there is nothing to do.
2415 Otherwise declare the list incomplete. */
2416 else if (pure_virtual)
2418 if (flag_sanitize & SANITIZE_UNREACHABLE)
2419 *completep = false;
2421 else if (flag_ltrans
2422 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2423 *completep = false;
2426 /* See if BINFO's type matches OUTER_TYPE. If so, look up
2427 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2428 method in vtable and insert method to NODES array
2429 or BASES_TO_CONSIDER if this array is non-NULL.
2430 Otherwise recurse to base BINFOs.
2431 This matches what get_binfo_at_offset does, but with offset
2432 being unknown.
2434 TYPE_BINFOS is a stack of BINFOS of types with defined
2435 virtual table seen on way from class type to BINFO.
2437 MATCHED_VTABLES tracks virtual tables we already did lookup
2438 for virtual function in. INSERTED tracks nodes we already
2439 inserted.
2441 ANONYMOUS is true if BINFO is part of anonymous namespace.
2443 Clear COMPLETEP when we hit unreferable target.
2446 static void
2447 record_target_from_binfo (vec <cgraph_node *> &nodes,
2448 vec <tree> *bases_to_consider,
2449 tree binfo,
2450 tree otr_type,
2451 vec <tree> &type_binfos,
2452 HOST_WIDE_INT otr_token,
2453 tree outer_type,
2454 HOST_WIDE_INT offset,
2455 hash_set<tree> *inserted,
2456 hash_set<tree> *matched_vtables,
2457 bool anonymous,
2458 bool *completep)
2460 tree type = BINFO_TYPE (binfo);
2461 int i;
2462 tree base_binfo;
2465 if (BINFO_VTABLE (binfo))
2466 type_binfos.safe_push (binfo);
2467 if (types_same_for_odr (type, outer_type))
2469 int i;
2470 tree type_binfo = NULL;
2472 /* Look up BINFO with virtual table. For normal types it is always last
2473 binfo on stack. */
2474 for (i = type_binfos.length () - 1; i >= 0; i--)
2475 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
2477 type_binfo = type_binfos[i];
2478 break;
2480 if (BINFO_VTABLE (binfo))
2481 type_binfos.pop ();
2482 /* If this is duplicated BINFO for base shared by virtual inheritance,
2483 we may not have its associated vtable. This is not a problem, since
2484 we will walk it on the other path. */
2485 if (!type_binfo)
2486 return;
2487 tree inner_binfo = get_binfo_at_offset (type_binfo,
2488 offset, otr_type);
2489 if (!inner_binfo)
2491 gcc_assert (odr_violation_reported);
2492 return;
2494 /* For types in anonymous namespace first check if the respective vtable
2495 is alive. If not, we know the type can't be called. */
2496 if (!flag_ltrans && anonymous)
2498 tree vtable = BINFO_VTABLE (inner_binfo);
2499 varpool_node *vnode;
2501 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
2502 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
2503 vnode = varpool_node::get (vtable);
2504 if (!vnode || !vnode->definition)
2505 return;
2507 gcc_assert (inner_binfo);
2508 if (bases_to_consider
2509 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
2510 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
2512 bool can_refer;
2513 tree target = gimple_get_virt_method_for_binfo (otr_token,
2514 inner_binfo,
2515 &can_refer);
2516 if (!bases_to_consider)
2517 maybe_record_node (nodes, target, inserted, can_refer, completep);
2518 /* Destructors are never called via construction vtables. */
2519 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2520 bases_to_consider->safe_push (target);
2522 return;
2525 /* Walk bases. */
2526 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2527 /* Walking bases that have no virtual method is pointless exercise. */
2528 if (polymorphic_type_binfo_p (base_binfo))
2529 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
2530 type_binfos,
2531 otr_token, outer_type, offset, inserted,
2532 matched_vtables, anonymous, completep);
2533 if (BINFO_VTABLE (binfo))
2534 type_binfos.pop ();
2537 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2538 of TYPE, insert them to NODES, recurse into derived nodes.
2539 INSERTED is used to avoid duplicate insertions of methods into NODES.
2540 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2541 Clear COMPLETEP if unreferable target is found.
2543 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2544 all cases where BASE_SKIPPED is true (because the base is abstract
2545 class). */
2547 static void
2548 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2549 hash_set<tree> *inserted,
2550 hash_set<tree> *matched_vtables,
2551 tree otr_type,
2552 odr_type type,
2553 HOST_WIDE_INT otr_token,
2554 tree outer_type,
2555 HOST_WIDE_INT offset,
2556 bool *completep,
2557 vec <tree> &bases_to_consider,
2558 bool consider_construction)
2560 tree binfo = TYPE_BINFO (type->type);
2561 unsigned int i;
2562 auto_vec <tree, 8> type_binfos;
2563 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2565 /* We may need to consider types w/o instances because of possible derived
2566 types using their methods either directly or via construction vtables.
2567 We are safe to skip them when all derivations are known, since we will
2568 handle them later.
2569 This is done by recording them to BASES_TO_CONSIDER array. */
2570 if (possibly_instantiated || consider_construction)
2572 record_target_from_binfo (nodes,
2573 (!possibly_instantiated
2574 && type_all_derivations_known_p (type->type))
2575 ? &bases_to_consider : NULL,
2576 binfo, otr_type, type_binfos, otr_token,
2577 outer_type, offset,
2578 inserted, matched_vtables,
2579 type->anonymous_namespace, completep);
2581 for (i = 0; i < type->derived_types.length (); i++)
2582 possible_polymorphic_call_targets_1 (nodes, inserted,
2583 matched_vtables,
2584 otr_type,
2585 type->derived_types[i],
2586 otr_token, outer_type, offset, completep,
2587 bases_to_consider, consider_construction);
2590 /* Cache of queries for polymorphic call targets.
2592 Enumerating all call targets may get expensive when there are many
2593 polymorphic calls in the program, so we memoize all the previous
2594 queries and avoid duplicated work. */
2596 struct polymorphic_call_target_d
2598 HOST_WIDE_INT otr_token;
2599 ipa_polymorphic_call_context context;
2600 odr_type type;
2601 vec <cgraph_node *> targets;
2602 tree decl_warning;
2603 int type_warning;
2604 bool complete;
2605 bool speculative;
2608 /* Polymorphic call target cache helpers. */
2610 struct polymorphic_call_target_hasher
2611 : pointer_hash <polymorphic_call_target_d>
2613 static inline hashval_t hash (const polymorphic_call_target_d *);
2614 static inline bool equal (const polymorphic_call_target_d *,
2615 const polymorphic_call_target_d *);
2616 static inline void remove (polymorphic_call_target_d *);
2619 /* Return the computed hashcode for ODR_QUERY. */
2621 inline hashval_t
2622 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2624 inchash::hash hstate (odr_query->otr_token);
2626 hstate.add_wide_int (odr_query->type->id);
2627 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2628 hstate.add_wide_int (odr_query->context.offset);
2630 if (odr_query->context.speculative_outer_type)
2632 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2633 hstate.add_wide_int (odr_query->context.speculative_offset);
2635 hstate.add_flag (odr_query->speculative);
2636 hstate.add_flag (odr_query->context.maybe_in_construction);
2637 hstate.add_flag (odr_query->context.maybe_derived_type);
2638 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2639 hstate.commit_flag ();
2640 return hstate.end ();
2643 /* Compare cache entries T1 and T2. */
2645 inline bool
2646 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2647 const polymorphic_call_target_d *t2)
2649 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2650 && t1->speculative == t2->speculative
2651 && t1->context.offset == t2->context.offset
2652 && t1->context.speculative_offset == t2->context.speculative_offset
2653 && t1->context.outer_type == t2->context.outer_type
2654 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2655 && t1->context.maybe_in_construction
2656 == t2->context.maybe_in_construction
2657 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2658 && (t1->context.speculative_maybe_derived_type
2659 == t2->context.speculative_maybe_derived_type));
2662 /* Remove entry in polymorphic call target cache hash. */
2664 inline void
2665 polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
2667 v->targets.release ();
2668 free (v);
2671 /* Polymorphic call target query cache. */
2673 typedef hash_table<polymorphic_call_target_hasher>
2674 polymorphic_call_target_hash_type;
2675 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2677 /* Destroy polymorphic call target query cache. */
2679 static void
2680 free_polymorphic_call_targets_hash ()
2682 if (cached_polymorphic_call_targets)
2684 delete polymorphic_call_target_hash;
2685 polymorphic_call_target_hash = NULL;
2686 delete cached_polymorphic_call_targets;
2687 cached_polymorphic_call_targets = NULL;
2691 /* When virtual function is removed, we may need to flush the cache. */
2693 static void
2694 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2696 if (cached_polymorphic_call_targets
2697 && cached_polymorphic_call_targets->contains (n))
2698 free_polymorphic_call_targets_hash ();
2701 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2703 tree
2704 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2705 tree vtable)
2707 tree v = BINFO_VTABLE (binfo);
2708 int i;
2709 tree base_binfo;
2710 unsigned HOST_WIDE_INT this_offset;
2712 if (v)
2714 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2715 gcc_unreachable ();
2717 if (offset == this_offset
2718 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2719 return binfo;
2722 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2723 if (polymorphic_type_binfo_p (base_binfo))
2725 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2726 if (base_binfo)
2727 return base_binfo;
2729 return NULL;
2732 /* T is known constant value of virtual table pointer.
2733 Store virtual table to V and its offset to OFFSET.
2734 Return false if T does not look like virtual table reference. */
2736 bool
2737 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2738 unsigned HOST_WIDE_INT *offset)
2740 /* We expect &MEM[(void *)&virtual_table + 16B].
2741 We obtain object's BINFO from the context of the virtual table.
2742 This one contains pointer to virtual table represented via
2743 POINTER_PLUS_EXPR. Verify that this pointer matches what
2744 we propagated through.
2746 In the case of virtual inheritance, the virtual tables may
2747 be nested, i.e. the offset may be different from 16 and we may
2748 need to dive into the type representation. */
2749 if (TREE_CODE (t) == ADDR_EXPR
2750 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2751 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2753 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2754 == VAR_DECL)
2755 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2756 (TREE_OPERAND (t, 0), 0), 0)))
2758 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2759 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2760 return true;
2763 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2764 We need to handle it when T comes from static variable initializer or
2765 BINFO. */
2766 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2768 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2769 t = TREE_OPERAND (t, 0);
2771 else
2772 *offset = 0;
2774 if (TREE_CODE (t) != ADDR_EXPR)
2775 return false;
2776 *v = TREE_OPERAND (t, 0);
2777 return true;
2780 /* T is known constant value of virtual table pointer. Return BINFO of the
2781 instance type. */
2783 tree
2784 vtable_pointer_value_to_binfo (const_tree t)
2786 tree vtable;
2787 unsigned HOST_WIDE_INT offset;
2789 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2790 return NULL_TREE;
2792 /* FIXME: for stores of construction vtables we return NULL,
2793 because we do not have BINFO for those. Eventually we should fix
2794 our representation to allow this case to be handled, too.
2795 In the case we see store of BINFO we however may assume
2796 that standard folding will be able to cope with it. */
2797 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2798 offset, vtable);
2801 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2802 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2803 and insert them in NODES.
2805 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2807 static void
2808 record_targets_from_bases (tree otr_type,
2809 HOST_WIDE_INT otr_token,
2810 tree outer_type,
2811 HOST_WIDE_INT offset,
2812 vec <cgraph_node *> &nodes,
2813 hash_set<tree> *inserted,
2814 hash_set<tree> *matched_vtables,
2815 bool *completep)
2817 while (true)
2819 HOST_WIDE_INT pos, size;
2820 tree base_binfo;
2821 tree fld;
2823 if (types_same_for_odr (outer_type, otr_type))
2824 return;
2826 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2828 if (TREE_CODE (fld) != FIELD_DECL)
2829 continue;
2831 pos = int_bit_position (fld);
2832 size = tree_to_shwi (DECL_SIZE (fld));
2833 if (pos <= offset && (pos + size) > offset
2834 /* Do not get confused by zero sized bases. */
2835 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2836 break;
2838 /* Within a class type we should always find corresponding fields. */
2839 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2841 /* Nonbase types should have been stripped by outer_class_type. */
2842 gcc_assert (DECL_ARTIFICIAL (fld));
2844 outer_type = TREE_TYPE (fld);
2845 offset -= pos;
2847 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2848 offset, otr_type);
2849 if (!base_binfo)
2851 gcc_assert (odr_violation_reported);
2852 return;
2854 gcc_assert (base_binfo);
2855 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2857 bool can_refer;
2858 tree target = gimple_get_virt_method_for_binfo (otr_token,
2859 base_binfo,
2860 &can_refer);
2861 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2862 maybe_record_node (nodes, target, inserted, can_refer, completep);
2863 matched_vtables->add (BINFO_VTABLE (base_binfo));
2868 /* When virtual table is removed, we may need to flush the cache. */
2870 static void
2871 devirt_variable_node_removal_hook (varpool_node *n,
2872 void *d ATTRIBUTE_UNUSED)
2874 if (cached_polymorphic_call_targets
2875 && DECL_VIRTUAL_P (n->decl)
2876 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2877 free_polymorphic_call_targets_hash ();
2880 /* Record about how many calls would benefit from given type to be final. */
2882 struct odr_type_warn_count
2884 tree type;
2885 int count;
2886 profile_count dyn_count;
2889 /* Record about how many calls would benefit from given method to be final. */
2891 struct decl_warn_count
2893 tree decl;
2894 int count;
2895 profile_count dyn_count;
2898 /* Information about type and decl warnings. */
2900 struct final_warning_record
2902 profile_count dyn_count;
2903 auto_vec<odr_type_warn_count> type_warnings;
2904 hash_map<tree, decl_warn_count> decl_warnings;
2906 struct final_warning_record *final_warning_records;
2908 /* Return vector containing possible targets of polymorphic call of type
2909 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2910 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2911 OTR_TYPE and include their virtual method. This is useful for types
2912 possibly in construction or destruction where the virtual table may
2913 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
2914 us to walk the inheritance graph for all derivations.
2916 If COMPLETEP is non-NULL, store true if the list is complete.
2917 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2918 in the target cache. If user needs to visit every target list
2919 just once, it can memoize them.
2921 If SPECULATIVE is set, the list will not contain targets that
2922 are not speculatively taken.
2924 Returned vector is placed into cache. It is NOT caller's responsibility
2925 to free it. The vector can be freed on cgraph_remove_node call if
2926 the particular node is a virtual function present in the cache. */
2928 vec <cgraph_node *>
2929 possible_polymorphic_call_targets (tree otr_type,
2930 HOST_WIDE_INT otr_token,
2931 ipa_polymorphic_call_context context,
2932 bool *completep,
2933 void **cache_token,
2934 bool speculative)
2936 static struct cgraph_node_hook_list *node_removal_hook_holder;
2937 vec <cgraph_node *> nodes = vNULL;
2938 auto_vec <tree, 8> bases_to_consider;
2939 odr_type type, outer_type;
2940 polymorphic_call_target_d key;
2941 polymorphic_call_target_d **slot;
2942 unsigned int i;
2943 tree binfo, target;
2944 bool complete;
2945 bool can_refer = false;
2946 bool skipped = false;
2948 otr_type = TYPE_MAIN_VARIANT (otr_type);
2950 /* If ODR is not initialized or the context is invalid, return empty
2951 incomplete list. */
2952 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
2954 if (completep)
2955 *completep = context.invalid;
2956 if (cache_token)
2957 *cache_token = NULL;
2958 return nodes;
2961 /* Do not bother to compute speculative info when user do not asks for it. */
2962 if (!speculative || !context.speculative_outer_type)
2963 context.clear_speculation ();
2965 type = get_odr_type (otr_type, true);
2967 /* Recording type variants would waste results cache. */
2968 gcc_assert (!context.outer_type
2969 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2971 /* Look up the outer class type we want to walk.
2972 If we fail to do so, the context is invalid. */
2973 if ((context.outer_type || context.speculative_outer_type)
2974 && !context.restrict_to_inner_class (otr_type))
2976 if (completep)
2977 *completep = true;
2978 if (cache_token)
2979 *cache_token = NULL;
2980 return nodes;
2982 gcc_assert (!context.invalid);
2984 /* Check that restrict_to_inner_class kept the main variant. */
2985 gcc_assert (!context.outer_type
2986 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2988 /* We canonicalize our query, so we do not need extra hashtable entries. */
2990 /* Without outer type, we have no use for offset. Just do the
2991 basic search from inner type. */
2992 if (!context.outer_type)
2993 context.clear_outer_type (otr_type);
2994 /* We need to update our hierarchy if the type does not exist. */
2995 outer_type = get_odr_type (context.outer_type, true);
2996 /* If the type is complete, there are no derivations. */
2997 if (TYPE_FINAL_P (outer_type->type))
2998 context.maybe_derived_type = false;
3000 /* Initialize query cache. */
3001 if (!cached_polymorphic_call_targets)
3003 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3004 polymorphic_call_target_hash
3005 = new polymorphic_call_target_hash_type (23);
3006 if (!node_removal_hook_holder)
3008 node_removal_hook_holder =
3009 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3010 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3011 NULL);
3015 if (in_lto_p)
3017 if (context.outer_type != otr_type)
3018 context.outer_type
3019 = get_odr_type (context.outer_type, true)->type;
3020 if (context.speculative_outer_type)
3021 context.speculative_outer_type
3022 = get_odr_type (context.speculative_outer_type, true)->type;
3025 /* Look up cached answer. */
3026 key.type = type;
3027 key.otr_token = otr_token;
3028 key.speculative = speculative;
3029 key.context = context;
3030 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3031 if (cache_token)
3032 *cache_token = (void *)*slot;
3033 if (*slot)
3035 if (completep)
3036 *completep = (*slot)->complete;
3037 if ((*slot)->type_warning && final_warning_records)
3039 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3040 if (!final_warning_records->type_warnings
3041 [(*slot)->type_warning - 1].dyn_count.initialized_p ())
3042 final_warning_records->type_warnings
3043 [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
3044 if (final_warning_records->dyn_count > 0)
3045 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3046 = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3047 + final_warning_records->dyn_count;
3049 if (!speculative && (*slot)->decl_warning && final_warning_records)
3051 struct decl_warn_count *c =
3052 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3053 c->count++;
3054 if (final_warning_records->dyn_count > 0)
3055 c->dyn_count += final_warning_records->dyn_count;
3057 return (*slot)->targets;
3060 complete = true;
3062 /* Do actual search. */
3063 timevar_push (TV_IPA_VIRTUAL_CALL);
3064 *slot = XCNEW (polymorphic_call_target_d);
3065 if (cache_token)
3066 *cache_token = (void *)*slot;
3067 (*slot)->type = type;
3068 (*slot)->otr_token = otr_token;
3069 (*slot)->context = context;
3070 (*slot)->speculative = speculative;
3072 hash_set<tree> inserted;
3073 hash_set<tree> matched_vtables;
3075 /* First insert targets we speculatively identified as likely. */
3076 if (context.speculative_outer_type)
3078 odr_type speculative_outer_type;
3079 bool speculation_complete = true;
3081 /* First insert target from type itself and check if it may have
3082 derived types. */
3083 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3084 if (TYPE_FINAL_P (speculative_outer_type->type))
3085 context.speculative_maybe_derived_type = false;
3086 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3087 context.speculative_offset, otr_type);
3088 if (binfo)
3089 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3090 &can_refer);
3091 else
3092 target = NULL;
3094 /* In the case we get complete method, we don't need
3095 to walk derivations. */
3096 if (target && DECL_FINAL_P (target))
3097 context.speculative_maybe_derived_type = false;
3098 if (type_possibly_instantiated_p (speculative_outer_type->type))
3099 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3100 if (binfo)
3101 matched_vtables.add (BINFO_VTABLE (binfo));
3104 /* Next walk recursively all derived types. */
3105 if (context.speculative_maybe_derived_type)
3106 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3107 possible_polymorphic_call_targets_1 (nodes, &inserted,
3108 &matched_vtables,
3109 otr_type,
3110 speculative_outer_type->derived_types[i],
3111 otr_token, speculative_outer_type->type,
3112 context.speculative_offset,
3113 &speculation_complete,
3114 bases_to_consider,
3115 false);
3118 if (!speculative || !nodes.length ())
3120 /* First see virtual method of type itself. */
3121 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3122 context.offset, otr_type);
3123 if (binfo)
3124 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3125 &can_refer);
3126 else
3128 gcc_assert (odr_violation_reported);
3129 target = NULL;
3132 /* Destructors are never called through construction virtual tables,
3133 because the type is always known. */
3134 if (target && DECL_CXX_DESTRUCTOR_P (target))
3135 context.maybe_in_construction = false;
3137 if (target)
3139 /* In the case we get complete method, we don't need
3140 to walk derivations. */
3141 if (DECL_FINAL_P (target))
3142 context.maybe_derived_type = false;
3145 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3146 if (type_possibly_instantiated_p (outer_type->type))
3147 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3148 else
3149 skipped = true;
3151 if (binfo)
3152 matched_vtables.add (BINFO_VTABLE (binfo));
3154 /* Next walk recursively all derived types. */
3155 if (context.maybe_derived_type)
3157 for (i = 0; i < outer_type->derived_types.length(); i++)
3158 possible_polymorphic_call_targets_1 (nodes, &inserted,
3159 &matched_vtables,
3160 otr_type,
3161 outer_type->derived_types[i],
3162 otr_token, outer_type->type,
3163 context.offset, &complete,
3164 bases_to_consider,
3165 context.maybe_in_construction);
3167 if (!outer_type->all_derivations_known)
3169 if (!speculative && final_warning_records
3170 && nodes.length () == 1
3171 && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
3173 if (complete
3174 && warn_suggest_final_types
3175 && !outer_type->derived_types.length ())
3177 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3178 final_warning_records->type_warnings.safe_grow_cleared
3179 (odr_types.length ());
3180 final_warning_records->type_warnings[outer_type->id].count++;
3181 if (!final_warning_records->type_warnings
3182 [outer_type->id].dyn_count.initialized_p ())
3183 final_warning_records->type_warnings
3184 [outer_type->id].dyn_count = profile_count::zero ();
3185 final_warning_records->type_warnings[outer_type->id].dyn_count
3186 += final_warning_records->dyn_count;
3187 final_warning_records->type_warnings[outer_type->id].type
3188 = outer_type->type;
3189 (*slot)->type_warning = outer_type->id + 1;
3191 if (complete
3192 && warn_suggest_final_methods
3193 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3194 outer_type->type))
3196 bool existed;
3197 struct decl_warn_count &c =
3198 final_warning_records->decl_warnings.get_or_insert
3199 (nodes[0]->decl, &existed);
3201 if (existed)
3203 c.count++;
3204 c.dyn_count += final_warning_records->dyn_count;
3206 else
3208 c.count = 1;
3209 c.dyn_count = final_warning_records->dyn_count;
3210 c.decl = nodes[0]->decl;
3212 (*slot)->decl_warning = nodes[0]->decl;
3215 complete = false;
3219 if (!speculative)
3221 /* Destructors are never called through construction virtual tables,
3222 because the type is always known. One of entries may be
3223 cxa_pure_virtual so look to at least two of them. */
3224 if (context.maybe_in_construction)
3225 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3226 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3227 context.maybe_in_construction = false;
3228 if (context.maybe_in_construction)
3230 if (type != outer_type
3231 && (!skipped
3232 || (context.maybe_derived_type
3233 && !type_all_derivations_known_p (outer_type->type))))
3234 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3235 context.offset, nodes, &inserted,
3236 &matched_vtables, &complete);
3237 if (skipped)
3238 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3239 for (i = 0; i < bases_to_consider.length(); i++)
3240 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3245 (*slot)->targets = nodes;
3246 (*slot)->complete = complete;
3247 if (completep)
3248 *completep = complete;
3250 timevar_pop (TV_IPA_VIRTUAL_CALL);
3251 return nodes;
3254 bool
3255 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3256 vec<const decl_warn_count*> *vec)
3258 vec->safe_push (&value);
3259 return true;
3262 /* Dump target list TARGETS into FILE. */
3264 static void
3265 dump_targets (FILE *f, vec <cgraph_node *> targets)
3267 unsigned int i;
3269 for (i = 0; i < targets.length (); i++)
3271 char *name = NULL;
3272 if (in_lto_p)
3273 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3274 fprintf (f, " %s/%i", name ? name : targets[i]->name (),
3275 targets[i]->order);
3276 if (in_lto_p)
3277 free (name);
3278 if (!targets[i]->definition)
3279 fprintf (f, " (no definition%s)",
3280 DECL_DECLARED_INLINE_P (targets[i]->decl)
3281 ? " inline" : "");
3283 fprintf (f, "\n");
3286 /* Dump all possible targets of a polymorphic call. */
3288 void
3289 dump_possible_polymorphic_call_targets (FILE *f,
3290 tree otr_type,
3291 HOST_WIDE_INT otr_token,
3292 const ipa_polymorphic_call_context &ctx)
3294 vec <cgraph_node *> targets;
3295 bool final;
3296 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3297 unsigned int len;
3299 if (!type)
3300 return;
3301 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3302 ctx,
3303 &final, NULL, false);
3304 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3305 print_generic_expr (f, type->type, TDF_SLIM);
3306 fprintf (f, " token %i\n", (int)otr_token);
3308 ctx.dump (f);
3310 fprintf (f, " %s%s%s%s\n ",
3311 final ? "This is a complete list." :
3312 "This is partial list; extra targets may be defined in other units.",
3313 ctx.maybe_in_construction ? " (base types included)" : "",
3314 ctx.maybe_derived_type ? " (derived types included)" : "",
3315 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3316 len = targets.length ();
3317 dump_targets (f, targets);
3319 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3320 ctx,
3321 &final, NULL, true);
3322 if (targets.length () != len)
3324 fprintf (f, " Speculative targets:");
3325 dump_targets (f, targets);
3327 /* Ugly: during callgraph construction the target cache may get populated
3328 before all targets are found. While this is harmless (because all local
3329 types are discovered and only in those case we devirtualize fully and we
3330 don't do speculative devirtualization before IPA stage) it triggers
3331 assert here when dumping at that stage also populates the case with
3332 speculative targets. Quietly ignore this. */
3333 gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
3334 fprintf (f, "\n");
3338 /* Return true if N can be possibly target of a polymorphic call of
3339 OTR_TYPE/OTR_TOKEN. */
3341 bool
3342 possible_polymorphic_call_target_p (tree otr_type,
3343 HOST_WIDE_INT otr_token,
3344 const ipa_polymorphic_call_context &ctx,
3345 struct cgraph_node *n)
3347 vec <cgraph_node *> targets;
3348 unsigned int i;
3349 enum built_in_function fcode;
3350 bool final;
3352 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3353 && ((fcode = DECL_FUNCTION_CODE (n->decl)) == BUILT_IN_UNREACHABLE
3354 || fcode == BUILT_IN_TRAP))
3355 return true;
3357 if (is_cxa_pure_virtual_p (n->decl))
3358 return true;
3360 if (!odr_hash)
3361 return true;
3362 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3363 for (i = 0; i < targets.length (); i++)
3364 if (n->semantically_equivalent_p (targets[i]))
3365 return true;
3367 /* At a moment we allow middle end to dig out new external declarations
3368 as a targets of polymorphic calls. */
3369 if (!final && !n->definition)
3370 return true;
3371 return false;
3376 /* Return true if N can be possibly target of a polymorphic call of
3377 OBJ_TYPE_REF expression REF in STMT. */
3379 bool
3380 possible_polymorphic_call_target_p (tree ref,
3381 gimple *stmt,
3382 struct cgraph_node *n)
3384 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3385 tree call_fn = gimple_call_fn (stmt);
3387 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3388 tree_to_uhwi
3389 (OBJ_TYPE_REF_TOKEN (call_fn)),
3390 context,
3395 /* After callgraph construction new external nodes may appear.
3396 Add them into the graph. */
3398 void
3399 update_type_inheritance_graph (void)
3401 struct cgraph_node *n;
3403 if (!odr_hash)
3404 return;
3405 free_polymorphic_call_targets_hash ();
3406 timevar_push (TV_IPA_INHERITANCE);
3407 /* We reconstruct the graph starting from types of all methods seen in the
3408 unit. */
3409 FOR_EACH_FUNCTION (n)
3410 if (DECL_VIRTUAL_P (n->decl)
3411 && !n->definition
3412 && n->real_symbol_p ())
3413 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3414 timevar_pop (TV_IPA_INHERITANCE);
3418 /* Return true if N looks like likely target of a polymorphic call.
3419 Rule out cxa_pure_virtual, noreturns, function declared cold and
3420 other obvious cases. */
3422 bool
3423 likely_target_p (struct cgraph_node *n)
3425 int flags;
3426 /* cxa_pure_virtual and similar things are not likely. */
3427 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3428 return false;
3429 flags = flags_from_decl_or_type (n->decl);
3430 if (flags & ECF_NORETURN)
3431 return false;
3432 if (lookup_attribute ("cold",
3433 DECL_ATTRIBUTES (n->decl)))
3434 return false;
3435 if (n->frequency < NODE_FREQUENCY_NORMAL)
3436 return false;
3437 /* If there are no live virtual tables referring the target,
3438 the only way the target can be called is an instance coming from other
3439 compilation unit; speculative devirtualization is built around an
3440 assumption that won't happen. */
3441 if (!referenced_from_vtable_p (n))
3442 return false;
3443 return true;
3446 /* Compare type warning records P1 and P2 and choose one with larger count;
3447 helper for qsort. */
3450 type_warning_cmp (const void *p1, const void *p2)
3452 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3453 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3455 if (t1->dyn_count < t2->dyn_count)
3456 return 1;
3457 if (t1->dyn_count > t2->dyn_count)
3458 return -1;
3459 return t2->count - t1->count;
3462 /* Compare decl warning records P1 and P2 and choose one with larger count;
3463 helper for qsort. */
3466 decl_warning_cmp (const void *p1, const void *p2)
3468 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3469 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3471 if (t1->dyn_count < t2->dyn_count)
3472 return 1;
3473 if (t1->dyn_count > t2->dyn_count)
3474 return -1;
3475 return t2->count - t1->count;
3479 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3480 context CTX. */
3482 struct cgraph_node *
3483 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3484 ipa_polymorphic_call_context ctx)
3486 vec <cgraph_node *>targets
3487 = possible_polymorphic_call_targets
3488 (otr_type, otr_token, ctx, NULL, NULL, true);
3489 unsigned int i;
3490 struct cgraph_node *likely_target = NULL;
3492 for (i = 0; i < targets.length (); i++)
3493 if (likely_target_p (targets[i]))
3495 if (likely_target)
3496 return NULL;
3497 likely_target = targets[i];
3499 if (!likely_target
3500 ||!likely_target->definition
3501 || DECL_EXTERNAL (likely_target->decl))
3502 return NULL;
3504 /* Don't use an implicitly-declared destructor (c++/58678). */
3505 struct cgraph_node *non_thunk_target
3506 = likely_target->function_symbol ();
3507 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3508 return NULL;
3509 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3510 && likely_target->can_be_discarded_p ())
3511 return NULL;
3512 return likely_target;
3515 /* The ipa-devirt pass.
3516 When polymorphic call has only one likely target in the unit,
3517 turn it into a speculative call. */
/* Returns TODO_remove_functions when some calls were devirtualized or
   their polymorphic info dropped, otherwise 0 (see the final return).  */
3519 static unsigned int
3520 ipa_devirt (void)
/* Counters below feed the per-category statistics dumped at the end.  */
3522 struct cgraph_node *n;
3523 hash_set<void *> bad_call_targets;
3524 struct cgraph_edge *e;
3526 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3527 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3528 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3529 int ndropped = 0;
/* No ODR type information collected — nothing to do.  */
3531 if (!odr_types_ptr)
3532 return 0;
3534 if (dump_file)
3535 dump_type_inheritance_graph (dump_file);
3537 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3538 This is implemented by setting up final_warning_records that are updated
3539 by get_polymorphic_call_targets.
3540 We need to clear cache in this case to trigger recomputation of all
3541 entries. */
3542 if (warn_suggest_final_methods || warn_suggest_final_types)
3544 final_warning_records = new (final_warning_record);
3545 final_warning_records->dyn_count = profile_count::zero ();
3546 final_warning_records->type_warnings.safe_grow_cleared
3547 (odr_types.length ());
3548 free_polymorphic_call_targets_hash ();
/* Walk every defined function and examine each of its polymorphic
   indirect call sites.  */
3551 FOR_EACH_DEFINED_FUNCTION (n)
3553 bool update = false;
3554 if (!opt_for_fn (n->decl, flag_devirtualize))
3555 continue;
3556 if (dump_file && n->indirect_calls)
3557 fprintf (dump_file, "\n\nProcesing function %s\n",
3558 n->dump_name ())
3559 for (e = n->indirect_calls; e; e = e->next_callee)
3560 if (e->indirect_info->polymorphic)
3562 struct cgraph_node *likely_target = NULL;
3563 void *cache_token;
3564 bool final;
3566 if (final_warning_records)
3567 final_warning_records->dyn_count = e->count;
3569 vec <cgraph_node *>targets
3570 = possible_polymorphic_call_targets
3571 (e, &final, &cache_token, true);
3572 unsigned int i;
3574 /* Trigger warnings by calculating non-speculative targets. */
3575 if (warn_suggest_final_methods || warn_suggest_final_types)
3576 possible_polymorphic_call_targets (e);
3578 if (dump_file)
3579 dump_possible_polymorphic_call_targets
3580 (dump_file, e);
3582 npolymorphic++;
3584 /* See if the call can be devirtualized by means of ipa-prop's
3585 polymorphic call context propagation. If not, we can just
3586 forget about this call being polymorphic and avoid some heavy
3587 lifting in remove_unreachable_nodes that will otherwise try to
3588 keep all possible targets alive until inlining and in the inliner
3589 itself.
3591 This may need to be revisited once we add further ways to use
3592 the may edges, but it is a reasonable thing to do right now. */
3594 if ((e->indirect_info->param_index == -1
3595 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3596 && e->indirect_info->vptr_changed))
3597 && !flag_ltrans_devirtualize)
3599 e->indirect_info->polymorphic = false;
3600 ndropped++;
3601 if (dump_file)
3602 fprintf (dump_file, "Dropping polymorphic call info;"
3603 " it can not be used by ipa-prop\n");
/* Everything below is speculative devirtualization only.  */
3606 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3607 continue;
3609 if (!e->maybe_hot_p ())
3611 if (dump_file)
3612 fprintf (dump_file, "Call is cold\n\n");
3613 ncold++;
3614 continue;
3616 if (e->speculative)
3618 if (dump_file)
3619 fprintf (dump_file, "Call is already speculated\n\n");
3620 nspeculated++;
3622 /* When dumping see if we agree with speculation. */
3623 if (!dump_file)
3624 continue;
3626 if (bad_call_targets.contains (cache_token))
3628 if (dump_file)
3629 fprintf (dump_file, "Target list is known to be useless\n\n");
3630 nmultiple++;
3631 continue;
/* Look for exactly one likely target; more than one defeats speculation.  */
3633 for (i = 0; i < targets.length (); i++)
3634 if (likely_target_p (targets[i]))
3636 if (likely_target)
3638 likely_target = NULL;
3639 if (dump_file)
3640 fprintf (dump_file, "More than one likely target\n\n");
3641 nmultiple++;
3642 break;
3644 likely_target = targets[i];
3646 if (!likely_target)
3648 bad_call_targets.add (cache_token);
3649 continue;
3651 /* This is reached only when dumping; check if we agree or disagree
3652 with the speculation. */
3653 if (e->speculative)
3655 struct cgraph_edge *e2;
3656 struct ipa_ref *ref;
3657 e->speculative_call_info (e2, e, ref);
3658 if (e2->callee->ultimate_alias_target ()
3659 == likely_target->ultimate_alias_target ())
3661 fprintf (dump_file, "We agree with speculation\n\n");
3662 nok++;
3664 else
3666 fprintf (dump_file, "We disagree with speculation\n\n");
3667 nwrong++;
3669 continue;
3671 if (!likely_target->definition)
3673 if (dump_file)
3674 fprintf (dump_file, "Target is not a definition\n\n");
3675 nnotdefined++;
3676 continue;
3678 /* Do not introduce new references to external symbols. While we
3679 can handle these just well, it is common for programs to be
3680 linked incorrectly, with headers defining methods they are not
3681 actually linked with. */
3682 if (DECL_EXTERNAL (likely_target->decl))
3684 if (dump_file)
3685 fprintf (dump_file, "Target is external\n\n");
3686 nexternal++;
3687 continue;
3689 /* Don't use an implicitly-declared destructor (c++/58678). */
3690 struct cgraph_node *non_thunk_target
3691 = likely_target->function_symbol ();
3692 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3694 if (dump_file)
3695 fprintf (dump_file, "Target is artificial\n\n");
3696 nartificial++;
3697 continue;
3699 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3700 && likely_target->can_be_discarded_p ())
3702 if (dump_file)
3703 fprintf (dump_file, "Target is overwritable\n\n");
3704 noverwritable++;
3705 continue;
3707 else if (dbg_cnt (devirt))
3709 if (dump_enabled_p ())
3711 location_t locus = gimple_location_safe (e->call_stmt);
3712 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3713 "speculatively devirtualizing call "
3714 "in %s to %s\n",
3715 n->dump_name (),
3716 likely_target->dump_name ());
3718 if (!likely_target->can_be_discarded_p ())
3720 cgraph_node *alias;
3721 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3722 if (alias)
3723 likely_target = alias;
3725 nconverted++;
3726 update = true;
/* Turn the indirect call into a speculative direct call, scaling the
   edge count/frequency to 80%.  */
3727 e->make_speculative
3728 (likely_target, e->count.apply_scale (8, 10),
3729 e->frequency * 8 / 10);
3732 if (update)
3733 ipa_update_overall_fn_summary (n);
/* Emit collected -Wsuggest-final-types/-Wsuggest-final-methods warnings,
   hottest records first (qsort by dynamic then static count).  */
3735 if (warn_suggest_final_methods || warn_suggest_final_types)
3737 if (warn_suggest_final_types)
3739 final_warning_records->type_warnings.qsort (type_warning_cmp);
3740 for (unsigned int i = 0;
3741 i < final_warning_records->type_warnings.length (); i++)
3742 if (final_warning_records->type_warnings[i].count)
3744 tree type = final_warning_records->type_warnings[i].type;
3745 int count = final_warning_records->type_warnings[i].count;
3746 profile_count dyn_count
3747 = final_warning_records->type_warnings[i].dyn_count;
3749 if (!(dyn_count > 0))
3750 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3751 OPT_Wsuggest_final_types, count,
3752 "Declaring type %qD final "
3753 "would enable devirtualization of %i call",
3754 "Declaring type %qD final "
3755 "would enable devirtualization of %i calls",
3756 type,
3757 count);
3758 else
3759 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3760 OPT_Wsuggest_final_types, count,
3761 "Declaring type %qD final "
3762 "would enable devirtualization of %i call "
3763 "executed %lli times",
3764 "Declaring type %qD final "
3765 "would enable devirtualization of %i calls "
3766 "executed %lli times",
3767 type,
3768 count,
3769 (long long) dyn_count.to_gcov_type ());
3773 if (warn_suggest_final_methods)
3775 auto_vec<const decl_warn_count*> decl_warnings_vec;
3777 final_warning_records->decl_warnings.traverse
3778 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3779 decl_warnings_vec.qsort (decl_warning_cmp);
3780 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3782 tree decl = decl_warnings_vec[i]->decl;
3783 int count = decl_warnings_vec[i]->count;
3784 profile_count dyn_count
3785 = decl_warnings_vec[i]->dyn_count;
3787 if (!(dyn_count > 0))
3788 if (DECL_CXX_DESTRUCTOR_P (decl))
3789 warning_n (DECL_SOURCE_LOCATION (decl),
3790 OPT_Wsuggest_final_methods, count,
3791 "Declaring virtual destructor of %qD final "
3792 "would enable devirtualization of %i call",
3793 "Declaring virtual destructor of %qD final "
3794 "would enable devirtualization of %i calls",
3795 DECL_CONTEXT (decl), count);
3796 else
3797 warning_n (DECL_SOURCE_LOCATION (decl),
3798 OPT_Wsuggest_final_methods, count,
3799 "Declaring method %qD final "
3800 "would enable devirtualization of %i call",
3801 "Declaring method %qD final "
3802 "would enable devirtualization of %i calls",
3803 decl, count);
3804 else if (DECL_CXX_DESTRUCTOR_P (decl))
3805 warning_n (DECL_SOURCE_LOCATION (decl),
3806 OPT_Wsuggest_final_methods, count,
3807 "Declaring virtual destructor of %qD final "
3808 "would enable devirtualization of %i call "
3809 "executed %lli times",
3810 "Declaring virtual destructor of %qD final "
3811 "would enable devirtualization of %i calls "
3812 "executed %lli times",
3813 DECL_CONTEXT (decl), count,
3814 (long long)dyn_count.to_gcov_type ());
3815 else
3816 warning_n (DECL_SOURCE_LOCATION (decl),
3817 OPT_Wsuggest_final_methods, count,
3818 "Declaring method %qD final "
3819 "would enable devirtualization of %i call "
3820 "executed %lli times",
3821 "Declaring method %qD final "
3822 "would enable devirtualization of %i calls "
3823 "executed %lli times",
3824 decl, count,
3825 (long long)dyn_count.to_gcov_type ());
3829 delete (final_warning_records);
3830 final_warning_records = 0;
3833 if (dump_file)
3834 fprintf (dump_file,
3835 "%i polymorphic calls, %i devirtualized,"
3836 " %i speculatively devirtualized, %i cold\n"
3837 "%i have multiple targets, %i overwritable,"
3838 " %i already speculated (%i agree, %i disagree),"
3839 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3840 npolymorphic, ndevirtualized, nconverted, ncold,
3841 nmultiple, noverwritable, nspeculated, nok, nwrong,
3842 nexternal, nnotdefined, nartificial, ndropped);
3843 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3846 namespace {
3848 const pass_data pass_data_ipa_devirt =
3850 IPA_PASS, /* type */
3851 "devirt", /* name */
3852 OPTGROUP_NONE, /* optinfo_flags */
3853 TV_IPA_DEVIRT, /* tv_id */
3854 0, /* properties_required */
3855 0, /* properties_provided */
3856 0, /* properties_destroyed */
3857 0, /* todo_flags_start */
3858 ( TODO_dump_symtab ), /* todo_flags_finish */
3861 class pass_ipa_devirt : public ipa_opt_pass_d
3863 public:
3864 pass_ipa_devirt (gcc::context *ctxt)
3865 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3866 NULL, /* generate_summary */
3867 NULL, /* write_summary */
3868 NULL, /* read_summary */
3869 NULL, /* write_optimization_summary */
3870 NULL, /* read_optimization_summary */
3871 NULL, /* stmt_fixup */
3872 0, /* function_transform_todo_flags_start */
3873 NULL, /* function_transform */
3874 NULL) /* variable_transform */
3877 /* opt_pass methods: */
3878 virtual bool gate (function *)
3880 /* In LTO, always run the IPA passes and decide on function basis if the
3881 pass is enabled. */
3882 if (in_lto_p)
3883 return true;
3884 return (flag_devirtualize
3885 && (flag_devirtualize_speculatively
3886 || (warn_suggest_final_methods
3887 || warn_suggest_final_types))
3888 && optimize);
3891 virtual unsigned int execute (function *) { return ipa_devirt (); }
3893 }; // class pass_ipa_devirt
3895 } // anon namespace
3897 ipa_opt_pass_d *
3898 make_pass_ipa_devirt (gcc::context *ctxt)
3900 return new pass_ipa_devirt (ctxt);
3903 #include "gt-ipa-devirt.h"