[Ada] Add special bypass for obsolete code pattern
[official-gcc.git] / gcc / ipa-devirt.c
blob0eeb25342907a376f3ff27544b9d3d047c9cc449
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2019 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "rtl.h"
113 #include "tree.h"
114 #include "gimple.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
117 #include "cgraph.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
121 #include "calls.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
129 #include "dbgcnt.h"
130 #include "gimple-pretty-print.h"
131 #include "intl.h"
132 #include "stringpool.h"
133 #include "attribs.h"
135 /* Hash based set of pairs of types. */
136 struct type_pair
138 tree first;
139 tree second;
142 template <>
143 struct default_hash_traits <type_pair>
144 : typed_noop_remove <type_pair>
146 GTY((skip)) typedef type_pair value_type;
147 GTY((skip)) typedef type_pair compare_type;
148 static hashval_t
149 hash (type_pair p)
151 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
153 static bool
154 is_empty (type_pair p)
156 return p.first == NULL;
158 static bool
159 is_deleted (type_pair p ATTRIBUTE_UNUSED)
161 return false;
163 static bool
164 equal (const type_pair &a, const type_pair &b)
166 return a.first==b.first && a.second == b.second;
168 static void
169 mark_empty (type_pair &e)
171 e.first = NULL;
175 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
176 hash_set<type_pair> *,
177 location_t, location_t);
178 static void warn_odr (tree t1, tree t2, tree st1, tree st2,
179 bool warn, bool *warned, const char *reason);
181 static bool odr_violation_reported = false;
184 /* Pointer set of all call targets appearing in the cache. */
185 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
187 /* The node of type inheritance graph. For each type unique in
188 One Definition Rule (ODR) sense, we produce one node linking all
189 main variants of types equivalent to it, bases and derived types. */
191 struct GTY(()) odr_type_d
193 /* leader type. */
194 tree type;
195 /* All bases; built only for main variants of types. */
196 vec<odr_type> GTY((skip)) bases;
197 /* All derived types with virtual methods seen in unit;
198 built only for main variants of types. */
199 vec<odr_type> GTY((skip)) derived_types;
201 /* All equivalent types, if more than one. */
202 vec<tree, va_gc> *types;
203 /* Set of all equivalent types, if NON-NULL. */
204 hash_set<tree> * GTY((skip)) types_set;
206 /* Unique ID indexing the type in odr_types array. */
207 int id;
208 /* Is it in anonymous namespace? */
209 bool anonymous_namespace;
210 /* Do we know about all derivations of given type? */
211 bool all_derivations_known;
212 /* Did we report ODR violation here? */
213 bool odr_violated;
214 /* Set when virtual table without RTTI previaled table with. */
215 bool rtti_broken;
216 /* Set when the canonical type is determined using the type name. */
217 bool tbaa_enabled;
220 /* Return TRUE if all derived types of T are known and thus
221 we may consider the walk of derived type complete.
223 This is typically true only for final anonymous namespace types and types
224 defined within functions (that may be COMDAT and thus shared across units,
225 but with the same set of derived types). */
227 bool
228 type_all_derivations_known_p (const_tree t)
230 if (TYPE_FINAL_P (t))
231 return true;
232 if (flag_ltrans)
233 return false;
234 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
235 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
236 return true;
237 if (type_in_anonymous_namespace_p (t))
238 return true;
239 return (decl_function_context (TYPE_NAME (t)) != NULL);
242 /* Return TRUE if type's constructors are all visible. */
244 static bool
245 type_all_ctors_visible_p (tree t)
247 return !flag_ltrans
248 && symtab->state >= CONSTRUCTION
249 /* We cannot always use type_all_derivations_known_p.
250 For function local types we must assume case where
251 the function is COMDAT and shared in between units.
253 TODO: These cases are quite easy to get, but we need
254 to keep track of C++ privatizing via -Wno-weak
255 as well as the IPA privatizing. */
256 && type_in_anonymous_namespace_p (t);
259 /* Return TRUE if type may have instance. */
261 static bool
262 type_possibly_instantiated_p (tree t)
264 tree vtable;
265 varpool_node *vnode;
267 /* TODO: Add abstract types here. */
268 if (!type_all_ctors_visible_p (t))
269 return true;
271 vtable = BINFO_VTABLE (TYPE_BINFO (t));
272 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
273 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
274 vnode = varpool_node::get (vtable);
275 return vnode && vnode->definition;
278 /* Hash used to unify ODR types based on their mangled name and for anonymous
279 namespace types. */
281 struct odr_name_hasher : pointer_hash <odr_type_d>
283 typedef union tree_node *compare_type;
284 static inline hashval_t hash (const odr_type_d *);
285 static inline bool equal (const odr_type_d *, const tree_node *);
286 static inline void remove (odr_type_d *);
289 static bool
290 can_be_name_hashed_p (tree t)
292 return (!in_lto_p || odr_type_p (t));
295 /* Hash type by its ODR name. */
297 static hashval_t
298 hash_odr_name (const_tree t)
300 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
302 /* If not in LTO, all main variants are unique, so we can do
303 pointer hash. */
304 if (!in_lto_p)
305 return htab_hash_pointer (t);
307 /* Anonymous types are unique. */
308 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
309 return htab_hash_pointer (t);
311 gcc_checking_assert (TYPE_NAME (t)
312 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
313 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
316 /* Return the computed hashcode for ODR_TYPE. */
318 inline hashval_t
319 odr_name_hasher::hash (const odr_type_d *odr_type)
321 return hash_odr_name (odr_type->type);
324 /* For languages with One Definition Rule, work out if
325 types are the same based on their name.
327 This is non-trivial for LTO where minor differences in
328 the type representation may have prevented type merging
329 to merge two copies of otherwise equivalent type.
331 Until we start streaming mangled type names, this function works
332 only for polymorphic types.
335 bool
336 types_same_for_odr (const_tree type1, const_tree type2)
338 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
340 type1 = TYPE_MAIN_VARIANT (type1);
341 type2 = TYPE_MAIN_VARIANT (type2);
343 if (type1 == type2)
344 return true;
346 if (!in_lto_p)
347 return false;
349 /* Anonymous namespace types are never duplicated. */
350 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
351 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
352 return false;
354 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
355 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
358 /* Return true if we can decide on ODR equivalency.
360 In non-LTO it is always decide, in LTO however it depends in the type has
361 ODR info attached. */
363 bool
364 types_odr_comparable (tree t1, tree t2)
366 return (!in_lto_p
367 || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
368 || (odr_type_p (TYPE_MAIN_VARIANT (t1))
369 && odr_type_p (TYPE_MAIN_VARIANT (t2))));
372 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
373 known, be conservative and return false. */
375 bool
376 types_must_be_same_for_odr (tree t1, tree t2)
378 if (types_odr_comparable (t1, t2))
379 return types_same_for_odr (t1, t2);
380 else
381 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
384 /* If T is compound type, return type it is based on. */
386 static tree
387 compound_type_base (const_tree t)
389 if (TREE_CODE (t) == ARRAY_TYPE
390 || POINTER_TYPE_P (t)
391 || TREE_CODE (t) == COMPLEX_TYPE
392 || VECTOR_TYPE_P (t))
393 return TREE_TYPE (t);
394 if (TREE_CODE (t) == METHOD_TYPE)
395 return TYPE_METHOD_BASETYPE (t);
396 if (TREE_CODE (t) == OFFSET_TYPE)
397 return TYPE_OFFSET_BASETYPE (t);
398 return NULL_TREE;
401 /* Return true if T is either ODR type or compound type based from it.
402 If the function return true, we know that T is a type originating from C++
403 source even at link-time. */
405 bool
406 odr_or_derived_type_p (const_tree t)
410 if (odr_type_p (TYPE_MAIN_VARIANT (t)))
411 return true;
412 /* Function type is a tricky one. Basically we can consider it
413 ODR derived if return type or any of the parameters is.
414 We need to check all parameters because LTO streaming merges
415 common types (such as void) and they are not considered ODR then. */
416 if (TREE_CODE (t) == FUNCTION_TYPE)
418 if (TYPE_METHOD_BASETYPE (t))
419 t = TYPE_METHOD_BASETYPE (t);
420 else
422 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
423 return true;
424 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
425 if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
426 return true;
427 return false;
430 else
431 t = compound_type_base (t);
433 while (t);
434 return t;
437 /* Compare types T1 and T2 and return true if they are
438 equivalent. */
440 inline bool
441 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
443 tree t1 = o1->type;
445 gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
446 gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
447 if (t1 == t2)
448 return true;
449 if (!in_lto_p)
450 return false;
451 /* Check for anonymous namespaces. */
452 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
453 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
454 return false;
455 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
456 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
457 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
458 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
461 /* Free ODR type V. */
463 inline void
464 odr_name_hasher::remove (odr_type_d *v)
466 v->bases.release ();
467 v->derived_types.release ();
468 if (v->types_set)
469 delete v->types_set;
470 ggc_free (v);
473 /* ODR type hash used to look up ODR type based on tree type node. */
475 typedef hash_table<odr_name_hasher> odr_hash_type;
476 static odr_hash_type *odr_hash;
478 /* ODR types are also stored into ODR_TYPE vector to allow consistent
479 walking. Bases appear before derived types. Vector is garbage collected
480 so we won't end up visiting empty types. */
482 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
483 #define odr_types (*odr_types_ptr)
485 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
486 void
487 set_type_binfo (tree type, tree binfo)
489 for (; type; type = TYPE_NEXT_VARIANT (type))
490 if (COMPLETE_TYPE_P (type))
491 TYPE_BINFO (type) = binfo;
492 else
493 gcc_assert (!TYPE_BINFO (type));
496 /* Return true if type variants match.
497 This assumes that we already verified that T1 and T2 are variants of the
498 same type. */
500 static bool
501 type_variants_equivalent_p (tree t1, tree t2)
503 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
504 return false;
506 if (comp_type_attributes (t1, t2) != 1)
507 return false;
509 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
510 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
511 return false;
513 return true;
516 /* Compare T1 and T2 based on name or structure. */
518 static bool
519 odr_subtypes_equivalent_p (tree t1, tree t2,
520 hash_set<type_pair> *visited,
521 location_t loc1, location_t loc2)
524 /* This can happen in incomplete types that should be handled earlier. */
525 gcc_assert (t1 && t2);
527 if (t1 == t2)
528 return true;
530 /* Anonymous namespace types must match exactly. */
531 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
532 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
533 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
534 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
535 return false;
537 /* For ODR types be sure to compare their names.
538 To support -Wno-odr-type-merging we allow one type to be non-ODR
539 and other ODR even though it is a violation. */
540 if (types_odr_comparable (t1, t2))
542 if (t1 != t2
543 && odr_type_p (TYPE_MAIN_VARIANT (t1))
544 && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
545 return false;
546 if (!types_same_for_odr (t1, t2))
547 return false;
548 if (!type_variants_equivalent_p (t1, t2))
549 return false;
550 /* Limit recursion: If subtypes are ODR types and we know
551 that they are same, be happy. */
552 if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
553 return true;
556 /* Component types, builtins and possibly violating ODR types
557 have to be compared structurally. */
558 if (TREE_CODE (t1) != TREE_CODE (t2))
559 return false;
560 if (AGGREGATE_TYPE_P (t1)
561 && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
562 return false;
564 type_pair pair={TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
565 if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
567 pair.first = TYPE_MAIN_VARIANT (t2);
568 pair.second = TYPE_MAIN_VARIANT (t1);
570 if (visited->add (pair))
571 return true;
572 if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
573 false, NULL, visited, loc1, loc2))
574 return false;
575 if (!type_variants_equivalent_p (t1, t2))
576 return false;
577 return true;
580 /* Return true if DECL1 and DECL2 are identical methods. Consider
581 name equivalent to name.localalias.xyz. */
583 static bool
584 methods_equal_p (tree decl1, tree decl2)
586 if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
587 return true;
588 const char sep = symbol_table::symbol_suffix_separator ();
590 const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
591 const char *ptr1 = strchr (name1, sep);
592 int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
594 const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
595 const char *ptr2 = strchr (name2, sep);
596 int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
598 if (len1 != len2)
599 return false;
600 return !strncmp (name1, name2, len1);
603 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
604 violation warnings. */
606 void
607 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
609 int n1, n2;
611 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
613 odr_violation_reported = true;
614 if (DECL_VIRTUAL_P (prevailing->decl))
616 varpool_node *tmp = prevailing;
617 prevailing = vtable;
618 vtable = tmp;
620 auto_diagnostic_group d;
621 if (warning_at (DECL_SOURCE_LOCATION
622 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
623 OPT_Wodr,
624 "virtual table of type %qD violates one definition rule",
625 DECL_CONTEXT (vtable->decl)))
626 inform (DECL_SOURCE_LOCATION (prevailing->decl),
627 "variable of same assembler name as the virtual table is "
628 "defined in another translation unit");
629 return;
631 if (!prevailing->definition || !vtable->definition)
632 return;
634 /* If we do not stream ODR type info, do not bother to do useful compare. */
635 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
636 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
637 return;
639 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
641 if (class_type->odr_violated)
642 return;
644 for (n1 = 0, n2 = 0; true; n1++, n2++)
646 struct ipa_ref *ref1, *ref2;
647 bool end1, end2;
649 end1 = !prevailing->iterate_reference (n1, ref1);
650 end2 = !vtable->iterate_reference (n2, ref2);
652 /* !DECL_VIRTUAL_P means RTTI entry;
653 We warn when RTTI is lost because non-RTTI previals; we silently
654 accept the other case. */
655 while (!end2
656 && (end1
657 || (methods_equal_p (ref1->referred->decl,
658 ref2->referred->decl)
659 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
660 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
662 if (!class_type->rtti_broken)
664 auto_diagnostic_group d;
665 if (warning_at (DECL_SOURCE_LOCATION
666 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
667 OPT_Wodr,
668 "virtual table of type %qD contains RTTI "
669 "information",
670 DECL_CONTEXT (vtable->decl)))
672 inform (DECL_SOURCE_LOCATION
673 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
674 "but is prevailed by one without from other"
675 " translation unit");
676 inform (DECL_SOURCE_LOCATION
677 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
678 "RTTI will not work on this type");
679 class_type->rtti_broken = true;
682 n2++;
683 end2 = !vtable->iterate_reference (n2, ref2);
685 while (!end1
686 && (end2
687 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
688 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
689 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
691 n1++;
692 end1 = !prevailing->iterate_reference (n1, ref1);
695 /* Finished? */
696 if (end1 && end2)
698 /* Extra paranoia; compare the sizes. We do not have information
699 about virtual inheritance offsets, so just be sure that these
700 match.
701 Do this as very last check so the not very informative error
702 is not output too often. */
703 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
705 class_type->odr_violated = true;
706 auto_diagnostic_group d;
707 tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
708 if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
709 "virtual table of type %qD violates "
710 "one definition rule",
711 DECL_CONTEXT (vtable->decl)))
713 ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
714 inform (DECL_SOURCE_LOCATION (ctx),
715 "the conflicting type defined in another translation"
716 " unit has virtual table of different size");
719 return;
722 if (!end1 && !end2)
724 if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
725 continue;
727 class_type->odr_violated = true;
729 /* If the loops above stopped on non-virtual pointer, we have
730 mismatch in RTTI information mangling. */
731 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
732 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
734 auto_diagnostic_group d;
735 if (warning_at (DECL_SOURCE_LOCATION
736 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
737 OPT_Wodr,
738 "virtual table of type %qD violates "
739 "one definition rule",
740 DECL_CONTEXT (vtable->decl)))
742 inform (DECL_SOURCE_LOCATION
743 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
744 "the conflicting type defined in another translation "
745 "unit with different RTTI information");
747 return;
749 /* At this point both REF1 and REF2 points either to virtual table
750 or virtual method. If one points to virtual table and other to
751 method we can complain the same way as if one table was shorter
752 than other pointing out the extra method. */
753 if (TREE_CODE (ref1->referred->decl)
754 != TREE_CODE (ref2->referred->decl))
756 if (VAR_P (ref1->referred->decl))
757 end1 = true;
758 else if (VAR_P (ref2->referred->decl))
759 end2 = true;
763 class_type->odr_violated = true;
765 /* Complain about size mismatch. Either we have too many virutal
766 functions or too many virtual table pointers. */
767 if (end1 || end2)
769 if (end1)
771 varpool_node *tmp = prevailing;
772 prevailing = vtable;
773 vtable = tmp;
774 ref1 = ref2;
776 auto_diagnostic_group d;
777 if (warning_at (DECL_SOURCE_LOCATION
778 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
779 OPT_Wodr,
780 "virtual table of type %qD violates "
781 "one definition rule",
782 DECL_CONTEXT (vtable->decl)))
784 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
786 inform (DECL_SOURCE_LOCATION
787 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
788 "the conflicting type defined in another translation "
789 "unit");
790 inform (DECL_SOURCE_LOCATION
791 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
792 "contains additional virtual method %qD",
793 ref1->referred->decl);
795 else
797 inform (DECL_SOURCE_LOCATION
798 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
799 "the conflicting type defined in another translation "
800 "unit has virtual table with more entries");
803 return;
806 /* And in the last case we have either mistmatch in between two virtual
807 methods or two virtual table pointers. */
808 auto_diagnostic_group d;
809 if (warning_at (DECL_SOURCE_LOCATION
810 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
811 "virtual table of type %qD violates "
812 "one definition rule",
813 DECL_CONTEXT (vtable->decl)))
815 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
817 inform (DECL_SOURCE_LOCATION
818 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
819 "the conflicting type defined in another translation "
820 "unit");
821 gcc_assert (TREE_CODE (ref2->referred->decl)
822 == FUNCTION_DECL);
823 inform (DECL_SOURCE_LOCATION
824 (ref1->referred->ultimate_alias_target ()->decl),
825 "virtual method %qD",
826 ref1->referred->ultimate_alias_target ()->decl);
827 inform (DECL_SOURCE_LOCATION
828 (ref2->referred->ultimate_alias_target ()->decl),
829 "ought to match virtual method %qD but does not",
830 ref2->referred->ultimate_alias_target ()->decl);
832 else
833 inform (DECL_SOURCE_LOCATION
834 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
835 "the conflicting type defined in another translation "
836 "unit has virtual table with different contents");
837 return;
842 /* Output ODR violation warning about T1 and T2 with REASON.
843 Display location of ST1 and ST2 if REASON speaks about field or
844 method of the type.
845 If WARN is false, do nothing. Set WARNED if warning was indeed
846 output. */
848 static void
849 warn_odr (tree t1, tree t2, tree st1, tree st2,
850 bool warn, bool *warned, const char *reason)
852 tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
853 if (warned)
854 *warned = false;
856 if (!warn || !TYPE_NAME(TYPE_MAIN_VARIANT (t1)))
857 return;
859 /* ODR warnings are output druing LTO streaming; we must apply location
860 cache for potential warnings to be output correctly. */
861 if (lto_location_cache::current_cache)
862 lto_location_cache::current_cache->apply_location_cache ();
864 auto_diagnostic_group d;
865 if (t1 != TYPE_MAIN_VARIANT (t1)
866 && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
868 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
869 OPT_Wodr, "type %qT (typedef of %qT) violates the "
870 "C++ One Definition Rule",
871 t1, TYPE_MAIN_VARIANT (t1)))
872 return;
874 else
876 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
877 OPT_Wodr, "type %qT violates the C++ One Definition Rule",
878 t1))
879 return;
881 if (!st1 && !st2)
883 /* For FIELD_DECL support also case where one of fields is
884 NULL - this is used when the structures have mismatching number of
885 elements. */
886 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
888 inform (DECL_SOURCE_LOCATION (decl2),
889 "a different type is defined in another translation unit");
890 if (!st1)
892 st1 = st2;
893 st2 = NULL;
895 inform (DECL_SOURCE_LOCATION (st1),
896 "the first difference of corresponding definitions is field %qD",
897 st1);
898 if (st2)
899 decl2 = st2;
901 else if (TREE_CODE (st1) == FUNCTION_DECL)
903 inform (DECL_SOURCE_LOCATION (decl2),
904 "a different type is defined in another translation unit");
905 inform (DECL_SOURCE_LOCATION (st1),
906 "the first difference of corresponding definitions is method %qD",
907 st1);
908 decl2 = st2;
910 else
911 return;
912 inform (DECL_SOURCE_LOCATION (decl2), reason);
914 if (warned)
915 *warned = true;
918 /* Return ture if T1 and T2 are incompatible and we want to recusively
919 dive into them from warn_type_mismatch to give sensible answer. */
921 static bool
922 type_mismatch_p (tree t1, tree t2)
924 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
925 && !odr_types_equivalent_p (t1, t2))
926 return true;
927 return !types_compatible_p (t1, t2);
931 /* Types T1 and T2 was found to be incompatible in a context they can't
932 (either used to declare a symbol of same assembler name or unified by
933 ODR rule). We already output warning about this, but if possible, output
934 extra information on how the types mismatch.
936 This is hard to do in general. We basically handle the common cases.
938 If LOC1 and LOC2 are meaningful locations, use it in the case the types
939 themselves do no thave one.*/
941 void
942 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
944 /* Location of type is known only if it has TYPE_NAME and the name is
945 TYPE_DECL. */
946 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
947 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
948 : UNKNOWN_LOCATION;
949 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
950 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
951 : UNKNOWN_LOCATION;
952 bool loc_t2_useful = false;
954 /* With LTO it is a common case that the location of both types match.
955 See if T2 has a location that is different from T1. If so, we will
956 inform user about the location.
957 Do not consider the location passed to us in LOC1/LOC2 as those are
958 already output. */
959 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
961 if (loc_t1 <= BUILTINS_LOCATION)
962 loc_t2_useful = true;
963 else
965 expanded_location xloc1 = expand_location (loc_t1);
966 expanded_location xloc2 = expand_location (loc_t2);
968 if (strcmp (xloc1.file, xloc2.file)
969 || xloc1.line != xloc2.line
970 || xloc1.column != xloc2.column)
971 loc_t2_useful = true;
975 if (loc_t1 <= BUILTINS_LOCATION)
976 loc_t1 = loc1;
977 if (loc_t2 <= BUILTINS_LOCATION)
978 loc_t2 = loc2;
980 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
982 /* It is a quite common bug to reference anonymous namespace type in
983 non-anonymous namespace class. */
984 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
985 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
986 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
987 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
989 if (type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
990 && !type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
992 std::swap (t1, t2);
993 std::swap (loc_t1, loc_t2);
995 gcc_assert (TYPE_NAME (t1) && TYPE_NAME (t2)
996 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
997 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL);
998 tree n1 = TYPE_NAME (t1);
999 tree n2 = TYPE_NAME (t2);
1000 if (TREE_CODE (n1) == TYPE_DECL)
1001 n1 = DECL_NAME (n1);
1002 if (TREE_CODE (n2) == TYPE_DECL)
1003 n2 = DECL_NAME (n2);
1004 /* Most of the time, the type names will match, do not be unnecesarily
1005 verbose. */
1006 if (n1 != n2)
1007 inform (loc_t1,
1008 "type %qT defined in anonymous namespace cannot match "
1009 "type %qT across the translation unit boundary",
1010 t1, t2);
1011 else
1012 inform (loc_t1,
1013 "type %qT defined in anonymous namespace cannot match "
1014 "across the translation unit boundary",
1015 t1);
1016 if (loc_t2_useful)
1017 inform (loc_t2,
1018 "the incompatible type defined in another translation unit");
1019 return;
1021 tree mt1 = TYPE_MAIN_VARIANT (t1);
1022 tree mt2 = TYPE_MAIN_VARIANT (t2);
1023 /* If types have mangled ODR names and they are different, it is most
1024 informative to output those.
1025 This also covers types defined in different namespaces. */
1026 if (TYPE_NAME (mt1) && TYPE_NAME (mt2)
1027 && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL
1028 && TREE_CODE (TYPE_NAME (mt2)) == TYPE_DECL
1029 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (mt1))
1030 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (mt2))
1031 && DECL_ASSEMBLER_NAME (TYPE_NAME (mt1))
1032 != DECL_ASSEMBLER_NAME (TYPE_NAME (mt2)))
1034 char *name1 = xstrdup (cplus_demangle
1035 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (mt1))),
1036 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1037 char *name2 = cplus_demangle
1038 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (mt2))),
1039 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1040 if (name1 && name2 && strcmp (name1, name2))
1042 inform (loc_t1,
1043 "type name %qs should match type name %qs",
1044 name1, name2);
1045 if (loc_t2_useful)
1046 inform (loc_t2,
1047 "the incompatible type is defined here");
1048 free (name1);
1049 return;
1051 free (name1);
  /* A tricky case are compound types.  Often they appear the same in source
     code and the mismatch is dragged in by the type they are built from.
     Look for those differences in subtypes and try to be informative.  In other
     cases just output nothing because the source code is probably different
     and in this case we already output all the necessary info.  */
1058 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1060 if (TREE_CODE (t1) == TREE_CODE (t2))
1062 if (TREE_CODE (t1) == ARRAY_TYPE
1063 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1065 tree i1 = TYPE_DOMAIN (t1);
1066 tree i2 = TYPE_DOMAIN (t2);
1068 if (i1 && i2
1069 && TYPE_MAX_VALUE (i1)
1070 && TYPE_MAX_VALUE (i2)
1071 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1072 TYPE_MAX_VALUE (i2), 0))
1074 inform (loc,
1075 "array types have different bounds");
1076 return;
1079 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1080 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1081 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1082 else if (TREE_CODE (t1) == METHOD_TYPE
1083 || TREE_CODE (t1) == FUNCTION_TYPE)
1085 tree parms1 = NULL, parms2 = NULL;
1086 int count = 1;
1088 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1090 inform (loc, "return value type mismatch");
1091 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1092 loc_t2);
1093 return;
1095 if (prototype_p (t1) && prototype_p (t2))
1096 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1097 parms1 && parms2;
1098 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1099 count++)
1101 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1103 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1104 inform (loc,
1105 "implicit this pointer type mismatch");
1106 else
1107 inform (loc,
1108 "type mismatch in parameter %i",
1109 count - (TREE_CODE (t1) == METHOD_TYPE));
1110 warn_types_mismatch (TREE_VALUE (parms1),
1111 TREE_VALUE (parms2),
1112 loc_t1, loc_t2);
1113 return;
1116 if (parms1 || parms2)
1118 inform (loc,
1119 "types have different parameter counts");
1120 return;
1124 return;
1127 if (types_odr_comparable (t1, t2)
      /* We assign mangled names to integer types to be able to handle
	 signed/unsigned chars.  Accepting them here would however lead to
	 a confusing message like
	 "type ‘const int’ itself violates the C++ One Definition Rule" */
1132 && TREE_CODE (t1) != INTEGER_TYPE
1133 && types_same_for_odr (t1, t2))
1134 inform (loc_t1,
1135 "type %qT itself violates the C++ One Definition Rule", t1);
1136 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1137 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1138 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1139 return;
1140 else
1141 inform (loc_t1, "type %qT should match type %qT",
1142 t1, t2);
1143 if (loc_t2_useful)
1144 inform (loc_t2, "the incompatible type is defined here");
1147 /* Return true if T should be ignored in TYPE_FIELDS for ODR comparsion. */
1149 static bool
1150 skip_in_fields_list_p (tree t)
1152 if (TREE_CODE (t) != FIELD_DECL)
1153 return true;
1154 /* C++ FE introduces zero sized fields depending on -std setting, see
1155 PR89358. */
1156 if (DECL_SIZE (t)
1157 && integer_zerop (DECL_SIZE (t))
1158 && DECL_ARTIFICIAL (t)
1159 && DECL_IGNORED_P (t)
1160 && !DECL_NAME (t))
1161 return true;
1162 return false;
/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
   warning about the type if the type itself does not have a location.
   VISITED caches pairs already being compared so recursion on
   self-referential types terminates.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
			hash_set<type_pair> *visited,
			location_t loc1, location_t loc2)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a different type is defined in another translation unit"));
      return false;
    }

  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    {
      /* We cannot trip this when comparing ODR types, only when trying to
	 match different ODR derivations from different declarations.
	 So WARN should be always false.  */
      gcc_assert (!warn);
      return false;
    }

  /* Enumerators must agree in both names and values.  */
  if (TREE_CODE (t1) == ENUMERAL_TYPE
      && TYPE_VALUES (t1) && TYPE_VALUES (t2))
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	{
	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different value name"
			   " is defined in another translation unit"));
	      return false;
	    }
	  if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), 0))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different values is defined"
			   " in another translation unit"));
	      return false;
	    }
	}
      /* One list ended before the other: differing enumerator counts.  */
      if (v1 || v2)
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("an enum with mismatching number of values "
		       "is defined in another translation unit"));
	  return false;
	}
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* char WRT uint_8?  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					  visited, loc1, loc2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      if (warn && warned)
		warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
				     loc1, loc2);
	      return false;
	    }
	}

      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					 visited, loc1, loc2))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}
    }
  /* Do type-specific comparisons.  */
  else switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements are the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					visited, loc1, loc2))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return type_variants_equivalent_p (t1, t2);

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, minimums should be always 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
      }
      break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
				      visited, loc1, loc2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
	  || !prototype_p (t1) || !prototype_p (t2))
	return type_variants_equivalent_p (t1, t2);
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		  (TREE_VALUE (parms1), TREE_VALUE (parms2),
		   visited, loc1, loc2))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2), loc1, loc2);
		  return false;
		}
	    }

	  /* One argument list ended early: differing parameter counts.  */
	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return type_variants_equivalent_p (t1, t2);
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
		&& polymorphic_type_binfo_p (TYPE_BINFO (t1))
		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
	      {
		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is not polymorphic"));
		else
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is polymorphic"));
		return false;
	      }
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && skip_in_fields_list_p (f1))
		  f1 = TREE_CHAIN (f1);
		while (f2 && skip_in_fields_list_p (f2))
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different virtual table pointers"
				 " is defined in another translation unit"));
		    return false;
		  }
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different bases is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
						TREE_TYPE (f2),
						visited, loc1, loc2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("fields have different layout "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("one field is a bitfield while the other "
				 "is not"));
		    return false;
		  }
		else
		  gcc_assert (DECL_NONADDRESSABLE_P (f1)
			      == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different virtual table pointers"
			       " is defined in another translation unit"));
		else if ((f1 && DECL_ARTIFICIAL (f1))
			 || (f2 && DECL_ARTIFICIAL (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different bases is defined "
			       "in another translation unit"));
		else
		  warn_odr (t1, t2, f1, f2, warn, warned,
			    G_("a type with different number of fields "
			       "is defined in another translation unit"));

		return false;
	      }
	  }
	break;
      }
    case VOID_TYPE:
    case NULLPTR_TYPE:
      break;

    default:
      debug_tree (t1);
      gcc_unreachable ();
    }

  /* Those are better to come last as they are utterly uninformative.  */
  if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
      && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different size "
		   "is defined in another translation unit"));
      return false;
    }

  gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
				  TYPE_SIZE_UNIT (t2), 0));
  return type_variants_equivalent_p (t1, t2);
}
1546 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1548 bool
1549 odr_types_equivalent_p (tree type1, tree type2)
1551 gcc_checking_assert (odr_or_derived_type_p (type1)
1552 && odr_or_derived_type_p (type2));
1554 hash_set<type_pair> visited;
1555 return odr_types_equivalent_p (type1, type2, false, NULL,
1556 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type or due to ODR violation.

   Analyze and report ODR violations and add type to duplicate list.
   If TYPE is more specified than VAL->type, prevail VAL->type.  Also if
   this is first time we see definition of a class return true so the
   base types are analyzed.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  bool prevail = false;
  bool odr_must_violate = false;

  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Choose polymorphic type as leader (this happens only in case of ODR
     violations).  */
  if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
       && polymorphic_type_binfo_p (TYPE_BINFO (type)))
      && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
	  || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
    {
      prevail = true;
      build_bases = true;
    }
  /* Always prefer complete type to be the leader.  */
  else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
    {
      prevail = true;
      if (TREE_CODE (type) == RECORD_TYPE)
	build_bases = TYPE_BINFO (type);
    }
  else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
    ;
  /* Prefer an enum with its values over an opaque enum declaration.  */
  else if (TREE_CODE (val->type) == ENUMERAL_TYPE
	   && TREE_CODE (type) == ENUMERAL_TYPE
	   && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
    prevail = true;
  /* Prefer a record that carries base info over one that does not.  */
  else if (TREE_CODE (val->type) == RECORD_TYPE
	   && TREE_CODE (type) == RECORD_TYPE
	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
    {
      gcc_assert (!val->bases.length ());
      build_bases = true;
      prevail = true;
    }

  if (prevail)
    std::swap (val->type, type);

  val->types_set->add (type);

  if (!odr_hash)
    return false;

  gcc_checking_assert (can_be_name_hashed_p (type)
		       && can_be_name_hashed_p (val->type));

  bool merge = true;
  bool base_mismatch = false;
  unsigned int i;
  bool warned = false;
  hash_set<type_pair> visited;

  gcc_assert (in_lto_p);
  vec_safe_push (val->types, type);

  /* If both are class types, compare the bases.  */
  if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type))
    {
      if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
	  != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
	{
	  if (!flag_ltrans && !warned && !val->odr_violated)
	    {
	      tree extra_base;
	      warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			"a type with the same name but different "
			"number of polymorphic bases is "
			"defined in another translation unit");
	      if (warned)
		{
		  /* Point at the base present in one type but missing
		     in the other.  */
		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
		  else
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (val->type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
		  tree extra_base_type = BINFO_TYPE (extra_base);
		  inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
			  "the extra base is defined here");
		}
	    }
	  base_mismatch = true;
	}
      else
	for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	  {
	    tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
	    tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
	    tree type1 = BINFO_TYPE (base1);
	    tree type2 = BINFO_TYPE (base2);

	    if (types_odr_comparable (type1, type2))
	      {
		if (!types_same_for_odr (type1, type2))
		  base_mismatch = true;
	      }
	    else
	      if (!odr_types_equivalent_p (type1, type2))
		base_mismatch = true;
	    if (base_mismatch)
	      {
		if (!warned && !val->odr_violated)
		  {
		    warn_odr (type, val->type, NULL, NULL,
			      !warned, &warned,
			      "a type with the same name but different base "
			      "type is defined in another translation unit");
		    if (warned)
		      warn_types_mismatch (type1, type2,
					   UNKNOWN_LOCATION, UNKNOWN_LOCATION);
		  }
		break;
	      }
	    if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
	      {
		base_mismatch = true;
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a type with the same name but different base "
			    "layout is defined in another translation unit");
		break;
	      }
	    /* One of bases is not of complete type.  */
	    if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
	      {
		/* If we have a polymorphic type info specified for TYPE1
		   but not for TYPE2 we possibly missed a base when recording
		   VAL->type earlier.
		   Be sure this does not happen.  */
		if (TYPE_BINFO (type1)
		    && polymorphic_type_binfo_p (TYPE_BINFO (type1))
		    && !build_bases)
		  odr_must_violate = true;
		break;
	      }
	    /* One base is polymorphic and the other not.
	       This ought to be diagnosed earlier, but do not ICE in the
	       checking below.  */
	    else if (TYPE_BINFO (type1)
		     && polymorphic_type_binfo_p (TYPE_BINFO (type1))
			!= polymorphic_type_binfo_p (TYPE_BINFO (type2)))
	      {
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a base of the type is polymorphic only in one "
			    "translation unit");
		base_mismatch = true;
		break;
	      }
	  }
      if (base_mismatch)
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "ODR base violation\n");

	      print_node (symtab->dump_file, "", val->type, 0);
	      putc ('\n',symtab->dump_file);
	      print_node (symtab->dump_file, "", type, 0);
	      putc ('\n',symtab->dump_file);
	    }
	}
    }

  /* Next compare memory layout.
     The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
     We must apply the location cache to ensure that they are valid
     before we can pass them to odr_types_equivalent_p (PR lto/83121).  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();
  /* As a special case we stream mangled names of integer types so we can see
     if they are believed to be same even though they have different
     representation.  Avoid bogus warning on mismatches in these.  */
  if (TREE_CODE (type) != INTEGER_TYPE
      && TREE_CODE (val->type) != INTEGER_TYPE
      && !odr_types_equivalent_p (val->type, type,
			       !flag_ltrans && !val->odr_violated && !warned,
			       &warned, &visited,
			       DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
			       DECL_SOURCE_LOCATION (TYPE_NAME (type))))
    {
      merge = false;
      odr_violation_reported = true;
      val->odr_violated = true;
    }
  gcc_assert (val->odr_violated || !odr_must_violate);
  /* Sanity check that all bases will be built the same way again.  */
  if (flag_checking
      && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && !val->odr_violated
      && !base_mismatch && val->bases.length ())
    {
      unsigned int num_poly_bases = 0;
      unsigned int j;

      for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
					 (TYPE_BINFO (type), i)))
	  num_poly_bases++;
      gcc_assert (num_poly_bases == val->bases.length ());
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
	   i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
				     (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    gcc_assert (val->bases[j] == base);
	    j++;
	  }
    }

  /* Regularize things a little.  During LTO same types may come with
     different BINFOs.  Either because their virtual table was
     not merged by tree merging and only later at decl merging or
     because one type comes with external vtable, while other
     with internal.  We want to merge equivalent binfos to conserve
     memory and streaming overhead.

     The external vtables are more harmful: they contain references
     to external declarations of methods that may be defined in the
     merged LTO unit.  For this reason we absolutely need to remove
     them and replace by internal variants.  Not doing so will lead
     to incomplete answers from possible_polymorphic_call_targets.

     FIXME: disable for now; because ODR types are now build during
     streaming in, the variants do not need to be linked to the type,
     yet.  We need to do the merging in cleanup pass to be implemented
     soon.  */
  if (!flag_ltrans && merge
      && 0
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && TYPE_MAIN_VARIANT (type) == type
      && TYPE_MAIN_VARIANT (val->type) == val->type
      && BINFO_VTABLE (TYPE_BINFO (val->type))
      && BINFO_VTABLE (TYPE_BINFO (type)))
    {
      tree master_binfo = TYPE_BINFO (val->type);
      tree v1 = BINFO_VTABLE (master_binfo);
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

      if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	{
	  gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
		      && operand_equal_p (TREE_OPERAND (v1, 1),
					  TREE_OPERAND (v2, 1), 0));
	  v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	  v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	}
      gcc_assert (DECL_ASSEMBLER_NAME (v1)
		  == DECL_ASSEMBLER_NAME (v2));

      if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	{
	  unsigned int i;

	  set_type_binfo (val->type, TYPE_BINFO (type));
	  for (i = 0; i < val->types->length (); i++)
	    {
	      if (TYPE_BINFO ((*val->types)[i])
		  == master_binfo)
		set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
	    }
	  BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	}
      else
	set_type_binfo (type, master_binfo);
    }
  return build_bases;
}
1867 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
1869 tree
1870 obj_type_ref_class (const_tree ref)
1872 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
1873 ref = TREE_TYPE (ref);
1874 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1875 ref = TREE_TYPE (ref);
1876 /* We look for type THIS points to. ObjC also builds
1877 OBJ_TYPE_REF with non-method calls, Their first parameter
1878 ID however also corresponds to class type. */
1879 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
1880 || TREE_CODE (ref) == FUNCTION_TYPE);
1881 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
1882 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1883 tree ret = TREE_TYPE (ref);
1884 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
1885 ret = TYPE_CANONICAL (ret);
1886 else
1887 ret = get_odr_type (ret)->type;
1888 return ret;
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  Return NULL when TYPE is not recorded and
   INSERT is false.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* Hash on the main variant; outside of LTO also canonicalize first.  */
  type = TYPE_MAIN_VARIANT (type);
  if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
    type = TYPE_CANONICAL (type);

  gcc_checking_assert (can_be_name_hashed_p (type));

  hash = hash_odr_name (type);
  slot = odr_hash->find_slot_with_hash (type, hash,
					insert ? INSERT : NO_INSERT);

  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we may need to record the duplicate; types_set->add
	 returns true when TYPE was already recorded as a duplicate.  */
      if (val->type != type && insert
	  && (!val->types_set || !val->types_set->add (type)))
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      /* First time we see this ODR type: create a fresh entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      *slot = val;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types. other are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the highest base id so we can keep the invariant
	       that a type's id is greater than all of its bases' ids.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A newly recorded base got a higher id; re-append VAL at the end
	 and clear its old slot.  */
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
1985 /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
1986 on ODR violations. */
1988 tree
1989 prevailing_odr_type (tree type)
1991 odr_type t = get_odr_type (type, false);
1992 if (!t || t->odr_violated)
1993 return type;
1994 return t->type;
1997 /* Set tbaa_enabled flag for TYPE. */
1999 void
2000 enable_odr_based_tbaa (tree type)
2002 odr_type t = get_odr_type (type, true);
2003 t->tbaa_enabled = true;
2006 /* True if canonical type of TYPE is determined using ODR name. */
2008 bool
2009 odr_based_tbaa_p (const_tree type)
2011 if (!RECORD_OR_UNION_TYPE_P (type))
2012 return false;
2013 odr_type t = get_odr_type (const_cast <tree> (type), false);
2014 if (!t || !t->tbaa_enabled)
2015 return false;
2016 return true;
2019 /* Set TYPE_CANONICAL of type and all its variants and duplicates
2020 to CANONICAL. */
2022 void
2023 set_type_canonical_for_odr_type (tree type, tree canonical)
2025 odr_type t = get_odr_type (type, false);
2026 unsigned int i;
2027 tree tt;
2029 for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
2030 TYPE_CANONICAL (t2) = canonical;
2031 if (t->types)
2032 FOR_EACH_VEC_ELT (*t->types, i, tt)
2033 for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
2034 TYPE_CANONICAL (t2) = canonical;
2037 /* Return true if we reported some ODR violation on TYPE. */
2039 bool
2040 odr_type_violation_reported_p (tree type)
2042 return get_odr_type (type, false)->odr_violated;
/* Add TYPE to ODR type hash.  */

void
register_odr_type (tree type)
{
  if (!odr_hash)
    odr_hash = new odr_hash_type (23);
  /* Only main variants are recorded in the hash.  */
  if (type == TYPE_MAIN_VARIANT (type))
    {
      /* To get ODR warnings right, first register all sub-types.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && COMPLETE_TYPE_P (type))
	{
	  /* Limit recursion on types which are already registered.  */
	  odr_type ot = get_odr_type (type, false);
	  if (ot
	      && (ot->type == type
		  || (ot->types_set
		      && ot->types_set->contains (type))))
	    return;
	  /* Register the types of all fields; arrays are stripped down
	     to their element type first.  */
	  for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	    if (TREE_CODE (f) == FIELD_DECL)
	      {
		tree subtype = TREE_TYPE (f);

		while (TREE_CODE (subtype) == ARRAY_TYPE)
		  subtype = TREE_TYPE (subtype);
		if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
		  register_odr_type (TYPE_MAIN_VARIANT (subtype));
	      }
	  /* Also register all base types.  */
	  if (TYPE_BINFO (type))
	    for (unsigned int i = 0;
		 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	      register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
					     (TYPE_BINFO (type), i)));
	}
      get_odr_type (type, true);
    }
}
2085 /* Return true if type is known to have no derivations. */
2087 bool
2088 type_known_to_have_no_derivations_p (tree t)
2090 return (type_all_derivations_known_p (t)
2091 && (TYPE_FINAL_P (t)
2092 || (odr_hash
2093 && !get_odr_type (t, true)->derived_types.length())));
2096 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2097 recursive printing. */
2099 static void
2100 dump_odr_type (FILE *f, odr_type t, int indent=0)
2102 unsigned int i;
2103 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2104 print_generic_expr (f, t->type, TDF_SLIM);
2105 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2106 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2107 if (TYPE_NAME (t->type))
2109 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2110 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2111 IDENTIFIER_POINTER
2112 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2114 if (t->bases.length ())
2116 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2117 for (i = 0; i < t->bases.length (); i++)
2118 fprintf (f, " %i", t->bases[i]->id);
2119 fprintf (f, "\n");
2121 if (t->derived_types.length ())
2123 fprintf (f, "%*s derived types:\n", indent * 2, "");
2124 for (i = 0; i < t->derived_types.length (); i++)
2125 dump_odr_type (f, t->derived_types[i], indent + 1);
2127 fprintf (f, "\n");
2130 /* Dump the type inheritance graph. */
2132 static void
2133 dump_type_inheritance_graph (FILE *f)
2135 unsigned int i;
2136 unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
2137 if (!odr_types_ptr)
2138 return;
2139 fprintf (f, "\n\nType inheritance graph:\n");
2140 for (i = 0; i < odr_types.length (); i++)
2142 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2143 dump_odr_type (f, odr_types[i]);
2145 for (i = 0; i < odr_types.length (); i++)
2147 if (!odr_types[i])
2148 continue;
2150 num_all_types++;
2151 if (!odr_types[i]->types || !odr_types[i]->types->length ())
2152 continue;
2154 /* To aid ODR warnings we also mangle integer constants but do
2155 not consinder duplicates there. */
2156 if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
2157 continue;
2159 /* It is normal to have one duplicate and one normal variant. */
2160 if (odr_types[i]->types->length () == 1
2161 && COMPLETE_TYPE_P (odr_types[i]->type)
2162 && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
2163 continue;
2165 num_types ++;
2167 unsigned int j;
2168 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2169 print_node (f, "", odr_types[i]->type, 0);
2170 print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
2171 putc ('\n',f);
2172 for (j = 0; j < odr_types[i]->types->length (); j++)
2174 tree t;
2175 num_duplicates ++;
2176 fprintf (f, "duplicate #%i\n", j);
2177 print_node (f, "", (*odr_types[i]->types)[j], 0);
2178 t = (*odr_types[i]->types)[j];
2179 while (TYPE_P (t) && TYPE_CONTEXT (t))
2181 t = TYPE_CONTEXT (t);
2182 print_node (f, "", t, 0);
2184 print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
2185 putc ('\n',f);
2188 fprintf (f, "Out of %i types there are %i types with duplicates; "
2189 "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
2192 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2193 ODR warnings.
2194 We free TYPE_VALUES of enums and also make TYPE_DECLs to not point back
2195 to the type (which is needed to keep them in the same SCC and preserve
2196 location information to output warnings) and subsequently we make all
2197 TYPE_DECLS of same assembler name equivalent. */
2199 static void
2200 free_odr_warning_data ()
2202 static bool odr_data_freed = false;
2204 if (odr_data_freed || !flag_wpa || !odr_types_ptr)
2205 return;
2207 odr_data_freed = true;
2209 for (unsigned int i = 0; i < odr_types.length (); i++)
2210 if (odr_types[i])
2212 tree t = odr_types[i]->type;
2214 if (TREE_CODE (t) == ENUMERAL_TYPE)
2215 TYPE_VALUES (t) = NULL;
2216 TREE_TYPE (TYPE_NAME (t)) = void_type_node;
2218 if (odr_types[i]->types)
2219 for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
2221 tree td = (*odr_types[i]->types)[j];
2223 if (TREE_CODE (td) == ENUMERAL_TYPE)
2224 TYPE_VALUES (td) = NULL;
2225 TYPE_NAME (td) = TYPE_NAME (t);
2228 odr_data_freed = true;
2231 /* Initialize IPA devirt and build inheritance tree graph. */
2233 void
2234 build_type_inheritance_graph (void)
2236 struct symtab_node *n;
2237 FILE *inheritance_dump_file;
2238 dump_flags_t flags;
2240 if (odr_hash)
2242 free_odr_warning_data ();
2243 return;
2245 timevar_push (TV_IPA_INHERITANCE);
2246 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
2247 odr_hash = new odr_hash_type (23);
2249 /* We reconstruct the graph starting of types of all methods seen in the
2250 unit. */
2251 FOR_EACH_SYMBOL (n)
2252 if (is_a <cgraph_node *> (n)
2253 && DECL_VIRTUAL_P (n->decl)
2254 && n->real_symbol_p ())
2255 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
2257 /* Look also for virtual tables of types that do not define any methods.
2259 We need it in a case where class B has virtual base of class A
2260 re-defining its virtual method and there is class C with no virtual
2261 methods with B as virtual base.
2263 Here we output B's virtual method in two variant - for non-virtual
2264 and virtual inheritance. B's virtual table has non-virtual version,
2265 while C's has virtual.
2267 For this reason we need to know about C in order to include both
2268 variants of B. More correctly, record_target_from_binfo should
2269 add both variants of the method when walking B, but we have no
2270 link in between them.
2272 We rely on fact that either the method is exported and thus we
2273 assume it is called externally or C is in anonymous namespace and
2274 thus we will see the vtable. */
2276 else if (is_a <varpool_node *> (n)
2277 && DECL_VIRTUAL_P (n->decl)
2278 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
2279 && TYPE_BINFO (DECL_CONTEXT (n->decl))
2280 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
2281 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
2282 if (inheritance_dump_file)
2284 dump_type_inheritance_graph (inheritance_dump_file);
2285 dump_end (TDI_inheritance, inheritance_dump_file);
2287 free_odr_warning_data ();
2288 timevar_pop (TV_IPA_INHERITANCE);
2291 /* Return true if N has reference from live virtual table
2292 (and thus can be a destination of polymorphic call).
2293 Be conservatively correct when callgraph is not built or
2294 if the method may be referred externally. */
2296 static bool
2297 referenced_from_vtable_p (struct cgraph_node *node)
2299 int i;
2300 struct ipa_ref *ref;
2301 bool found = false;
2303 if (node->externally_visible
2304 || DECL_EXTERNAL (node->decl)
2305 || node->used_from_other_partition)
2306 return true;
2308 /* Keep this test constant time.
2309 It is unlikely this can happen except for the case where speculative
2310 devirtualization introduced many speculative edges to this node.
2311 In this case the target is very likely alive anyway. */
2312 if (node->ref_list.referring.length () > 100)
2313 return true;
2315 /* We need references built. */
2316 if (symtab->state <= CONSTRUCTION)
2317 return true;
2319 for (i = 0; node->iterate_referring (i, ref); i++)
2320 if ((ref->use == IPA_REF_ALIAS
2321 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2322 || (ref->use == IPA_REF_ADDR
2323 && VAR_P (ref->referring->decl)
2324 && DECL_VIRTUAL_P (ref->referring->decl)))
2326 found = true;
2327 break;
2329 return found;
2332 /* Return if TARGET is cxa_pure_virtual. */
2334 static bool
2335 is_cxa_pure_virtual_p (tree target)
2337 return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2338 && DECL_NAME (target)
2339 && id_equal (DECL_NAME (target),
2340 "__cxa_pure_virtual");
2343 /* If TARGET has associated node, record it in the NODES array.
2344 CAN_REFER specify if program can refer to the target directly.
2345 if TARGET is unknown (NULL) or it cannot be inserted (for example because
2346 its body was already removed and there is no way to refer to it), clear
2347 COMPLETEP. */
2349 static void
2350 maybe_record_node (vec <cgraph_node *> &nodes,
2351 tree target, hash_set<tree> *inserted,
2352 bool can_refer,
2353 bool *completep)
2355 struct cgraph_node *target_node, *alias_target;
2356 enum availability avail;
2357 bool pure_virtual = is_cxa_pure_virtual_p (target);
2359 /* __builtin_unreachable do not need to be added into
2360 list of targets; the runtime effect of calling them is undefined.
2361 Only "real" virtual methods should be accounted. */
2362 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
2363 return;
2365 if (!can_refer)
2367 /* The only case when method of anonymous namespace becomes unreferable
2368 is when we completely optimized it out. */
2369 if (flag_ltrans
2370 || !target
2371 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2372 *completep = false;
2373 return;
2376 if (!target)
2377 return;
2379 target_node = cgraph_node::get (target);
2381 /* Prefer alias target over aliases, so we do not get confused by
2382 fake duplicates. */
2383 if (target_node)
2385 alias_target = target_node->ultimate_alias_target (&avail);
2386 if (target_node != alias_target
2387 && avail >= AVAIL_AVAILABLE
2388 && target_node->get_availability ())
2389 target_node = alias_target;
2392 /* Method can only be called by polymorphic call if any
2393 of vtables referring to it are alive.
2395 While this holds for non-anonymous functions, too, there are
2396 cases where we want to keep them in the list; for example
2397 inline functions with -fno-weak are static, but we still
2398 may devirtualize them when instance comes from other unit.
2399 The same holds for LTO.
2401 Currently we ignore these functions in speculative devirtualization.
2402 ??? Maybe it would make sense to be more aggressive for LTO even
2403 elsewhere. */
2404 if (!flag_ltrans
2405 && !pure_virtual
2406 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
2407 && (!target_node
2408 || !referenced_from_vtable_p (target_node)))
2410 /* See if TARGET is useful function we can deal with. */
2411 else if (target_node != NULL
2412 && (TREE_PUBLIC (target)
2413 || DECL_EXTERNAL (target)
2414 || target_node->definition)
2415 && target_node->real_symbol_p ())
2417 gcc_assert (!target_node->global.inlined_to);
2418 gcc_assert (target_node->real_symbol_p ());
2419 /* When sanitizing, do not assume that __cxa_pure_virtual is not called
2420 by valid program. */
2421 if (flag_sanitize & SANITIZE_UNREACHABLE)
2423 /* Only add pure virtual if it is the only possible target. This way
2424 we will preserve the diagnostics about pure virtual called in many
2425 cases without disabling optimization in other. */
2426 else if (pure_virtual)
2428 if (nodes.length ())
2429 return;
2431 /* If we found a real target, take away cxa_pure_virtual. */
2432 else if (!pure_virtual && nodes.length () == 1
2433 && is_cxa_pure_virtual_p (nodes[0]->decl))
2434 nodes.pop ();
2435 if (pure_virtual && nodes.length ())
2436 return;
2437 if (!inserted->add (target))
2439 cached_polymorphic_call_targets->add (target_node);
2440 nodes.safe_push (target_node);
2443 else if (!completep)
2445 /* We have definition of __cxa_pure_virtual that is not accessible (it is
2446 optimized out or partitioned to other unit) so we cannot add it. When
2447 not sanitizing, there is nothing to do.
2448 Otherwise declare the list incomplete. */
2449 else if (pure_virtual)
2451 if (flag_sanitize & SANITIZE_UNREACHABLE)
2452 *completep = false;
2454 else if (flag_ltrans
2455 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2456 *completep = false;
2459 /* See if BINFO's type matches OUTER_TYPE. If so, look up
2460 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2461 method in vtable and insert method to NODES array
2462 or BASES_TO_CONSIDER if this array is non-NULL.
2463 Otherwise recurse to base BINFOs.
2464 This matches what get_binfo_at_offset does, but with offset
2465 being unknown.
2467 TYPE_BINFOS is a stack of BINFOS of types with defined
2468 virtual table seen on way from class type to BINFO.
2470 MATCHED_VTABLES tracks virtual tables we already did lookup
2471 for virtual function in. INSERTED tracks nodes we already
2472 inserted.
2474 ANONYMOUS is true if BINFO is part of anonymous namespace.
2476 Clear COMPLETEP when we hit unreferable target.
2479 static void
2480 record_target_from_binfo (vec <cgraph_node *> &nodes,
2481 vec <tree> *bases_to_consider,
2482 tree binfo,
2483 tree otr_type,
2484 vec <tree> &type_binfos,
2485 HOST_WIDE_INT otr_token,
2486 tree outer_type,
2487 HOST_WIDE_INT offset,
2488 hash_set<tree> *inserted,
2489 hash_set<tree> *matched_vtables,
2490 bool anonymous,
2491 bool *completep)
2493 tree type = BINFO_TYPE (binfo);
2494 int i;
2495 tree base_binfo;
2498 if (BINFO_VTABLE (binfo))
2499 type_binfos.safe_push (binfo);
2500 if (types_same_for_odr (type, outer_type))
2502 int i;
2503 tree type_binfo = NULL;
2505 /* Look up BINFO with virtual table. For normal types it is always last
2506 binfo on stack. */
2507 for (i = type_binfos.length () - 1; i >= 0; i--)
2508 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
2510 type_binfo = type_binfos[i];
2511 break;
2513 if (BINFO_VTABLE (binfo))
2514 type_binfos.pop ();
2515 /* If this is duplicated BINFO for base shared by virtual inheritance,
2516 we may not have its associated vtable. This is not a problem, since
2517 we will walk it on the other path. */
2518 if (!type_binfo)
2519 return;
2520 tree inner_binfo = get_binfo_at_offset (type_binfo,
2521 offset, otr_type);
2522 if (!inner_binfo)
2524 gcc_assert (odr_violation_reported);
2525 return;
2527 /* For types in anonymous namespace first check if the respective vtable
2528 is alive. If not, we know the type can't be called. */
2529 if (!flag_ltrans && anonymous)
2531 tree vtable = BINFO_VTABLE (inner_binfo);
2532 varpool_node *vnode;
2534 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
2535 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
2536 vnode = varpool_node::get (vtable);
2537 if (!vnode || !vnode->definition)
2538 return;
2540 gcc_assert (inner_binfo);
2541 if (bases_to_consider
2542 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
2543 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
2545 bool can_refer;
2546 tree target = gimple_get_virt_method_for_binfo (otr_token,
2547 inner_binfo,
2548 &can_refer);
2549 if (!bases_to_consider)
2550 maybe_record_node (nodes, target, inserted, can_refer, completep);
2551 /* Destructors are never called via construction vtables. */
2552 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2553 bases_to_consider->safe_push (target);
2555 return;
2558 /* Walk bases. */
2559 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2560 /* Walking bases that have no virtual method is pointless exercise. */
2561 if (polymorphic_type_binfo_p (base_binfo))
2562 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
2563 type_binfos,
2564 otr_token, outer_type, offset, inserted,
2565 matched_vtables, anonymous, completep);
2566 if (BINFO_VTABLE (binfo))
2567 type_binfos.pop ();
2570 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2571 of TYPE, insert them to NODES, recurse into derived nodes.
2572 INSERTED is used to avoid duplicate insertions of methods into NODES.
2573 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2574 Clear COMPLETEP if unreferable target is found.
2576 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2577 all cases where BASE_SKIPPED is true (because the base is abstract
2578 class). */
2580 static void
2581 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2582 hash_set<tree> *inserted,
2583 hash_set<tree> *matched_vtables,
2584 tree otr_type,
2585 odr_type type,
2586 HOST_WIDE_INT otr_token,
2587 tree outer_type,
2588 HOST_WIDE_INT offset,
2589 bool *completep,
2590 vec <tree> &bases_to_consider,
2591 bool consider_construction)
2593 tree binfo = TYPE_BINFO (type->type);
2594 unsigned int i;
2595 auto_vec <tree, 8> type_binfos;
2596 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2598 /* We may need to consider types w/o instances because of possible derived
2599 types using their methods either directly or via construction vtables.
2600 We are safe to skip them when all derivations are known, since we will
2601 handle them later.
2602 This is done by recording them to BASES_TO_CONSIDER array. */
2603 if (possibly_instantiated || consider_construction)
2605 record_target_from_binfo (nodes,
2606 (!possibly_instantiated
2607 && type_all_derivations_known_p (type->type))
2608 ? &bases_to_consider : NULL,
2609 binfo, otr_type, type_binfos, otr_token,
2610 outer_type, offset,
2611 inserted, matched_vtables,
2612 type->anonymous_namespace, completep);
2614 for (i = 0; i < type->derived_types.length (); i++)
2615 possible_polymorphic_call_targets_1 (nodes, inserted,
2616 matched_vtables,
2617 otr_type,
2618 type->derived_types[i],
2619 otr_token, outer_type, offset, completep,
2620 bases_to_consider, consider_construction);
2623 /* Cache of queries for polymorphic call targets.
2625 Enumerating all call targets may get expensive when there are many
2626 polymorphic calls in the program, so we memoize all the previous
2627 queries and avoid duplicated work. */
2629 class polymorphic_call_target_d
2631 public:
2632 HOST_WIDE_INT otr_token;
2633 ipa_polymorphic_call_context context;
2634 odr_type type;
2635 vec <cgraph_node *> targets;
2636 tree decl_warning;
2637 int type_warning;
2638 unsigned int n_odr_types;
2639 bool complete;
2640 bool speculative;
2643 /* Polymorphic call target cache helpers. */
2645 struct polymorphic_call_target_hasher
2646 : pointer_hash <polymorphic_call_target_d>
2648 static inline hashval_t hash (const polymorphic_call_target_d *);
2649 static inline bool equal (const polymorphic_call_target_d *,
2650 const polymorphic_call_target_d *);
2651 static inline void remove (polymorphic_call_target_d *);
2654 /* Return the computed hashcode for ODR_QUERY. */
2656 inline hashval_t
2657 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2659 inchash::hash hstate (odr_query->otr_token);
2661 hstate.add_hwi (odr_query->type->id);
2662 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2663 hstate.add_hwi (odr_query->context.offset);
2664 hstate.add_hwi (odr_query->n_odr_types);
2666 if (odr_query->context.speculative_outer_type)
2668 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2669 hstate.add_hwi (odr_query->context.speculative_offset);
2671 hstate.add_flag (odr_query->speculative);
2672 hstate.add_flag (odr_query->context.maybe_in_construction);
2673 hstate.add_flag (odr_query->context.maybe_derived_type);
2674 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2675 hstate.commit_flag ();
2676 return hstate.end ();
2679 /* Compare cache entries T1 and T2. */
2681 inline bool
2682 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2683 const polymorphic_call_target_d *t2)
2685 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2686 && t1->speculative == t2->speculative
2687 && t1->context.offset == t2->context.offset
2688 && t1->context.speculative_offset == t2->context.speculative_offset
2689 && t1->context.outer_type == t2->context.outer_type
2690 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2691 && t1->context.maybe_in_construction
2692 == t2->context.maybe_in_construction
2693 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2694 && (t1->context.speculative_maybe_derived_type
2695 == t2->context.speculative_maybe_derived_type)
2696 /* Adding new type may affect outcome of target search. */
2697 && t1->n_odr_types == t2->n_odr_types);
2700 /* Remove entry in polymorphic call target cache hash. */
2702 inline void
2703 polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
2705 v->targets.release ();
2706 free (v);
2709 /* Polymorphic call target query cache. */
2711 typedef hash_table<polymorphic_call_target_hasher>
2712 polymorphic_call_target_hash_type;
2713 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2715 /* Destroy polymorphic call target query cache. */
2717 static void
2718 free_polymorphic_call_targets_hash ()
2720 if (cached_polymorphic_call_targets)
2722 delete polymorphic_call_target_hash;
2723 polymorphic_call_target_hash = NULL;
2724 delete cached_polymorphic_call_targets;
2725 cached_polymorphic_call_targets = NULL;
2729 /* Force rebuilding type inheritance graph from scratch.
2730 This is use to make sure that we do not keep references to types
2731 which was not visible to free_lang_data. */
2733 void
2734 rebuild_type_inheritance_graph ()
2736 if (!odr_hash)
2737 return;
2738 delete odr_hash;
2739 odr_hash = NULL;
2740 odr_types_ptr = NULL;
2741 free_polymorphic_call_targets_hash ();
2744 /* When virtual function is removed, we may need to flush the cache. */
2746 static void
2747 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2749 if (cached_polymorphic_call_targets
2750 && cached_polymorphic_call_targets->contains (n))
2751 free_polymorphic_call_targets_hash ();
2754 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2756 tree
2757 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2758 tree vtable)
2760 tree v = BINFO_VTABLE (binfo);
2761 int i;
2762 tree base_binfo;
2763 unsigned HOST_WIDE_INT this_offset;
2765 if (v)
2767 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2768 gcc_unreachable ();
2770 if (offset == this_offset
2771 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2772 return binfo;
2775 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2776 if (polymorphic_type_binfo_p (base_binfo))
2778 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2779 if (base_binfo)
2780 return base_binfo;
2782 return NULL;
2785 /* T is known constant value of virtual table pointer.
2786 Store virtual table to V and its offset to OFFSET.
2787 Return false if T does not look like virtual table reference. */
2789 bool
2790 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2791 unsigned HOST_WIDE_INT *offset)
2793 /* We expect &MEM[(void *)&virtual_table + 16B].
2794 We obtain object's BINFO from the context of the virtual table.
2795 This one contains pointer to virtual table represented via
2796 POINTER_PLUS_EXPR. Verify that this pointer matches what
2797 we propagated through.
2799 In the case of virtual inheritance, the virtual tables may
2800 be nested, i.e. the offset may be different from 16 and we may
2801 need to dive into the type representation. */
2802 if (TREE_CODE (t) == ADDR_EXPR
2803 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2804 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2805 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2806 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2807 == VAR_DECL)
2808 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2809 (TREE_OPERAND (t, 0), 0), 0)))
2811 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2812 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2813 return true;
2816 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2817 We need to handle it when T comes from static variable initializer or
2818 BINFO. */
2819 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2821 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2822 t = TREE_OPERAND (t, 0);
2824 else
2825 *offset = 0;
2827 if (TREE_CODE (t) != ADDR_EXPR)
2828 return false;
2829 *v = TREE_OPERAND (t, 0);
2830 return true;
2833 /* T is known constant value of virtual table pointer. Return BINFO of the
2834 instance type. */
2836 tree
2837 vtable_pointer_value_to_binfo (const_tree t)
2839 tree vtable;
2840 unsigned HOST_WIDE_INT offset;
2842 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2843 return NULL_TREE;
2845 /* FIXME: for stores of construction vtables we return NULL,
2846 because we do not have BINFO for those. Eventually we should fix
2847 our representation to allow this case to be handled, too.
2848 In the case we see store of BINFO we however may assume
2849 that standard folding will be able to cope with it. */
2850 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2851 offset, vtable);
2854 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2855 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2856 and insert them in NODES.
2858 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2860 static void
2861 record_targets_from_bases (tree otr_type,
2862 HOST_WIDE_INT otr_token,
2863 tree outer_type,
2864 HOST_WIDE_INT offset,
2865 vec <cgraph_node *> &nodes,
2866 hash_set<tree> *inserted,
2867 hash_set<tree> *matched_vtables,
2868 bool *completep)
2870 while (true)
2872 HOST_WIDE_INT pos, size;
2873 tree base_binfo;
2874 tree fld;
2876 if (types_same_for_odr (outer_type, otr_type))
2877 return;
2879 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2881 if (TREE_CODE (fld) != FIELD_DECL)
2882 continue;
2884 pos = int_bit_position (fld);
2885 size = tree_to_shwi (DECL_SIZE (fld));
2886 if (pos <= offset && (pos + size) > offset
2887 /* Do not get confused by zero sized bases. */
2888 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2889 break;
2891 /* Within a class type we should always find corresponding fields. */
2892 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2894 /* Nonbase types should have been stripped by outer_class_type. */
2895 gcc_assert (DECL_ARTIFICIAL (fld));
2897 outer_type = TREE_TYPE (fld);
2898 offset -= pos;
2900 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2901 offset, otr_type);
2902 if (!base_binfo)
2904 gcc_assert (odr_violation_reported);
2905 return;
2907 gcc_assert (base_binfo);
2908 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2910 bool can_refer;
2911 tree target = gimple_get_virt_method_for_binfo (otr_token,
2912 base_binfo,
2913 &can_refer);
2914 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2915 maybe_record_node (nodes, target, inserted, can_refer, completep);
2916 matched_vtables->add (BINFO_VTABLE (base_binfo));
2921 /* When virtual table is removed, we may need to flush the cache. */
2923 static void
2924 devirt_variable_node_removal_hook (varpool_node *n,
2925 void *d ATTRIBUTE_UNUSED)
2927 if (cached_polymorphic_call_targets
2928 && DECL_VIRTUAL_P (n->decl)
2929 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2930 free_polymorphic_call_targets_hash ();
2933 /* Record about how many calls would benefit from given type to be final. */
2935 struct odr_type_warn_count
2937 tree type;
2938 int count;
2939 profile_count dyn_count;
2942 /* Record about how many calls would benefit from given method to be final. */
2944 struct decl_warn_count
2946 tree decl;
2947 int count;
2948 profile_count dyn_count;
2951 /* Information about type and decl warnings. */
2953 class final_warning_record
2955 public:
2956 /* If needed grow type_warnings vector and initialize new decl_warn_count
2957 to have dyn_count set to profile_count::zero (). */
2958 void grow_type_warnings (unsigned newlen);
2960 profile_count dyn_count;
2961 auto_vec<odr_type_warn_count> type_warnings;
2962 hash_map<tree, decl_warn_count> decl_warnings;
2965 void
2966 final_warning_record::grow_type_warnings (unsigned newlen)
2968 unsigned len = type_warnings.length ();
2969 if (newlen > len)
2971 type_warnings.safe_grow_cleared (newlen);
2972 for (unsigned i = len; i < newlen; i++)
2973 type_warnings[i].dyn_count = profile_count::zero ();
2977 class final_warning_record *final_warning_records;
2979 /* Return vector containing possible targets of polymorphic call of type
2980 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2981 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2982 OTR_TYPE and include their virtual method. This is useful for types
2983 possibly in construction or destruction where the virtual table may
2984 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
2985 us to walk the inheritance graph for all derivations.
2987 If COMPLETEP is non-NULL, store true if the list is complete.
2988 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2989 in the target cache. If user needs to visit every target list
2990 just once, it can memoize them.
2992 If SPECULATIVE is set, the list will not contain targets that
2993 are not speculatively taken.
2995 Returned vector is placed into cache. It is NOT caller's responsibility
2996 to free it. The vector can be freed on cgraph_remove_node call if
2997 the particular node is a virtual function present in the cache. */
2999 vec <cgraph_node *>
3000 possible_polymorphic_call_targets (tree otr_type,
3001 HOST_WIDE_INT otr_token,
3002 ipa_polymorphic_call_context context,
3003 bool *completep,
3004 void **cache_token,
3005 bool speculative)
3007 static struct cgraph_node_hook_list *node_removal_hook_holder;
3008 vec <cgraph_node *> nodes = vNULL;
3009 auto_vec <tree, 8> bases_to_consider;
3010 odr_type type, outer_type;
3011 polymorphic_call_target_d key;
3012 polymorphic_call_target_d **slot;
3013 unsigned int i;
3014 tree binfo, target;
3015 bool complete;
3016 bool can_refer = false;
3017 bool skipped = false;
3019 otr_type = TYPE_MAIN_VARIANT (otr_type);
3021 /* If ODR is not initialized or the context is invalid, return empty
3022 incomplete list. */
3023 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
3025 if (completep)
3026 *completep = context.invalid;
3027 if (cache_token)
3028 *cache_token = NULL;
3029 return nodes;
3032 /* Do not bother to compute speculative info when user do not asks for it. */
3033 if (!speculative || !context.speculative_outer_type)
3034 context.clear_speculation ();
3036 type = get_odr_type (otr_type, true);
3038 /* Recording type variants would waste results cache. */
3039 gcc_assert (!context.outer_type
3040 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3042 /* Look up the outer class type we want to walk.
3043 If we fail to do so, the context is invalid. */
3044 if ((context.outer_type || context.speculative_outer_type)
3045 && !context.restrict_to_inner_class (otr_type))
3047 if (completep)
3048 *completep = true;
3049 if (cache_token)
3050 *cache_token = NULL;
3051 return nodes;
3053 gcc_assert (!context.invalid);
3055 /* Check that restrict_to_inner_class kept the main variant. */
3056 gcc_assert (!context.outer_type
3057 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3059 /* We canonicalize our query, so we do not need extra hashtable entries. */
3061 /* Without outer type, we have no use for offset. Just do the
3062 basic search from inner type. */
3063 if (!context.outer_type)
3064 context.clear_outer_type (otr_type);
3065 /* We need to update our hierarchy if the type does not exist. */
3066 outer_type = get_odr_type (context.outer_type, true);
3067 /* If the type is complete, there are no derivations. */
3068 if (TYPE_FINAL_P (outer_type->type))
3069 context.maybe_derived_type = false;
3071 /* Initialize query cache. */
3072 if (!cached_polymorphic_call_targets)
3074 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3075 polymorphic_call_target_hash
3076 = new polymorphic_call_target_hash_type (23);
3077 if (!node_removal_hook_holder)
3079 node_removal_hook_holder =
3080 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3081 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3082 NULL);
3086 if (in_lto_p)
3088 if (context.outer_type != otr_type)
3089 context.outer_type
3090 = get_odr_type (context.outer_type, true)->type;
3091 if (context.speculative_outer_type)
3092 context.speculative_outer_type
3093 = get_odr_type (context.speculative_outer_type, true)->type;
3096 /* Look up cached answer. */
3097 key.type = type;
3098 key.otr_token = otr_token;
3099 key.speculative = speculative;
3100 key.context = context;
3101 key.n_odr_types = odr_types.length ();
3102 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3103 if (cache_token)
3104 *cache_token = (void *)*slot;
3105 if (*slot)
3107 if (completep)
3108 *completep = (*slot)->complete;
3109 if ((*slot)->type_warning && final_warning_records)
3111 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3112 if (!final_warning_records->type_warnings
3113 [(*slot)->type_warning - 1].dyn_count.initialized_p ())
3114 final_warning_records->type_warnings
3115 [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
3116 if (final_warning_records->dyn_count > 0)
3117 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3118 = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3119 + final_warning_records->dyn_count;
3121 if (!speculative && (*slot)->decl_warning && final_warning_records)
3123 struct decl_warn_count *c =
3124 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3125 c->count++;
3126 if (final_warning_records->dyn_count > 0)
3127 c->dyn_count += final_warning_records->dyn_count;
3129 return (*slot)->targets;
3132 complete = true;
3134 /* Do actual search. */
3135 timevar_push (TV_IPA_VIRTUAL_CALL);
3136 *slot = XCNEW (polymorphic_call_target_d);
3137 if (cache_token)
3138 *cache_token = (void *)*slot;
3139 (*slot)->type = type;
3140 (*slot)->otr_token = otr_token;
3141 (*slot)->context = context;
3142 (*slot)->speculative = speculative;
3144 hash_set<tree> inserted;
3145 hash_set<tree> matched_vtables;
3147 /* First insert targets we speculatively identified as likely. */
3148 if (context.speculative_outer_type)
3150 odr_type speculative_outer_type;
3151 bool speculation_complete = true;
3153 /* First insert target from type itself and check if it may have
3154 derived types. */
3155 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3156 if (TYPE_FINAL_P (speculative_outer_type->type))
3157 context.speculative_maybe_derived_type = false;
3158 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3159 context.speculative_offset, otr_type);
3160 if (binfo)
3161 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3162 &can_refer);
3163 else
3164 target = NULL;
3166 /* In the case we get complete method, we don't need
3167 to walk derivations. */
3168 if (target && DECL_FINAL_P (target))
3169 context.speculative_maybe_derived_type = false;
3170 if (type_possibly_instantiated_p (speculative_outer_type->type))
3171 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3172 if (binfo)
3173 matched_vtables.add (BINFO_VTABLE (binfo));
3176 /* Next walk recursively all derived types. */
3177 if (context.speculative_maybe_derived_type)
3178 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3179 possible_polymorphic_call_targets_1 (nodes, &inserted,
3180 &matched_vtables,
3181 otr_type,
3182 speculative_outer_type->derived_types[i],
3183 otr_token, speculative_outer_type->type,
3184 context.speculative_offset,
3185 &speculation_complete,
3186 bases_to_consider,
3187 false);
3190 if (!speculative || !nodes.length ())
3192 /* First see virtual method of type itself. */
3193 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3194 context.offset, otr_type);
3195 if (binfo)
3196 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3197 &can_refer);
3198 else
3200 gcc_assert (odr_violation_reported);
3201 target = NULL;
3204 /* Destructors are never called through construction virtual tables,
3205 because the type is always known. */
3206 if (target && DECL_CXX_DESTRUCTOR_P (target))
3207 context.maybe_in_construction = false;
3209 if (target)
3211 /* In the case we get complete method, we don't need
3212 to walk derivations. */
3213 if (DECL_FINAL_P (target))
3214 context.maybe_derived_type = false;
3217 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3218 if (type_possibly_instantiated_p (outer_type->type))
3219 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3220 else
3221 skipped = true;
3223 if (binfo)
3224 matched_vtables.add (BINFO_VTABLE (binfo));
3226 /* Next walk recursively all derived types. */
3227 if (context.maybe_derived_type)
3229 for (i = 0; i < outer_type->derived_types.length(); i++)
3230 possible_polymorphic_call_targets_1 (nodes, &inserted,
3231 &matched_vtables,
3232 otr_type,
3233 outer_type->derived_types[i],
3234 otr_token, outer_type->type,
3235 context.offset, &complete,
3236 bases_to_consider,
3237 context.maybe_in_construction);
3239 if (!outer_type->all_derivations_known)
3241 if (!speculative && final_warning_records
3242 && nodes.length () == 1
3243 && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
3245 if (complete
3246 && warn_suggest_final_types
3247 && !outer_type->derived_types.length ())
3249 final_warning_records->grow_type_warnings
3250 (outer_type->id);
3251 final_warning_records->type_warnings[outer_type->id].count++;
3252 if (!final_warning_records->type_warnings
3253 [outer_type->id].dyn_count.initialized_p ())
3254 final_warning_records->type_warnings
3255 [outer_type->id].dyn_count = profile_count::zero ();
3256 final_warning_records->type_warnings[outer_type->id].dyn_count
3257 += final_warning_records->dyn_count;
3258 final_warning_records->type_warnings[outer_type->id].type
3259 = outer_type->type;
3260 (*slot)->type_warning = outer_type->id + 1;
3262 if (complete
3263 && warn_suggest_final_methods
3264 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3265 outer_type->type))
3267 bool existed;
3268 struct decl_warn_count &c =
3269 final_warning_records->decl_warnings.get_or_insert
3270 (nodes[0]->decl, &existed);
3272 if (existed)
3274 c.count++;
3275 c.dyn_count += final_warning_records->dyn_count;
3277 else
3279 c.count = 1;
3280 c.dyn_count = final_warning_records->dyn_count;
3281 c.decl = nodes[0]->decl;
3283 (*slot)->decl_warning = nodes[0]->decl;
3286 complete = false;
3290 if (!speculative)
3292 /* Destructors are never called through construction virtual tables,
3293 because the type is always known. One of entries may be
3294 cxa_pure_virtual so look to at least two of them. */
3295 if (context.maybe_in_construction)
3296 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3297 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3298 context.maybe_in_construction = false;
3299 if (context.maybe_in_construction)
3301 if (type != outer_type
3302 && (!skipped
3303 || (context.maybe_derived_type
3304 && !type_all_derivations_known_p (outer_type->type))))
3305 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3306 context.offset, nodes, &inserted,
3307 &matched_vtables, &complete);
3308 if (skipped)
3309 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3310 for (i = 0; i < bases_to_consider.length(); i++)
3311 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3316 (*slot)->targets = nodes;
3317 (*slot)->complete = complete;
3318 (*slot)->n_odr_types = odr_types.length ();
3319 if (completep)
3320 *completep = complete;
3322 timevar_pop (TV_IPA_VIRTUAL_CALL);
3323 return nodes;
/* Hash-table traversal callback for final_warning_records->decl_warnings:
   push a pointer to each decl warning record VALUE into VEC so the records
   can later be sorted and reported.  KEY is unused.  Returns true so that
   the traversal visits every entry.  */
3326 bool
3327 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3328 vec<const decl_warn_count*> *vec)
3330 vec->safe_push (&value);
3331 return true;
3334 /* Dump target list TARGETS into file F.  Unless VERBOSE, stop after
   roughly a dozen entries and summarize how many targets remain.  */
3336 static void
3337 dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
3339 unsigned int i;
3341 for (i = 0; i < targets.length (); i++)
3343 char *name = NULL;
/* With LTO, symbol names are mangled; demangle them for readability.  */
3344 if (in_lto_p)
3345 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3346 fprintf (f, " %s/%i", name ? name : targets[i]->name (),
3347 targets[i]->order);
3348 if (in_lto_p)
3349 free (name);
/* Flag declarations without a body in this unit; note whether they were
   declared inline (and thus may simply have been not emitted).  */
3350 if (!targets[i]->definition)
3351 fprintf (f, " (no definition%s)",
3352 DECL_DECLARED_INLINE_P (targets[i]->decl)
3353 ? " inline" : "");
3354 /* With many targets for every call polymorphic dumps are going to
3355 be quadratic in size.  Truncate unless a verbose dump was requested. */
3356 if (i > 10 && !verbose)
3358 fprintf (f, " ... and %i more targets\n", targets.length () - i);
3359 return;
3362 fprintf (f, "\n");
3365 /* Dump all possible targets of a polymorphic call described by OTR_TYPE,
   OTR_TOKEN and context CTX into file F.  Dumps the non-speculative target
   list first, then the speculative one if it differs.  VERBOSE is forwarded
   to dump_targets.  */
3367 void
3368 dump_possible_polymorphic_call_targets (FILE *f,
3369 tree otr_type,
3370 HOST_WIDE_INT otr_token,
3371 const ipa_polymorphic_call_context &ctx,
3372 bool verbose)
3374 vec <cgraph_node *> targets;
3375 bool final;
3376 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3377 unsigned int len;
/* Nothing to dump if the ODR type was never recorded.  */
3379 if (!type)
3380 return;
/* Non-speculative target list first.  */
3381 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3382 ctx,
3383 &final, NULL, false);
3384 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3385 print_generic_expr (f, type->type, TDF_SLIM);
3386 fprintf (f, " token %i\n", (int)otr_token);
3388 ctx.dump (f);
3390 fprintf (f, " %s%s%s%s\n ",
3391 final ? "This is a complete list." :
3392 "This is partial list; extra targets may be defined in other units.",
3393 ctx.maybe_in_construction ? " (base types included)" : "",
3394 ctx.maybe_derived_type ? " (derived types included)" : "",
3395 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3396 len = targets.length ();
3397 dump_targets (f, targets, verbose);
/* Now the speculative list; only dump it when it adds targets.  */
3399 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3400 ctx,
3401 &final, NULL, true);
3402 if (targets.length () != len)
3404 fprintf (f, " Speculative targets:");
3405 dump_targets (f, targets, verbose);
3407 /* Ugly: during callgraph construction the target cache may get populated
3408 before all targets are found.  While this is harmless (because all local
3409 types are discovered and only in that case do we devirtualize fully and we
3410 don't do speculative devirtualization before IPA stage) it triggers the
3411 assert here when dumping at that stage also populates the cache with
3412 speculative targets.  Quietly ignore this. */
3413 gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
3414 fprintf (f, "\n");
3418 /* Return true if N can possibly be a target of a polymorphic call of
3419 OTR_TYPE/OTR_TOKEN with context CTX.  Conservatively returns true when
   the ODR machinery cannot prove otherwise.  */
3421 bool
3422 possible_polymorphic_call_target_p (tree otr_type,
3423 HOST_WIDE_INT otr_token,
3424 const ipa_polymorphic_call_context &ctx,
3425 struct cgraph_node *n)
3427 vec <cgraph_node *> targets;
3428 unsigned int i;
3429 enum built_in_function fcode;
3430 bool final;
/* __builtin_unreachable and __builtin_trap stand in for impossible or
   removed targets and are always acceptable.  */
3432 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3433 && ((fcode = DECL_FUNCTION_CODE (n->decl)) == BUILT_IN_UNREACHABLE
3434 || fcode == BUILT_IN_TRAP))
3435 return true;
/* __cxa_pure_virtual may appear in any vtable slot.  */
3437 if (is_cxa_pure_virtual_p (n->decl))
3438 return true;
/* Without ODR type information we cannot rule anything out.  */
3440 if (!odr_hash)
3441 return true;
3442 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3443 for (i = 0; i < targets.length (); i++)
3444 if (n->semantically_equivalent_p (targets[i]))
3445 return true;
3447 /* At the moment we allow the middle end to dig out new external declarations
3448 as targets of polymorphic calls. */
3449 if (!final && !n->definition)
3450 return true;
3451 return false;
3456 /* Return true if N can possibly be a target of a polymorphic call of
3457 OBJ_TYPE_REF expression REF in STMT.  Builds the polymorphic call
   context from the current function and delegates to the type/token
   overload above.  */
3459 bool
3460 possible_polymorphic_call_target_p (tree ref,
3461 gimple *stmt,
3462 struct cgraph_node *n)
3464 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3465 tree call_fn = gimple_call_fn (stmt);
3467 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3468 tree_to_uhwi
3469 (OBJ_TYPE_REF_TOKEN (call_fn)),
3470 context,
3475 /* After callgraph construction new external nodes may appear.
3476 Add them into the type inheritance graph.  Also drops the polymorphic
   call target cache, since the set of known types may have grown.  */
3478 void
3479 update_type_inheritance_graph (void)
3481 struct cgraph_node *n;
/* Nothing to update when ODR info was never built.  */
3483 if (!odr_hash)
3484 return;
3485 free_polymorphic_call_targets_hash ();
3486 timevar_push (TV_IPA_INHERITANCE);
3487 /* We reconstruct the graph starting from types of all methods seen in the
3488 unit.  Only undefined real virtual symbols can introduce new types.  */
3489 FOR_EACH_FUNCTION (n)
3490 if (DECL_VIRTUAL_P (n->decl)
3491 && !n->definition
3492 && n->real_symbol_p ())
3493 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3494 timevar_pop (TV_IPA_INHERITANCE);
3498 /* Return true if N looks like a likely target of a polymorphic call.
3499 Rule out cxa_pure_virtual, noreturns, functions declared cold and
3500 other obvious cases. */
3502 bool
3503 likely_target_p (struct cgraph_node *n)
3505 int flags;
3506 /* cxa_pure_virtual and similar non-method things are not likely. */
3507 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3508 return false;
3509 flags = flags_from_decl_or_type (n->decl);
/* Noreturn targets (e.g. pure virtual thunks, aborting stubs) are not
   likely to be the hot destination of a speculated call.  */
3510 if (flags & ECF_NORETURN)
3511 return false;
3512 if (lookup_attribute ("cold",
3513 DECL_ATTRIBUTES (n->decl)))
3514 return false;
3515 if (n->frequency < NODE_FREQUENCY_NORMAL)
3516 return false;
3517 /* If there are no live virtual tables referring to the target,
3518 the only way the target can be called is via an instance coming from another
3519 compilation unit; speculative devirtualization is built around the
3520 assumption that this won't happen. */
3521 if (!referenced_from_vtable_p (n))
3522 return false;
3523 return true;
3526 /* Compare type warning records P1 and P2 and choose the one with larger count;
3527 helper for qsort.  Sorts in descending order: first by profile dyn_count,
   then by static count.  */
3529 static int
3530 type_warning_cmp (const void *p1, const void *p2)
3532 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3533 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3535 if (t1->dyn_count < t2->dyn_count)
3536 return 1;
3537 if (t1->dyn_count > t2->dyn_count)
3538 return -1;
/* NOTE(review): plain subtraction could overflow for huge counts;
   presumably counts stay small here — confirm if this ever matters.  */
3539 return t2->count - t1->count;
3542 /* Compare decl warning records P1 and P2 and choose the one with larger count;
3543 helper for qsort.  Like type_warning_cmp, but the qsorted vector holds
   pointers to records, hence the extra dereference.  Descending order:
   dyn_count first, then static count.  */
3545 static int
3546 decl_warning_cmp (const void *p1, const void *p2)
3548 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3549 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3551 if (t1->dyn_count < t2->dyn_count)
3552 return 1;
3553 if (t1->dyn_count > t2->dyn_count)
3554 return -1;
3555 return t2->count - t1->count;
3559 /* Try to speculatively devirtualize a call to OTR_TYPE with OTR_TOKEN with
3560 context CTX.  Return the unique likely target, or NULL when there is no
   single likely defined, non-external, non-interposable target.  */
3562 struct cgraph_node *
3563 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3564 ipa_polymorphic_call_context ctx)
3566 vec <cgraph_node *>targets
3567 = possible_polymorphic_call_targets
3568 (otr_type, otr_token, ctx, NULL, NULL, true);
3569 unsigned int i;
3570 struct cgraph_node *likely_target = NULL;
/* Require exactly one likely target; bail out on the second candidate.  */
3572 for (i = 0; i < targets.length (); i++)
3573 if (likely_target_p (targets[i]))
3575 if (likely_target)
3576 return NULL;
3577 likely_target = targets[i];
/* The target must have a body in this unit and must not be external.  */
3579 if (!likely_target
3580 ||!likely_target->definition
3581 || DECL_EXTERNAL (likely_target->decl))
3582 return NULL;
3584 /* Don't use an implicitly-declared destructor (c++/58678). */
3585 struct cgraph_node *non_thunk_target
3586 = likely_target->function_symbol ();
3587 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3588 return NULL;
/* Reject targets that might be replaced at link time: speculating into a
   discardable interposable symbol would be unsafe.  */
3589 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3590 && likely_target->can_be_discarded_p ())
3591 return NULL;
3592 return likely_target;
3595 /* The ipa-devirt pass.
3596 When a polymorphic call has only one likely target in the unit,
3597 turn it into a speculative call.  Also emits -Wsuggest-final-types and
   -Wsuggest-final-methods warnings when requested.  Returns
   TODO_remove_functions when any call was devirtualized or had its
   polymorphic info dropped.  */
3599 static unsigned int
3600 ipa_devirt (void)
3602 struct cgraph_node *n;
3603 hash_set<void *> bad_call_targets;
3604 struct cgraph_edge *e;
/* Statistics counters reported in the dump file at the end.  */
3606 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3607 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3608 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3609 int ndropped = 0;
/* Without ODR types there is nothing to devirtualize.  */
3611 if (!odr_types_ptr)
3612 return 0;
3614 if (dump_file)
3615 dump_type_inheritance_graph (dump_file);
3617 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3618 This is implemented by setting up final_warning_records that are updated
3619 by get_polymorphic_call_targets.
3620 We need to clear the cache in this case to trigger recomputation of all
3621 entries. */
3622 if (warn_suggest_final_methods || warn_suggest_final_types)
3624 final_warning_records = new (final_warning_record);
3625 final_warning_records->dyn_count = profile_count::zero ();
3626 final_warning_records->grow_type_warnings (odr_types.length ());
3627 free_polymorphic_call_targets_hash ();
/* Main loop: walk every defined function and inspect its polymorphic
   indirect calls.  */
3630 FOR_EACH_DEFINED_FUNCTION (n)
3632 bool update = false;
3633 if (!opt_for_fn (n->decl, flag_devirtualize))
3634 continue;
/* NOTE(review): "Procesing" typo in the dump string below; left untouched
   since this edit must not change runtime strings.  */
3635 if (dump_file && n->indirect_calls)
3636 fprintf (dump_file, "\n\nProcesing function %s\n",
3637 n->dump_name ());
3638 for (e = n->indirect_calls; e; e = e->next_callee)
3639 if (e->indirect_info->polymorphic)
3641 struct cgraph_node *likely_target = NULL;
3642 void *cache_token;
3643 bool final;
3645 if (final_warning_records)
3646 final_warning_records->dyn_count = e->count.ipa ();
3648 vec <cgraph_node *>targets
3649 = possible_polymorphic_call_targets
3650 (e, &final, &cache_token, true);
3651 unsigned int i;
3653 /* Trigger warnings by calculating non-speculative targets. */
3654 if (warn_suggest_final_methods || warn_suggest_final_types)
3655 possible_polymorphic_call_targets (e);
3657 if (dump_file)
3658 dump_possible_polymorphic_call_targets
3659 (dump_file, e, (dump_flags & TDF_DETAILS));
3661 npolymorphic++;
3663 /* See if the call can be devirtualized by means of ipa-prop's
3664 polymorphic call context propagation.  If not, we can just
3665 forget about this call being polymorphic and avoid some heavy
3666 lifting in remove_unreachable_nodes that will otherwise try to
3667 keep all possible targets alive until inlining and in the inliner
3668 itself.
3670 This may need to be revisited once we add further ways to use
3671 the may edges, but it is a reasonable thing to do right now. */
3673 if ((e->indirect_info->param_index == -1
3674 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3675 && e->indirect_info->vptr_changed))
3676 && !flag_ltrans_devirtualize)
3678 e->indirect_info->polymorphic = false;
3679 ndropped++;
3680 if (dump_file)
3681 fprintf (dump_file, "Dropping polymorphic call info;"
3682 " it cannot be used by ipa-prop\n");
/* The rest of the loop only deals with speculative devirtualization.  */
3685 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3686 continue;
3688 if (!e->maybe_hot_p ())
3690 if (dump_file)
3691 fprintf (dump_file, "Call is cold\n\n");
3692 ncold++;
3693 continue;
3695 if (e->speculative)
3697 if (dump_file)
3698 fprintf (dump_file, "Call is already speculated\n\n");
3699 nspeculated++;
3701 /* When dumping see if we agree with speculation. */
3702 if (!dump_file)
3703 continue;
/* Skip target lists already proven useless for this cache token.  */
3705 if (bad_call_targets.contains (cache_token))
3707 if (dump_file)
3708 fprintf (dump_file, "Target list is known to be useless\n\n");
3709 nmultiple++;
3710 continue;
/* Find the unique likely target; more than one disqualifies the call.  */
3712 for (i = 0; i < targets.length (); i++)
3713 if (likely_target_p (targets[i]))
3715 if (likely_target)
3717 likely_target = NULL;
3718 if (dump_file)
3719 fprintf (dump_file, "More than one likely target\n\n");
3720 nmultiple++;
3721 break;
3723 likely_target = targets[i];
3725 if (!likely_target)
3727 bad_call_targets.add (cache_token);
3728 continue;
3730 /* This is reached only when dumping; check if we agree or disagree
3731 with the speculation. */
3732 if (e->speculative)
3734 struct cgraph_edge *e2;
3735 struct ipa_ref *ref;
3736 e->speculative_call_info (e2, e, ref);
3737 if (e2->callee->ultimate_alias_target ()
3738 == likely_target->ultimate_alias_target ())
3740 fprintf (dump_file, "We agree with speculation\n\n");
3741 nok++;
3743 else
3745 fprintf (dump_file, "We disagree with speculation\n\n");
3746 nwrong++;
3748 continue;
3750 if (!likely_target->definition)
3752 if (dump_file)
3753 fprintf (dump_file, "Target is not a definition\n\n");
3754 nnotdefined++;
3755 continue;
3757 /* Do not introduce new references to external symbols.  While we
3758 can handle these just fine, it is common for programs to be
3759 incorrectly built with headers declaring methods they are never
3760 actually linked with. */
3761 if (DECL_EXTERNAL (likely_target->decl))
3763 if (dump_file)
3764 fprintf (dump_file, "Target is external\n\n");
3765 nexternal++;
3766 continue;
3768 /* Don't use an implicitly-declared destructor (c++/58678). */
3769 struct cgraph_node *non_thunk_target
3770 = likely_target->function_symbol ();
3771 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3773 if (dump_file)
3774 fprintf (dump_file, "Target is artificial\n\n");
3775 nartificial++;
3776 continue;
3778 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3779 && likely_target->can_be_discarded_p ())
3781 if (dump_file)
3782 fprintf (dump_file, "Target is overwritable\n\n");
3783 noverwritable++;
3784 continue;
/* All checks passed: turn the call into a speculative direct call,
   guarded by the debug counter.  */
3786 else if (dbg_cnt (devirt))
3788 if (dump_enabled_p ())
3790 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
3791 "speculatively devirtualizing call "
3792 "in %s to %s\n",
3793 n->dump_name (),
3794 likely_target->dump_name ());
/* Prefer a noninterposable alias so the reference cannot be broken
   by symbol interposition.  */
3796 if (!likely_target->can_be_discarded_p ())
3798 cgraph_node *alias;
3799 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3800 if (alias)
3801 likely_target = alias;
3803 nconverted++;
3804 update = true;
/* 80% probability for the speculative edge is a heuristic.  */
3805 e->make_speculative
3806 (likely_target, e->count.apply_scale (8, 10));
3809 if (update)
3810 ipa_update_overall_fn_summary (n);
/* Emit the accumulated -Wsuggest-final-* warnings, most frequent first.  */
3812 if (warn_suggest_final_methods || warn_suggest_final_types)
3814 if (warn_suggest_final_types)
3816 final_warning_records->type_warnings.qsort (type_warning_cmp);
3817 for (unsigned int i = 0;
3818 i < final_warning_records->type_warnings.length (); i++)
3819 if (final_warning_records->type_warnings[i].count)
3821 tree type = final_warning_records->type_warnings[i].type;
3822 int count = final_warning_records->type_warnings[i].count;
3823 profile_count dyn_count
3824 = final_warning_records->type_warnings[i].dyn_count;
/* Without profile data, omit the execution-count wording.  */
3826 if (!(dyn_count > 0))
3827 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3828 OPT_Wsuggest_final_types, count,
3829 "Declaring type %qD final "
3830 "would enable devirtualization of %i call",
3831 "Declaring type %qD final "
3832 "would enable devirtualization of %i calls",
3833 type,
3834 count);
3835 else
3836 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3837 OPT_Wsuggest_final_types, count,
3838 "Declaring type %qD final "
3839 "would enable devirtualization of %i call "
3840 "executed %lli times",
3841 "Declaring type %qD final "
3842 "would enable devirtualization of %i calls "
3843 "executed %lli times",
3844 type,
3845 count,
3846 (long long) dyn_count.to_gcov_type ());
3850 if (warn_suggest_final_methods)
3852 auto_vec<const decl_warn_count*> decl_warnings_vec;
3854 final_warning_records->decl_warnings.traverse
3855 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3856 decl_warnings_vec.qsort (decl_warning_cmp);
3857 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3859 tree decl = decl_warnings_vec[i]->decl;
3860 int count = decl_warnings_vec[i]->count;
3861 profile_count dyn_count
3862 = decl_warnings_vec[i]->dyn_count;
/* Four variants: with/without profile counts, destructor vs. method.  */
3864 if (!(dyn_count > 0))
3865 if (DECL_CXX_DESTRUCTOR_P (decl))
3866 warning_n (DECL_SOURCE_LOCATION (decl),
3867 OPT_Wsuggest_final_methods, count,
3868 "Declaring virtual destructor of %qD final "
3869 "would enable devirtualization of %i call",
3870 "Declaring virtual destructor of %qD final "
3871 "would enable devirtualization of %i calls",
3872 DECL_CONTEXT (decl), count);
3873 else
3874 warning_n (DECL_SOURCE_LOCATION (decl),
3875 OPT_Wsuggest_final_methods, count,
3876 "Declaring method %qD final "
3877 "would enable devirtualization of %i call",
3878 "Declaring method %qD final "
3879 "would enable devirtualization of %i calls",
3880 decl, count);
3881 else if (DECL_CXX_DESTRUCTOR_P (decl))
3882 warning_n (DECL_SOURCE_LOCATION (decl),
3883 OPT_Wsuggest_final_methods, count,
3884 "Declaring virtual destructor of %qD final "
3885 "would enable devirtualization of %i call "
3886 "executed %lli times",
3887 "Declaring virtual destructor of %qD final "
3888 "would enable devirtualization of %i calls "
3889 "executed %lli times",
3890 DECL_CONTEXT (decl), count,
3891 (long long)dyn_count.to_gcov_type ());
3892 else
3893 warning_n (DECL_SOURCE_LOCATION (decl),
3894 OPT_Wsuggest_final_methods, count,
3895 "Declaring method %qD final "
3896 "would enable devirtualization of %i call "
3897 "executed %lli times",
3898 "Declaring method %qD final "
3899 "would enable devirtualization of %i calls "
3900 "executed %lli times",
3901 decl, count,
3902 (long long)dyn_count.to_gcov_type ());
3906 delete (final_warning_records);
3907 final_warning_records = 0;
/* Dump the overall statistics gathered above.  */
3910 if (dump_file)
3911 fprintf (dump_file,
3912 "%i polymorphic calls, %i devirtualized,"
3913 " %i speculatively devirtualized, %i cold\n"
3914 "%i have multiple targets, %i overwritable,"
3915 " %i already speculated (%i agree, %i disagree),"
3916 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3917 npolymorphic, ndevirtualized, nconverted, ncold,
3918 nmultiple, noverwritable, nspeculated, nok, nwrong,
3919 nexternal, nnotdefined, nartificial, ndropped);
3920 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3923 namespace {
/* Pass metadata for the ipa-devirt IPA pass; symbol table is dumped
   after the pass finishes.  */
3925 const pass_data pass_data_ipa_devirt =
3927 IPA_PASS, /* type */
3928 "devirt", /* name */
3929 OPTGROUP_NONE, /* optinfo_flags */
3930 TV_IPA_DEVIRT, /* tv_id */
3931 0, /* properties_required */
3932 0, /* properties_provided */
3933 0, /* properties_destroyed */
3934 0, /* todo_flags_start */
3935 ( TODO_dump_symtab ), /* todo_flags_finish */
/* IPA pass wrapper: no summaries or per-function transforms are used;
   all work happens in execute via ipa_devirt.  */
3938 class pass_ipa_devirt : public ipa_opt_pass_d
3940 public:
3941 pass_ipa_devirt (gcc::context *ctxt)
3942 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3943 NULL, /* generate_summary */
3944 NULL, /* write_summary */
3945 NULL, /* read_summary */
3946 NULL, /* write_optimization_summary */
3947 NULL, /* read_optimization_summary */
3948 NULL, /* stmt_fixup */
3949 0, /* function_transform_todo_flags_start */
3950 NULL, /* function_transform */
3951 NULL) /* variable_transform */
3954 /* opt_pass methods: */
3955 virtual bool gate (function *)
3957 /* In LTO, always run the IPA passes and decide on function basis if the
3958 pass is enabled. */
3959 if (in_lto_p)
3960 return true;
/* Otherwise run only when devirtualization is requested and either
   speculative devirtualization or the final-suggestion warnings are on.  */
3961 return (flag_devirtualize
3962 && (flag_devirtualize_speculatively
3963 || (warn_suggest_final_methods
3964 || warn_suggest_final_types))
3965 && optimize);
3968 virtual unsigned int execute (function *) { return ipa_devirt (); }
3970 }; // class pass_ipa_devirt
3972 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
3974 ipa_opt_pass_d *
3975 make_pass_ipa_devirt (gcc::context *ctxt)
3977 return new pass_ipa_devirt (ctxt);
3980 #include "gt-ipa-devirt.h"