1 /* Basic IPA utilities for type inheritance graph construction and
3 Copyright (C) 2013-2019 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (which differs from the BINFO_VTABLE of the
65 binfo associated with the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
84 build_type_inheritance_graph triggers a construction of the type inheritance
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
110 #include "coretypes.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
130 #include "gimple-pretty-print.h"
132 #include "stringpool.h"
135 /* Hash based set of pairs of types. */
143 struct default_hash_traits
<type_pair
>
144 : typed_noop_remove
<type_pair
>
146 GTY((skip
)) typedef type_pair value_type
;
147 GTY((skip
)) typedef type_pair compare_type
;
151 return TYPE_UID (p
.first
) ^ TYPE_UID (p
.second
);
154 is_empty (type_pair p
)
156 return p
.first
== NULL
;
159 is_deleted (type_pair p ATTRIBUTE_UNUSED
)
164 equal (const type_pair
&a
, const type_pair
&b
)
166 return a
.first
==b
.first
&& a
.second
== b
.second
;
169 mark_empty (type_pair
&e
)
175 static bool odr_types_equivalent_p (tree
, tree
, bool, bool *,
176 hash_set
<type_pair
> *,
177 location_t
, location_t
);
178 static void warn_odr (tree t1
, tree t2
, tree st1
, tree st2
,
179 bool warn
, bool *warned
, const char *reason
);
181 static bool odr_violation_reported
= false;
184 /* Pointer set of all call targets appearing in the cache. */
185 static hash_set
<cgraph_node
*> *cached_polymorphic_call_targets
;
187 /* The node of type inheritance graph. For each type unique in
188 One Definition Rule (ODR) sense, we produce one node linking all
189 main variants of types equivalent to it, bases and derived types. */
191 struct GTY(()) odr_type_d
195 /* All bases; built only for main variants of types. */
196 vec
<odr_type
> GTY((skip
)) bases
;
197 /* All derived types with virtual methods seen in unit;
198 built only for main variants of types. */
199 vec
<odr_type
> GTY((skip
)) derived_types
;
201 /* All equivalent types, if more than one. */
202 vec
<tree
, va_gc
> *types
;
203 /* Set of all equivalent types, if NON-NULL. */
204 hash_set
<tree
> * GTY((skip
)) types_set
;
206 /* Unique ID indexing the type in odr_types array. */
208 /* Is it in anonymous namespace? */
209 bool anonymous_namespace
;
210 /* Do we know about all derivations of given type? */
211 bool all_derivations_known
;
212 /* Did we report ODR violation here? */
214 /* Set when virtual table without RTTI previaled table with. */
216 /* Set when the canonical type is determined using the type name. */
220 /* Return TRUE if all derived types of T are known and thus
221 we may consider the walk of derived type complete.
223 This is typically true only for final anonymous namespace types and types
224 defined within functions (that may be COMDAT and thus shared across units,
225 but with the same set of derived types). */
228 type_all_derivations_known_p (const_tree t
)
230 if (TYPE_FINAL_P (t
))
234 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
235 if (!TYPE_NAME (t
) || TREE_CODE (TYPE_NAME (t
)) != TYPE_DECL
)
237 if (type_in_anonymous_namespace_p (t
))
239 return (decl_function_context (TYPE_NAME (t
)) != NULL
);
242 /* Return TRUE if type's constructors are all visible. */
245 type_all_ctors_visible_p (tree t
)
248 && symtab
->state
>= CONSTRUCTION
249 /* We cannot always use type_all_derivations_known_p.
250 For function local types we must assume case where
251 the function is COMDAT and shared in between units.
253 TODO: These cases are quite easy to get, but we need
254 to keep track of C++ privatizing via -Wno-weak
255 as well as the IPA privatizing. */
256 && type_in_anonymous_namespace_p (t
);
259 /* Return TRUE if type may have instance. */
262 type_possibly_instantiated_p (tree t
)
267 /* TODO: Add abstract types here. */
268 if (!type_all_ctors_visible_p (t
))
271 vtable
= BINFO_VTABLE (TYPE_BINFO (t
));
272 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
273 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
274 vnode
= varpool_node::get (vtable
);
275 return vnode
&& vnode
->definition
;
278 /* Hash used to unify ODR types based on their mangled name and for anonymous
281 struct odr_name_hasher
: pointer_hash
<odr_type_d
>
283 typedef union tree_node
*compare_type
;
284 static inline hashval_t
hash (const odr_type_d
*);
285 static inline bool equal (const odr_type_d
*, const tree_node
*);
286 static inline void remove (odr_type_d
*);
290 can_be_name_hashed_p (tree t
)
292 return (!in_lto_p
|| odr_type_p (t
));
295 /* Hash type by its ODR name. */
298 hash_odr_name (const_tree t
)
300 gcc_checking_assert (TYPE_MAIN_VARIANT (t
) == t
);
302 /* If not in LTO, all main variants are unique, so we can do
305 return htab_hash_pointer (t
);
307 /* Anonymous types are unique. */
308 if (type_with_linkage_p (t
) && type_in_anonymous_namespace_p (t
))
309 return htab_hash_pointer (t
);
311 gcc_checking_assert (TYPE_NAME (t
)
312 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
)));
313 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t
)));
316 /* Return the computed hashcode for ODR_TYPE. */
319 odr_name_hasher::hash (const odr_type_d
*odr_type
)
321 return hash_odr_name (odr_type
->type
);
324 /* For languages with One Definition Rule, work out if
325 types are the same based on their name.
327 This is non-trivial for LTO where minor differences in
328 the type representation may have prevented type merging
329 to merge two copies of otherwise equivalent type.
331 Until we start streaming mangled type names, this function works
332 only for polymorphic types.
336 types_same_for_odr (const_tree type1
, const_tree type2
)
338 gcc_checking_assert (TYPE_P (type1
) && TYPE_P (type2
));
340 type1
= TYPE_MAIN_VARIANT (type1
);
341 type2
= TYPE_MAIN_VARIANT (type2
);
349 /* Anonymous namespace types are never duplicated. */
350 if ((type_with_linkage_p (type1
) && type_in_anonymous_namespace_p (type1
))
351 || (type_with_linkage_p (type2
) && type_in_anonymous_namespace_p (type2
)))
354 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1
))
355 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2
)));
358 /* Return true if we can decide on ODR equivalency.
360 In non-LTO it is always decide, in LTO however it depends in the type has
361 ODR info attached. */
364 types_odr_comparable (tree t1
, tree t2
)
367 || TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
)
368 || (odr_type_p (TYPE_MAIN_VARIANT (t1
))
369 && odr_type_p (TYPE_MAIN_VARIANT (t2
))));
372 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
373 known, be conservative and return false. */
376 types_must_be_same_for_odr (tree t1
, tree t2
)
378 if (types_odr_comparable (t1
, t2
))
379 return types_same_for_odr (t1
, t2
);
381 return TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
);
384 /* If T is compound type, return type it is based on. */
387 compound_type_base (const_tree t
)
389 if (TREE_CODE (t
) == ARRAY_TYPE
390 || POINTER_TYPE_P (t
)
391 || TREE_CODE (t
) == COMPLEX_TYPE
392 || VECTOR_TYPE_P (t
))
393 return TREE_TYPE (t
);
394 if (TREE_CODE (t
) == METHOD_TYPE
)
395 return TYPE_METHOD_BASETYPE (t
);
396 if (TREE_CODE (t
) == OFFSET_TYPE
)
397 return TYPE_OFFSET_BASETYPE (t
);
401 /* Return true if T is either ODR type or compound type based from it.
402 If the function return true, we know that T is a type originating from C++
403 source even at link-time. */
406 odr_or_derived_type_p (const_tree t
)
410 if (odr_type_p (TYPE_MAIN_VARIANT (t
)))
412 /* Function type is a tricky one. Basically we can consider it
413 ODR derived if return type or any of the parameters is.
414 We need to check all parameters because LTO streaming merges
415 common types (such as void) and they are not considered ODR then. */
416 if (TREE_CODE (t
) == FUNCTION_TYPE
)
418 if (TYPE_METHOD_BASETYPE (t
))
419 t
= TYPE_METHOD_BASETYPE (t
);
422 if (TREE_TYPE (t
) && odr_or_derived_type_p (TREE_TYPE (t
)))
424 for (t
= TYPE_ARG_TYPES (t
); t
; t
= TREE_CHAIN (t
))
425 if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t
))))
431 t
= compound_type_base (t
);
437 /* Compare types T1 and T2 and return true if they are
441 odr_name_hasher::equal (const odr_type_d
*o1
, const tree_node
*t2
)
445 gcc_checking_assert (TYPE_MAIN_VARIANT (t2
) == t2
);
446 gcc_checking_assert (TYPE_MAIN_VARIANT (t1
) == t1
);
451 /* Check for anonymous namespaces. */
452 if ((type_with_linkage_p (t1
) && type_in_anonymous_namespace_p (t1
))
453 || (type_with_linkage_p (t2
) && type_in_anonymous_namespace_p (t2
)))
455 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
)));
456 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)));
457 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
))
458 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)));
461 /* Free ODR type V. */
464 odr_name_hasher::remove (odr_type_d
*v
)
467 v
->derived_types
.release ();
473 /* ODR type hash used to look up ODR type based on tree type node. */
475 typedef hash_table
<odr_name_hasher
> odr_hash_type
;
476 static odr_hash_type
*odr_hash
;
478 /* ODR types are also stored into ODR_TYPE vector to allow consistent
479 walking. Bases appear before derived types. Vector is garbage collected
480 so we won't end up visiting empty types. */
482 static GTY(()) vec
<odr_type
, va_gc
> *odr_types_ptr
;
483 #define odr_types (*odr_types_ptr)
485 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
487 set_type_binfo (tree type
, tree binfo
)
489 for (; type
; type
= TYPE_NEXT_VARIANT (type
))
490 if (COMPLETE_TYPE_P (type
))
491 TYPE_BINFO (type
) = binfo
;
493 gcc_assert (!TYPE_BINFO (type
));
496 /* Return true if type variants match.
497 This assumes that we already verified that T1 and T2 are variants of the
501 type_variants_equivalent_p (tree t1
, tree t2
)
503 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
506 if (comp_type_attributes (t1
, t2
) != 1)
509 if (COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
)
510 && TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
))
516 /* Compare T1 and T2 based on name or structure. */
519 odr_subtypes_equivalent_p (tree t1
, tree t2
,
520 hash_set
<type_pair
> *visited
,
521 location_t loc1
, location_t loc2
)
524 /* This can happen in incomplete types that should be handled earlier. */
525 gcc_assert (t1
&& t2
);
530 /* Anonymous namespace types must match exactly. */
531 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1
))
532 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1
)))
533 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2
))
534 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2
))))
537 /* For ODR types be sure to compare their names.
538 To support -Wno-odr-type-merging we allow one type to be non-ODR
539 and other ODR even though it is a violation. */
540 if (types_odr_comparable (t1
, t2
))
543 && odr_type_p (TYPE_MAIN_VARIANT (t1
))
544 && get_odr_type (TYPE_MAIN_VARIANT (t1
), true)->odr_violated
)
546 if (!types_same_for_odr (t1
, t2
))
548 if (!type_variants_equivalent_p (t1
, t2
))
550 /* Limit recursion: If subtypes are ODR types and we know
551 that they are same, be happy. */
552 if (odr_type_p (TYPE_MAIN_VARIANT (t1
)))
556 /* Component types, builtins and possibly violating ODR types
557 have to be compared structurally. */
558 if (TREE_CODE (t1
) != TREE_CODE (t2
))
560 if (AGGREGATE_TYPE_P (t1
)
561 && (TYPE_NAME (t1
) == NULL_TREE
) != (TYPE_NAME (t2
) == NULL_TREE
))
564 type_pair pair
={TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
)};
565 if (TYPE_UID (TYPE_MAIN_VARIANT (t1
)) > TYPE_UID (TYPE_MAIN_VARIANT (t2
)))
567 pair
.first
= TYPE_MAIN_VARIANT (t2
);
568 pair
.second
= TYPE_MAIN_VARIANT (t1
);
570 if (visited
->add (pair
))
572 if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
),
573 false, NULL
, visited
, loc1
, loc2
))
575 if (!type_variants_equivalent_p (t1
, t2
))
580 /* Return true if DECL1 and DECL2 are identical methods. Consider
581 name equivalent to name.localalias.xyz. */
584 methods_equal_p (tree decl1
, tree decl2
)
586 if (DECL_ASSEMBLER_NAME (decl1
) == DECL_ASSEMBLER_NAME (decl2
))
588 const char sep
= symbol_table::symbol_suffix_separator ();
590 const char *name1
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1
));
591 const char *ptr1
= strchr (name1
, sep
);
592 int len1
= ptr1
? ptr1
- name1
: strlen (name1
);
594 const char *name2
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2
));
595 const char *ptr2
= strchr (name2
, sep
);
596 int len2
= ptr2
? ptr2
- name2
: strlen (name2
);
600 return !strncmp (name1
, name2
, len1
);
603 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
604 violation warnings. */
607 compare_virtual_tables (varpool_node
*prevailing
, varpool_node
*vtable
)
611 if (DECL_VIRTUAL_P (prevailing
->decl
) != DECL_VIRTUAL_P (vtable
->decl
))
613 odr_violation_reported
= true;
614 if (DECL_VIRTUAL_P (prevailing
->decl
))
616 varpool_node
*tmp
= prevailing
;
620 auto_diagnostic_group d
;
621 if (warning_at (DECL_SOURCE_LOCATION
622 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
624 "virtual table of type %qD violates one definition rule",
625 DECL_CONTEXT (vtable
->decl
)))
626 inform (DECL_SOURCE_LOCATION (prevailing
->decl
),
627 "variable of same assembler name as the virtual table is "
628 "defined in another translation unit");
631 if (!prevailing
->definition
|| !vtable
->definition
)
634 /* If we do not stream ODR type info, do not bother to do useful compare. */
635 if (!TYPE_BINFO (DECL_CONTEXT (vtable
->decl
))
636 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable
->decl
))))
639 odr_type class_type
= get_odr_type (DECL_CONTEXT (vtable
->decl
), true);
641 if (class_type
->odr_violated
)
644 for (n1
= 0, n2
= 0; true; n1
++, n2
++)
646 struct ipa_ref
*ref1
, *ref2
;
649 end1
= !prevailing
->iterate_reference (n1
, ref1
);
650 end2
= !vtable
->iterate_reference (n2
, ref2
);
652 /* !DECL_VIRTUAL_P means RTTI entry;
653 We warn when RTTI is lost because non-RTTI previals; we silently
654 accept the other case. */
657 || (methods_equal_p (ref1
->referred
->decl
,
658 ref2
->referred
->decl
)
659 && TREE_CODE (ref1
->referred
->decl
) == FUNCTION_DECL
))
660 && TREE_CODE (ref2
->referred
->decl
) != FUNCTION_DECL
)
662 if (!class_type
->rtti_broken
)
664 auto_diagnostic_group d
;
665 if (warning_at (DECL_SOURCE_LOCATION
666 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
668 "virtual table of type %qD contains RTTI "
670 DECL_CONTEXT (vtable
->decl
)))
672 inform (DECL_SOURCE_LOCATION
673 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
674 "but is prevailed by one without from other"
675 " translation unit");
676 inform (DECL_SOURCE_LOCATION
677 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
678 "RTTI will not work on this type");
679 class_type
->rtti_broken
= true;
683 end2
= !vtable
->iterate_reference (n2
, ref2
);
687 || (methods_equal_p (ref2
->referred
->decl
, ref1
->referred
->decl
)
688 && TREE_CODE (ref2
->referred
->decl
) == FUNCTION_DECL
))
689 && TREE_CODE (ref1
->referred
->decl
) != FUNCTION_DECL
)
692 end1
= !prevailing
->iterate_reference (n1
, ref1
);
698 /* Extra paranoia; compare the sizes. We do not have information
699 about virtual inheritance offsets, so just be sure that these
701 Do this as very last check so the not very informative error
702 is not output too often. */
703 if (DECL_SIZE (prevailing
->decl
) != DECL_SIZE (vtable
->decl
))
705 class_type
->odr_violated
= true;
706 auto_diagnostic_group d
;
707 tree ctx
= TYPE_NAME (DECL_CONTEXT (vtable
->decl
));
708 if (warning_at (DECL_SOURCE_LOCATION (ctx
), OPT_Wodr
,
709 "virtual table of type %qD violates "
710 "one definition rule",
711 DECL_CONTEXT (vtable
->decl
)))
713 ctx
= TYPE_NAME (DECL_CONTEXT (prevailing
->decl
));
714 inform (DECL_SOURCE_LOCATION (ctx
),
715 "the conflicting type defined in another translation"
716 " unit has virtual table of different size");
724 if (methods_equal_p (ref1
->referred
->decl
, ref2
->referred
->decl
))
727 class_type
->odr_violated
= true;
729 /* If the loops above stopped on non-virtual pointer, we have
730 mismatch in RTTI information mangling. */
731 if (TREE_CODE (ref1
->referred
->decl
) != FUNCTION_DECL
732 && TREE_CODE (ref2
->referred
->decl
) != FUNCTION_DECL
)
734 auto_diagnostic_group d
;
735 if (warning_at (DECL_SOURCE_LOCATION
736 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
738 "virtual table of type %qD violates "
739 "one definition rule",
740 DECL_CONTEXT (vtable
->decl
)))
742 inform (DECL_SOURCE_LOCATION
743 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
744 "the conflicting type defined in another translation "
745 "unit with different RTTI information");
749 /* At this point both REF1 and REF2 points either to virtual table
750 or virtual method. If one points to virtual table and other to
751 method we can complain the same way as if one table was shorter
752 than other pointing out the extra method. */
753 if (TREE_CODE (ref1
->referred
->decl
)
754 != TREE_CODE (ref2
->referred
->decl
))
756 if (VAR_P (ref1
->referred
->decl
))
758 else if (VAR_P (ref2
->referred
->decl
))
763 class_type
->odr_violated
= true;
765 /* Complain about size mismatch. Either we have too many virutal
766 functions or too many virtual table pointers. */
771 varpool_node
*tmp
= prevailing
;
776 auto_diagnostic_group d
;
777 if (warning_at (DECL_SOURCE_LOCATION
778 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
780 "virtual table of type %qD violates "
781 "one definition rule",
782 DECL_CONTEXT (vtable
->decl
)))
784 if (TREE_CODE (ref1
->referring
->decl
) == FUNCTION_DECL
)
786 inform (DECL_SOURCE_LOCATION
787 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
788 "the conflicting type defined in another translation "
790 inform (DECL_SOURCE_LOCATION
791 (TYPE_NAME (DECL_CONTEXT (ref1
->referring
->decl
))),
792 "contains additional virtual method %qD",
793 ref1
->referred
->decl
);
797 inform (DECL_SOURCE_LOCATION
798 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
799 "the conflicting type defined in another translation "
800 "unit has virtual table with more entries");
806 /* And in the last case we have either mistmatch in between two virtual
807 methods or two virtual table pointers. */
808 auto_diagnostic_group d
;
809 if (warning_at (DECL_SOURCE_LOCATION
810 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))), OPT_Wodr
,
811 "virtual table of type %qD violates "
812 "one definition rule",
813 DECL_CONTEXT (vtable
->decl
)))
815 if (TREE_CODE (ref1
->referred
->decl
) == FUNCTION_DECL
)
817 inform (DECL_SOURCE_LOCATION
818 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
819 "the conflicting type defined in another translation "
821 gcc_assert (TREE_CODE (ref2
->referred
->decl
)
823 inform (DECL_SOURCE_LOCATION
824 (ref1
->referred
->ultimate_alias_target ()->decl
),
825 "virtual method %qD",
826 ref1
->referred
->ultimate_alias_target ()->decl
);
827 inform (DECL_SOURCE_LOCATION
828 (ref2
->referred
->ultimate_alias_target ()->decl
),
829 "ought to match virtual method %qD but does not",
830 ref2
->referred
->ultimate_alias_target ()->decl
);
833 inform (DECL_SOURCE_LOCATION
834 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
835 "the conflicting type defined in another translation "
836 "unit has virtual table with different contents");
842 /* Output ODR violation warning about T1 and T2 with REASON.
843 Display location of ST1 and ST2 if REASON speaks about field or
845 If WARN is false, do nothing. Set WARNED if warning was indeed
849 warn_odr (tree t1
, tree t2
, tree st1
, tree st2
,
850 bool warn
, bool *warned
, const char *reason
)
852 tree decl2
= TYPE_NAME (TYPE_MAIN_VARIANT (t2
));
856 if (!warn
|| !TYPE_NAME(TYPE_MAIN_VARIANT (t1
)))
859 /* ODR warnings are output druing LTO streaming; we must apply location
860 cache for potential warnings to be output correctly. */
861 if (lto_location_cache::current_cache
)
862 lto_location_cache::current_cache
->apply_location_cache ();
864 auto_diagnostic_group d
;
865 if (t1
!= TYPE_MAIN_VARIANT (t1
)
866 && TYPE_NAME (t1
) != TYPE_NAME (TYPE_MAIN_VARIANT (t1
)))
868 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1
))),
869 OPT_Wodr
, "type %qT (typedef of %qT) violates the "
870 "C++ One Definition Rule",
871 t1
, TYPE_MAIN_VARIANT (t1
)))
876 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1
))),
877 OPT_Wodr
, "type %qT violates the C++ One Definition Rule",
883 /* For FIELD_DECL support also case where one of fields is
884 NULL - this is used when the structures have mismatching number of
886 else if (!st1
|| TREE_CODE (st1
) == FIELD_DECL
)
888 inform (DECL_SOURCE_LOCATION (decl2
),
889 "a different type is defined in another translation unit");
895 inform (DECL_SOURCE_LOCATION (st1
),
896 "the first difference of corresponding definitions is field %qD",
901 else if (TREE_CODE (st1
) == FUNCTION_DECL
)
903 inform (DECL_SOURCE_LOCATION (decl2
),
904 "a different type is defined in another translation unit");
905 inform (DECL_SOURCE_LOCATION (st1
),
906 "the first difference of corresponding definitions is method %qD",
912 inform (DECL_SOURCE_LOCATION (decl2
), reason
);
918 /* Return ture if T1 and T2 are incompatible and we want to recusively
919 dive into them from warn_type_mismatch to give sensible answer. */
922 type_mismatch_p (tree t1
, tree t2
)
924 if (odr_or_derived_type_p (t1
) && odr_or_derived_type_p (t2
)
925 && !odr_types_equivalent_p (t1
, t2
))
927 return !types_compatible_p (t1
, t2
);
931 /* Types T1 and T2 was found to be incompatible in a context they can't
932 (either used to declare a symbol of same assembler name or unified by
933 ODR rule). We already output warning about this, but if possible, output
934 extra information on how the types mismatch.
936 This is hard to do in general. We basically handle the common cases.
938 If LOC1 and LOC2 are meaningful locations, use it in the case the types
939 themselves do no thave one.*/
942 warn_types_mismatch (tree t1
, tree t2
, location_t loc1
, location_t loc2
)
944 /* Location of type is known only if it has TYPE_NAME and the name is
946 location_t loc_t1
= TYPE_NAME (t1
) && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
947 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1
))
949 location_t loc_t2
= TYPE_NAME (t2
) && TREE_CODE (TYPE_NAME (t2
)) == TYPE_DECL
950 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2
))
952 bool loc_t2_useful
= false;
954 /* With LTO it is a common case that the location of both types match.
955 See if T2 has a location that is different from T1. If so, we will
956 inform user about the location.
957 Do not consider the location passed to us in LOC1/LOC2 as those are
959 if (loc_t2
> BUILTINS_LOCATION
&& loc_t2
!= loc_t1
)
961 if (loc_t1
<= BUILTINS_LOCATION
)
962 loc_t2_useful
= true;
965 expanded_location xloc1
= expand_location (loc_t1
);
966 expanded_location xloc2
= expand_location (loc_t2
);
968 if (strcmp (xloc1
.file
, xloc2
.file
)
969 || xloc1
.line
!= xloc2
.line
970 || xloc1
.column
!= xloc2
.column
)
971 loc_t2_useful
= true;
975 if (loc_t1
<= BUILTINS_LOCATION
)
977 if (loc_t2
<= BUILTINS_LOCATION
)
980 location_t loc
= loc_t1
<= BUILTINS_LOCATION
? loc_t2
: loc_t1
;
982 /* It is a quite common bug to reference anonymous namespace type in
983 non-anonymous namespace class. */
984 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1
))
985 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1
)))
986 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2
))
987 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2
))))
989 if (type_with_linkage_p (TYPE_MAIN_VARIANT (t1
))
990 && !type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1
)))
993 std::swap (loc_t1
, loc_t2
);
995 gcc_assert (TYPE_NAME (t1
) && TYPE_NAME (t2
)
996 && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
997 && TREE_CODE (TYPE_NAME (t2
)) == TYPE_DECL
);
998 tree n1
= TYPE_NAME (t1
);
999 tree n2
= TYPE_NAME (t2
);
1000 if (TREE_CODE (n1
) == TYPE_DECL
)
1001 n1
= DECL_NAME (n1
);
1002 if (TREE_CODE (n2
) == TYPE_DECL
)
1003 n2
= DECL_NAME (n2
);
1004 /* Most of the time, the type names will match, do not be unnecesarily
1006 if (IDENTIFIER_POINTER (n1
) != IDENTIFIER_POINTER (n2
))
1008 "type %qT defined in anonymous namespace cannot match "
1009 "type %qT across the translation unit boundary",
1013 "type %qT defined in anonymous namespace cannot match "
1014 "across the translation unit boundary",
1018 "the incompatible type defined in another translation unit");
1021 tree mt1
= TYPE_MAIN_VARIANT (t1
);
1022 tree mt2
= TYPE_MAIN_VARIANT (t2
);
1023 /* If types have mangled ODR names and they are different, it is most
1024 informative to output those.
1025 This also covers types defined in different namespaces. */
1026 if (TYPE_NAME (mt1
) && TYPE_NAME (mt2
)
1027 && TREE_CODE (TYPE_NAME (mt1
)) == TYPE_DECL
1028 && TREE_CODE (TYPE_NAME (mt2
)) == TYPE_DECL
1029 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (mt1
))
1030 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (mt2
))
1031 && DECL_ASSEMBLER_NAME (TYPE_NAME (mt1
))
1032 != DECL_ASSEMBLER_NAME (TYPE_NAME (mt2
)))
1034 char *name1
= xstrdup (cplus_demangle
1035 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (mt1
))),
1036 DMGL_PARAMS
| DMGL_ANSI
| DMGL_TYPES
));
1037 char *name2
= cplus_demangle
1038 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (mt2
))),
1039 DMGL_PARAMS
| DMGL_ANSI
| DMGL_TYPES
);
1040 if (name1
&& name2
&& strcmp (name1
, name2
))
1043 "type name %qs should match type name %qs",
1047 "the incompatible type is defined here");
1053 /* A tricky case are compound types. Often they appear the same in source
1054 code and the mismatch is dragged in by type they are build from.
1055 Look for those differences in subtypes and try to be informative. In other
1056 cases just output nothing because the source code is probably different
1057 and in this case we already output a all necessary info. */
1058 if (!TYPE_NAME (t1
) || !TYPE_NAME (t2
))
1060 if (TREE_CODE (t1
) == TREE_CODE (t2
))
1062 if (TREE_CODE (t1
) == ARRAY_TYPE
1063 && COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
))
1065 tree i1
= TYPE_DOMAIN (t1
);
1066 tree i2
= TYPE_DOMAIN (t2
);
1069 && TYPE_MAX_VALUE (i1
)
1070 && TYPE_MAX_VALUE (i2
)
1071 && !operand_equal_p (TYPE_MAX_VALUE (i1
),
1072 TYPE_MAX_VALUE (i2
), 0))
1075 "array types have different bounds");
1079 if ((POINTER_TYPE_P (t1
) || TREE_CODE (t1
) == ARRAY_TYPE
)
1080 && type_mismatch_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
1081 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc_t1
, loc_t2
);
1082 else if (TREE_CODE (t1
) == METHOD_TYPE
1083 || TREE_CODE (t1
) == FUNCTION_TYPE
)
1085 tree parms1
= NULL
, parms2
= NULL
;
1088 if (type_mismatch_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
1090 inform (loc
, "return value type mismatch");
1091 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc_t1
,
1095 if (prototype_p (t1
) && prototype_p (t2
))
1096 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1098 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
),
1101 if (type_mismatch_p (TREE_VALUE (parms1
), TREE_VALUE (parms2
)))
1103 if (count
== 1 && TREE_CODE (t1
) == METHOD_TYPE
)
1105 "implicit this pointer type mismatch");
1108 "type mismatch in parameter %i",
1109 count
- (TREE_CODE (t1
) == METHOD_TYPE
));
1110 warn_types_mismatch (TREE_VALUE (parms1
),
1111 TREE_VALUE (parms2
),
1116 if (parms1
|| parms2
)
1119 "types have different parameter counts");
1127 if (types_odr_comparable (t1
, t2
)
1128 /* We make assign integers mangled names to be able to handle
1129 signed/unsigned chars. Accepting them here would however lead to
1130 confussing message like
1131 "type ‘const int’ itself violates the C++ One Definition Rule" */
1132 && TREE_CODE (t1
) != INTEGER_TYPE
1133 && types_same_for_odr (t1
, t2
))
1135 "type %qT itself violates the C++ One Definition Rule", t1
);
1136 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1137 else if (TYPE_NAME (t1
) == TYPE_NAME (t2
)
1138 && TREE_CODE (t1
) == TREE_CODE (t2
) && !loc_t2_useful
)
1141 inform (loc_t1
, "type %qT should match type %qT",
1144 inform (loc_t2
, "the incompatible type is defined here");
1147 /* Return true if T should be ignored in TYPE_FIELDS for ODR comparsion. */
1150 skip_in_fields_list_p (tree t
)
1152 if (TREE_CODE (t
) != FIELD_DECL
)
1154 /* C++ FE introduces zero sized fields depending on -std setting, see
1157 && integer_zerop (DECL_SIZE (t
))
1158 && DECL_ARTIFICIAL (t
)
1159 && DECL_IGNORED_P (t
)
1165 /* Compare T1 and T2, report ODR violations if WARN is true and set
1166 WARNED to true if anything is reported. Return true if types match.
1167 If true is returned, the types are also compatible in the sense of
1168 gimple_canonical_types_compatible_p.
1169 If LOC1 and LOC2 is not UNKNOWN_LOCATION it may be used to output a warning
1170 about the type if the type itself do not have location. */
1173 odr_types_equivalent_p (tree t1
, tree t2
, bool warn
, bool *warned
,
1174 hash_set
<type_pair
> *visited
,
1175 location_t loc1
, location_t loc2
)
1177 /* Check first for the obvious case of pointer identity. */
1181 /* Can't be the same type if the types don't have the same code. */
1182 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1184 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1185 G_("a different type is defined in another translation unit"));
1189 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1
))
1190 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1
)))
1191 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2
))
1192 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2
))))
1194 /* We cannot trip this when comparing ODR types, only when trying to
1195 match different ODR derivations from different declarations.
1196 So WARN should be always false. */
1201 if (TREE_CODE (t1
) == ENUMERAL_TYPE
1202 && TYPE_VALUES (t1
) && TYPE_VALUES (t2
))
1205 for (v1
= TYPE_VALUES (t1
), v2
= TYPE_VALUES (t2
);
1206 v1
&& v2
; v1
= TREE_CHAIN (v1
), v2
= TREE_CHAIN (v2
))
1208 if (TREE_PURPOSE (v1
) != TREE_PURPOSE (v2
))
1210 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1211 G_("an enum with different value name"
1212 " is defined in another translation unit"));
1215 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
), 0))
1217 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1218 G_("an enum with different values is defined"
1219 " in another translation unit"));
1225 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1226 G_("an enum with mismatching number of values "
1227 "is defined in another translation unit"));
1232 /* Non-aggregate types can be handled cheaply. */
1233 if (INTEGRAL_TYPE_P (t1
)
1234 || SCALAR_FLOAT_TYPE_P (t1
)
1235 || FIXED_POINT_TYPE_P (t1
)
1236 || TREE_CODE (t1
) == VECTOR_TYPE
1237 || TREE_CODE (t1
) == COMPLEX_TYPE
1238 || TREE_CODE (t1
) == OFFSET_TYPE
1239 || POINTER_TYPE_P (t1
))
1241 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
1243 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1244 G_("a type with different precision is defined "
1245 "in another translation unit"));
1248 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
1250 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1251 G_("a type with different signedness is defined "
1252 "in another translation unit"));
1256 if (TREE_CODE (t1
) == INTEGER_TYPE
1257 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
1259 /* char WRT uint_8? */
1260 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1261 G_("a different type is defined in another "
1262 "translation unit"));
1266 /* For canonical type comparisons we do not want to build SCCs
1267 so we cannot compare pointed-to types. But we can, for now,
1268 require the same pointed-to type kind and match what
1269 useless_type_conversion_p would do. */
1270 if (POINTER_TYPE_P (t1
))
1272 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
1273 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
1275 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1276 G_("it is defined as a pointer in different address "
1277 "space in another translation unit"));
1281 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1282 visited
, loc1
, loc2
))
1284 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1285 G_("it is defined as a pointer to different type "
1286 "in another translation unit"));
1288 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
),
1294 if ((TREE_CODE (t1
) == VECTOR_TYPE
|| TREE_CODE (t1
) == COMPLEX_TYPE
)
1295 && !odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1296 visited
, loc1
, loc2
))
1298 /* Probably specific enough. */
1299 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1300 G_("a different type is defined "
1301 "in another translation unit"));
1303 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1307 /* Do type-specific comparisons. */
1308 else switch (TREE_CODE (t1
))
1312 /* Array types are the same if the element types are the same and
1313 the number of elements are the same. */
1314 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1315 visited
, loc1
, loc2
))
1317 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1318 G_("a different type is defined in another "
1319 "translation unit"));
1321 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1323 gcc_assert (TYPE_STRING_FLAG (t1
) == TYPE_STRING_FLAG (t2
));
1324 gcc_assert (TYPE_NONALIASED_COMPONENT (t1
)
1325 == TYPE_NONALIASED_COMPONENT (t2
));
1327 tree i1
= TYPE_DOMAIN (t1
);
1328 tree i2
= TYPE_DOMAIN (t2
);
1330 /* For an incomplete external array, the type domain can be
1331 NULL_TREE. Check this condition also. */
1332 if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
1333 return type_variants_equivalent_p (t1
, t2
);
1335 tree min1
= TYPE_MIN_VALUE (i1
);
1336 tree min2
= TYPE_MIN_VALUE (i2
);
1337 tree max1
= TYPE_MAX_VALUE (i1
);
1338 tree max2
= TYPE_MAX_VALUE (i2
);
1340 /* In C++, minimums should be always 0. */
1341 gcc_assert (min1
== min2
);
1342 if (!operand_equal_p (max1
, max2
, 0))
1344 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1345 G_("an array of different size is defined "
1346 "in another translation unit"));
1354 /* Function types are the same if the return type and arguments types
1356 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1357 visited
, loc1
, loc2
))
1359 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1360 G_("has different return value "
1361 "in another translation unit"));
1363 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1367 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
)
1368 || !prototype_p (t1
) || !prototype_p (t2
))
1369 return type_variants_equivalent_p (t1
, t2
);
1372 tree parms1
, parms2
;
1374 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1376 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
1378 if (!odr_subtypes_equivalent_p
1379 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
1380 visited
, loc1
, loc2
))
1382 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1383 G_("has different parameters in another "
1384 "translation unit"));
1386 warn_types_mismatch (TREE_VALUE (parms1
),
1387 TREE_VALUE (parms2
), loc1
, loc2
);
1392 if (parms1
|| parms2
)
1394 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1395 G_("has different parameters "
1396 "in another translation unit"));
1400 return type_variants_equivalent_p (t1
, t2
);
1405 case QUAL_UNION_TYPE
:
1409 /* For aggregate types, all the fields must be the same. */
1410 if (COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
))
1412 if (TYPE_BINFO (t1
) && TYPE_BINFO (t2
)
1413 && polymorphic_type_binfo_p (TYPE_BINFO (t1
))
1414 != polymorphic_type_binfo_p (TYPE_BINFO (t2
)))
1416 if (polymorphic_type_binfo_p (TYPE_BINFO (t1
)))
1417 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1418 G_("a type defined in another translation unit "
1419 "is not polymorphic"));
1421 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1422 G_("a type defined in another translation unit "
1426 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
1428 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
1430 /* Skip non-fields. */
1431 while (f1
&& skip_in_fields_list_p (f1
))
1432 f1
= TREE_CHAIN (f1
);
1433 while (f2
&& skip_in_fields_list_p (f2
))
1434 f2
= TREE_CHAIN (f2
);
1437 if (DECL_VIRTUAL_P (f1
) != DECL_VIRTUAL_P (f2
))
1439 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1440 G_("a type with different virtual table pointers"
1441 " is defined in another translation unit"));
1444 if (DECL_ARTIFICIAL (f1
) != DECL_ARTIFICIAL (f2
))
1446 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1447 G_("a type with different bases is defined "
1448 "in another translation unit"));
1451 if (DECL_NAME (f1
) != DECL_NAME (f2
)
1452 && !DECL_ARTIFICIAL (f1
))
1454 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1455 G_("a field with different name is defined "
1456 "in another translation unit"));
1459 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1
),
1461 visited
, loc1
, loc2
))
1463 /* Do not warn about artificial fields and just go into
1464 generic field mismatch warning. */
1465 if (DECL_ARTIFICIAL (f1
))
1468 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1469 G_("a field of same name but different type "
1470 "is defined in another translation unit"));
1472 warn_types_mismatch (TREE_TYPE (f1
), TREE_TYPE (f2
), loc1
, loc2
);
1475 if (!gimple_compare_field_offset (f1
, f2
))
1477 /* Do not warn about artificial fields and just go into
1478 generic field mismatch warning. */
1479 if (DECL_ARTIFICIAL (f1
))
1481 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1482 G_("fields have different layout "
1483 "in another translation unit"));
1486 if (DECL_BIT_FIELD (f1
) != DECL_BIT_FIELD (f2
))
1488 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1489 G_("one field is a bitfield while the other "
1494 gcc_assert (DECL_NONADDRESSABLE_P (f1
)
1495 == DECL_NONADDRESSABLE_P (f2
));
1498 /* If one aggregate has more fields than the other, they
1499 are not the same. */
1502 if ((f1
&& DECL_VIRTUAL_P (f1
)) || (f2
&& DECL_VIRTUAL_P (f2
)))
1503 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1504 G_("a type with different virtual table pointers"
1505 " is defined in another translation unit"));
1506 else if ((f1
&& DECL_ARTIFICIAL (f1
))
1507 || (f2
&& DECL_ARTIFICIAL (f2
)))
1508 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1509 G_("a type with different bases is defined "
1510 "in another translation unit"));
1512 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1513 G_("a type with different number of fields "
1514 "is defined in another translation unit"));
1530 /* Those are better to come last as they are utterly uninformative. */
1531 if (TYPE_SIZE (t1
) && TYPE_SIZE (t2
)
1532 && !operand_equal_p (TYPE_SIZE (t1
), TYPE_SIZE (t2
), 0))
1534 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1535 G_("a type with different size "
1536 "is defined in another translation unit"));
1540 gcc_assert (!TYPE_SIZE_UNIT (t1
) || !TYPE_SIZE_UNIT (t2
)
1541 || operand_equal_p (TYPE_SIZE_UNIT (t1
),
1542 TYPE_SIZE_UNIT (t2
), 0));
1543 return type_variants_equivalent_p (t1
, t2
);
1546 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1549 odr_types_equivalent_p (tree type1
, tree type2
)
1551 gcc_checking_assert (odr_or_derived_type_p (type1
)
1552 && odr_or_derived_type_p (type2
));
1554 hash_set
<type_pair
> visited
;
1555 return odr_types_equivalent_p (type1
, type2
, false, NULL
,
1556 &visited
, UNKNOWN_LOCATION
, UNKNOWN_LOCATION
);
1559 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1560 from VAL->type. This may happen in LTO where tree merging did not merge
1561 all variants of the same type or due to ODR violation.
1563 Analyze and report ODR violations and add type to duplicate list.
1564 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1565 this is first time we see definition of a class return true so the
1566 base types are analyzed. */
1569 add_type_duplicate (odr_type val
, tree type
)
1571 bool build_bases
= false;
1572 bool prevail
= false;
1573 bool odr_must_violate
= false;
1575 if (!val
->types_set
)
1576 val
->types_set
= new hash_set
<tree
>;
1578 /* Chose polymorphic type as leader (this happens only in case of ODR
1580 if ((TREE_CODE (type
) == RECORD_TYPE
&& TYPE_BINFO (type
)
1581 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
1582 && (TREE_CODE (val
->type
) != RECORD_TYPE
|| !TYPE_BINFO (val
->type
)
1583 || !polymorphic_type_binfo_p (TYPE_BINFO (val
->type
))))
1588 /* Always prefer complete type to be the leader. */
1589 else if (!COMPLETE_TYPE_P (val
->type
) && COMPLETE_TYPE_P (type
))
1592 if (TREE_CODE (type
) == RECORD_TYPE
)
1593 build_bases
= TYPE_BINFO (type
);
1595 else if (COMPLETE_TYPE_P (val
->type
) && !COMPLETE_TYPE_P (type
))
1597 else if (TREE_CODE (val
->type
) == ENUMERAL_TYPE
1598 && TREE_CODE (type
) == ENUMERAL_TYPE
1599 && !TYPE_VALUES (val
->type
) && TYPE_VALUES (type
))
1601 else if (TREE_CODE (val
->type
) == RECORD_TYPE
1602 && TREE_CODE (type
) == RECORD_TYPE
1603 && TYPE_BINFO (type
) && !TYPE_BINFO (val
->type
))
1605 gcc_assert (!val
->bases
.length ());
1611 std::swap (val
->type
, type
);
1613 val
->types_set
->add (type
);
1618 gcc_checking_assert (can_be_name_hashed_p (type
)
1619 && can_be_name_hashed_p (val
->type
));
1622 bool base_mismatch
= false;
1624 bool warned
= false;
1625 hash_set
<type_pair
> visited
;
1627 gcc_assert (in_lto_p
);
1628 vec_safe_push (val
->types
, type
);
1630 /* If both are class types, compare the bases. */
1631 if (COMPLETE_TYPE_P (type
) && COMPLETE_TYPE_P (val
->type
)
1632 && TREE_CODE (val
->type
) == RECORD_TYPE
1633 && TREE_CODE (type
) == RECORD_TYPE
1634 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
))
1636 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
1637 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)))
1639 if (!flag_ltrans
&& !warned
&& !val
->odr_violated
)
1642 warn_odr (type
, val
->type
, NULL
, NULL
, !warned
, &warned
,
1643 "a type with the same name but different "
1644 "number of polymorphic bases is "
1645 "defined in another translation unit");
1648 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
1649 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)))
1650 extra_base
= BINFO_BASE_BINFO
1652 BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)));
1654 extra_base
= BINFO_BASE_BINFO
1655 (TYPE_BINFO (val
->type
),
1656 BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)));
1657 tree extra_base_type
= BINFO_TYPE (extra_base
);
1658 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type
)),
1659 "the extra base is defined here");
1662 base_mismatch
= true;
1665 for (i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
1667 tree base1
= BINFO_BASE_BINFO (TYPE_BINFO (type
), i
);
1668 tree base2
= BINFO_BASE_BINFO (TYPE_BINFO (val
->type
), i
);
1669 tree type1
= BINFO_TYPE (base1
);
1670 tree type2
= BINFO_TYPE (base2
);
1672 if (types_odr_comparable (type1
, type2
))
1674 if (!types_same_for_odr (type1
, type2
))
1675 base_mismatch
= true;
1678 if (!odr_types_equivalent_p (type1
, type2
))
1679 base_mismatch
= true;
1682 if (!warned
&& !val
->odr_violated
)
1684 warn_odr (type
, val
->type
, NULL
, NULL
,
1686 "a type with the same name but different base "
1687 "type is defined in another translation unit");
1689 warn_types_mismatch (type1
, type2
,
1690 UNKNOWN_LOCATION
, UNKNOWN_LOCATION
);
1694 if (BINFO_OFFSET (base1
) != BINFO_OFFSET (base2
))
1696 base_mismatch
= true;
1697 if (!warned
&& !val
->odr_violated
)
1698 warn_odr (type
, val
->type
, NULL
, NULL
,
1700 "a type with the same name but different base "
1701 "layout is defined in another translation unit");
1704 /* One of bases is not of complete type. */
1705 if (!TYPE_BINFO (type1
) != !TYPE_BINFO (type2
))
1707 /* If we have a polymorphic type info specified for TYPE1
1708 but not for TYPE2 we possibly missed a base when recording
1710 Be sure this does not happen. */
1711 if (TYPE_BINFO (type1
)
1712 && polymorphic_type_binfo_p (TYPE_BINFO (type1
))
1714 odr_must_violate
= true;
1717 /* One base is polymorphic and the other not.
1718 This ought to be diagnosed earlier, but do not ICE in the
1720 else if (TYPE_BINFO (type1
)
1721 && polymorphic_type_binfo_p (TYPE_BINFO (type1
))
1722 != polymorphic_type_binfo_p (TYPE_BINFO (type2
)))
1724 if (!warned
&& !val
->odr_violated
)
1725 warn_odr (type
, val
->type
, NULL
, NULL
,
1727 "a base of the type is polymorphic only in one "
1728 "translation unit");
1729 base_mismatch
= true;
1736 odr_violation_reported
= true;
1737 val
->odr_violated
= true;
1739 if (symtab
->dump_file
)
1741 fprintf (symtab
->dump_file
, "ODR base violation\n");
1743 print_node (symtab
->dump_file
, "", val
->type
, 0);
1744 putc ('\n',symtab
->dump_file
);
1745 print_node (symtab
->dump_file
, "", type
, 0);
1746 putc ('\n',symtab
->dump_file
);
1751 /* Next compare memory layout.
1752 The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1753 We must apply the location cache to ensure that they are valid
1754 before we can pass them to odr_types_equivalent_p (PR lto/83121). */
1755 if (lto_location_cache::current_cache
)
1756 lto_location_cache::current_cache
->apply_location_cache ();
1757 /* As a special case we stream mangles names of integer types so we can see
1758 if they are believed to be same even though they have different
1759 representation. Avoid bogus warning on mismatches in these. */
1760 if (TREE_CODE (type
) != INTEGER_TYPE
1761 && TREE_CODE (val
->type
) != INTEGER_TYPE
1762 && !odr_types_equivalent_p (val
->type
, type
,
1763 !flag_ltrans
&& !val
->odr_violated
&& !warned
,
1765 DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
1766 DECL_SOURCE_LOCATION (TYPE_NAME (type
))))
1769 odr_violation_reported
= true;
1770 val
->odr_violated
= true;
1772 gcc_assert (val
->odr_violated
|| !odr_must_violate
);
1773 /* Sanity check that all bases will be build same way again. */
1775 && COMPLETE_TYPE_P (type
) && COMPLETE_TYPE_P (val
->type
)
1776 && TREE_CODE (val
->type
) == RECORD_TYPE
1777 && TREE_CODE (type
) == RECORD_TYPE
1778 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
)
1779 && !val
->odr_violated
1780 && !base_mismatch
&& val
->bases
.length ())
1782 unsigned int num_poly_bases
= 0;
1785 for (i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
1786 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1787 (TYPE_BINFO (type
), i
)))
1789 gcc_assert (num_poly_bases
== val
->bases
.length ());
1790 for (j
= 0, i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
));
1792 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1793 (TYPE_BINFO (type
), i
)))
1795 odr_type base
= get_odr_type
1797 (BINFO_BASE_BINFO (TYPE_BINFO (type
),
1800 gcc_assert (val
->bases
[j
] == base
);
1806 /* Regularize things a little. During LTO same types may come with
1807 different BINFOs. Either because their virtual table was
1808 not merged by tree merging and only later at decl merging or
1809 because one type comes with external vtable, while other
1810 with internal. We want to merge equivalent binfos to conserve
1811 memory and streaming overhead.
1813 The external vtables are more harmful: they contain references
1814 to external declarations of methods that may be defined in the
1815 merged LTO unit. For this reason we absolutely need to remove
1816 them and replace by internal variants. Not doing so will lead
1817 to incomplete answers from possible_polymorphic_call_targets.
1819 FIXME: disable for now; because ODR types are now build during
1820 streaming in, the variants do not need to be linked to the type,
1821 yet. We need to do the merging in cleanup pass to be implemented
1823 if (!flag_ltrans
&& merge
1825 && TREE_CODE (val
->type
) == RECORD_TYPE
1826 && TREE_CODE (type
) == RECORD_TYPE
1827 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
)
1828 && TYPE_MAIN_VARIANT (type
) == type
1829 && TYPE_MAIN_VARIANT (val
->type
) == val
->type
1830 && BINFO_VTABLE (TYPE_BINFO (val
->type
))
1831 && BINFO_VTABLE (TYPE_BINFO (type
)))
1833 tree master_binfo
= TYPE_BINFO (val
->type
);
1834 tree v1
= BINFO_VTABLE (master_binfo
);
1835 tree v2
= BINFO_VTABLE (TYPE_BINFO (type
));
1837 if (TREE_CODE (v1
) == POINTER_PLUS_EXPR
)
1839 gcc_assert (TREE_CODE (v2
) == POINTER_PLUS_EXPR
1840 && operand_equal_p (TREE_OPERAND (v1
, 1),
1841 TREE_OPERAND (v2
, 1), 0));
1842 v1
= TREE_OPERAND (TREE_OPERAND (v1
, 0), 0);
1843 v2
= TREE_OPERAND (TREE_OPERAND (v2
, 0), 0);
1845 gcc_assert (DECL_ASSEMBLER_NAME (v1
)
1846 == DECL_ASSEMBLER_NAME (v2
));
1848 if (DECL_EXTERNAL (v1
) && !DECL_EXTERNAL (v2
))
1852 set_type_binfo (val
->type
, TYPE_BINFO (type
));
1853 for (i
= 0; i
< val
->types
->length (); i
++)
1855 if (TYPE_BINFO ((*val
->types
)[i
])
1857 set_type_binfo ((*val
->types
)[i
], TYPE_BINFO (type
));
1859 BINFO_TYPE (TYPE_BINFO (type
)) = val
->type
;
1862 set_type_binfo (type
, master_binfo
);
1867 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
1870 obj_type_ref_class (const_tree ref
)
1872 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
1873 ref
= TREE_TYPE (ref
);
1874 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
1875 ref
= TREE_TYPE (ref
);
1876 /* We look for type THIS points to. ObjC also builds
1877 OBJ_TYPE_REF with non-method calls, Their first parameter
1878 ID however also corresponds to class type. */
1879 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
1880 || TREE_CODE (ref
) == FUNCTION_TYPE
);
1881 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
1882 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
1883 tree ret
= TREE_TYPE (ref
);
1884 if (!in_lto_p
&& !TYPE_STRUCTURAL_EQUALITY_P (ret
))
1885 ret
= TYPE_CANONICAL (ret
);
1887 ret
= get_odr_type (ret
)->type
;
1891 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1892 possibly new entry. */
1895 get_odr_type (tree type
, bool insert
)
1897 odr_type_d
**slot
= NULL
;
1898 odr_type val
= NULL
;
1900 bool build_bases
= false;
1901 bool insert_to_odr_array
= false;
1904 type
= TYPE_MAIN_VARIANT (type
);
1905 if (!in_lto_p
&& !TYPE_STRUCTURAL_EQUALITY_P (type
))
1906 type
= TYPE_CANONICAL (type
);
1908 gcc_checking_assert (can_be_name_hashed_p (type
));
1910 hash
= hash_odr_name (type
);
1911 slot
= odr_hash
->find_slot_with_hash (type
, hash
,
1912 insert
? INSERT
: NO_INSERT
);
1917 /* See if we already have entry for type. */
1922 if (val
->type
!= type
&& insert
1923 && (!val
->types_set
|| !val
->types_set
->add (type
)))
1924 build_bases
= add_type_duplicate (val
, type
);
1928 val
= ggc_cleared_alloc
<odr_type_d
> ();
1931 val
->derived_types
= vNULL
;
1932 if (type_with_linkage_p (type
))
1933 val
->anonymous_namespace
= type_in_anonymous_namespace_p (type
);
1935 val
->anonymous_namespace
= 0;
1936 build_bases
= COMPLETE_TYPE_P (val
->type
);
1937 insert_to_odr_array
= true;
1941 if (build_bases
&& TREE_CODE (type
) == RECORD_TYPE
&& TYPE_BINFO (type
)
1942 && type_with_linkage_p (type
)
1943 && type
== TYPE_MAIN_VARIANT (type
))
1945 tree binfo
= TYPE_BINFO (type
);
1948 gcc_assert (BINFO_TYPE (TYPE_BINFO (val
->type
)) == type
);
1950 val
->all_derivations_known
= type_all_derivations_known_p (type
);
1951 for (i
= 0; i
< BINFO_N_BASE_BINFOS (binfo
); i
++)
1952 /* For now record only polymorphic types. other are
1953 pointless for devirtualization and we cannot precisely
1954 determine ODR equivalency of these during LTO. */
1955 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo
, i
)))
1957 tree base_type
= BINFO_TYPE (BINFO_BASE_BINFO (binfo
, i
));
1958 odr_type base
= get_odr_type (base_type
, true);
1959 gcc_assert (TYPE_MAIN_VARIANT (base_type
) == base_type
);
1960 base
->derived_types
.safe_push (val
);
1961 val
->bases
.safe_push (base
);
1962 if (base
->id
> base_id
)
1966 /* Ensure that type always appears after bases. */
1967 if (insert_to_odr_array
)
1970 val
->id
= odr_types
.length ();
1971 vec_safe_push (odr_types_ptr
, val
);
1973 else if (base_id
> val
->id
)
1975 odr_types
[val
->id
] = 0;
1976 /* Be sure we did not recorded any derived types; these may need
1978 gcc_assert (val
->derived_types
.length() == 0);
1979 val
->id
= odr_types
.length ();
1980 vec_safe_push (odr_types_ptr
, val
);
1985 /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
1986 on ODR violations. */
1989 prevailing_odr_type (tree type
)
1991 odr_type t
= get_odr_type (type
, false);
1992 if (!t
|| t
->odr_violated
)
1997 /* Set tbaa_enabled flag for TYPE. */
2000 enable_odr_based_tbaa (tree type
)
2002 odr_type t
= get_odr_type (type
, true);
2003 t
->tbaa_enabled
= true;
2006 /* True if canonical type of TYPE is determined using ODR name. */
2009 odr_based_tbaa_p (const_tree type
)
2011 if (!RECORD_OR_UNION_TYPE_P (type
))
2013 odr_type t
= get_odr_type (const_cast <tree
> (type
), false);
2014 if (!t
|| !t
->tbaa_enabled
)
2019 /* Set TYPE_CANONICAL of type and all its variants and duplicates
2023 set_type_canonical_for_odr_type (tree type
, tree canonical
)
2025 odr_type t
= get_odr_type (type
, false);
2029 for (tree t2
= t
->type
; t2
; t2
= TYPE_NEXT_VARIANT (t2
))
2030 TYPE_CANONICAL (t2
) = canonical
;
2032 FOR_EACH_VEC_ELT (*t
->types
, i
, tt
)
2033 for (tree t2
= tt
; t2
; t2
= TYPE_NEXT_VARIANT (t2
))
2034 TYPE_CANONICAL (t2
) = canonical
;
2037 /* Return true if we reported some ODR violation on TYPE. */
2040 odr_type_violation_reported_p (tree type
)
2042 return get_odr_type (type
, false)->odr_violated
;
2045 /* Add TYPE od ODR type hash. */
2048 register_odr_type (tree type
)
2051 odr_hash
= new odr_hash_type (23);
2052 if (type
== TYPE_MAIN_VARIANT (type
))
2054 /* To get ODR warings right, first register all sub-types. */
2055 if (RECORD_OR_UNION_TYPE_P (type
)
2056 && COMPLETE_TYPE_P (type
))
2058 /* Limit recursion on types which are already registered. */
2059 odr_type ot
= get_odr_type (type
, false);
2061 && (ot
->type
== type
2063 && ot
->types_set
->contains (type
))))
2065 for (tree f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
2066 if (TREE_CODE (f
) == FIELD_DECL
)
2068 tree subtype
= TREE_TYPE (f
);
2070 while (TREE_CODE (subtype
) == ARRAY_TYPE
)
2071 subtype
= TREE_TYPE (subtype
);
2072 if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype
)))
2073 register_odr_type (TYPE_MAIN_VARIANT (subtype
));
2075 if (TYPE_BINFO (type
))
2076 for (unsigned int i
= 0;
2077 i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
2078 register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
2079 (TYPE_BINFO (type
), i
)));
2081 get_odr_type (type
, true);
2085 /* Return true if type is known to have no derivations. */
2088 type_known_to_have_no_derivations_p (tree t
)
2090 return (type_all_derivations_known_p (t
)
2091 && (TYPE_FINAL_P (t
)
2093 && !get_odr_type (t
, true)->derived_types
.length())));
2096 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2097 recursive printing. */
2100 dump_odr_type (FILE *f
, odr_type t
, int indent
=0)
2103 fprintf (f
, "%*s type %i: ", indent
* 2, "", t
->id
);
2104 print_generic_expr (f
, t
->type
, TDF_SLIM
);
2105 fprintf (f
, "%s", t
->anonymous_namespace
? " (anonymous namespace)":"");
2106 fprintf (f
, "%s\n", t
->all_derivations_known
? " (derivations known)":"");
2107 if (TYPE_NAME (t
->type
))
2109 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
->type
)))
2110 fprintf (f
, "%*s mangled name: %s\n", indent
* 2, "",
2112 (DECL_ASSEMBLER_NAME (TYPE_NAME (t
->type
))));
2114 if (t
->bases
.length ())
2116 fprintf (f
, "%*s base odr type ids: ", indent
* 2, "");
2117 for (i
= 0; i
< t
->bases
.length (); i
++)
2118 fprintf (f
, " %i", t
->bases
[i
]->id
);
2121 if (t
->derived_types
.length ())
2123 fprintf (f
, "%*s derived types:\n", indent
* 2, "");
2124 for (i
= 0; i
< t
->derived_types
.length (); i
++)
2125 dump_odr_type (f
, t
->derived_types
[i
], indent
+ 1);
2130 /* Dump the type inheritance graph. */
2133 dump_type_inheritance_graph (FILE *f
)
2136 unsigned int num_all_types
= 0, num_types
= 0, num_duplicates
= 0;
2139 fprintf (f
, "\n\nType inheritance graph:\n");
2140 for (i
= 0; i
< odr_types
.length (); i
++)
2142 if (odr_types
[i
] && odr_types
[i
]->bases
.length () == 0)
2143 dump_odr_type (f
, odr_types
[i
]);
2145 for (i
= 0; i
< odr_types
.length (); i
++)
2151 if (!odr_types
[i
]->types
|| !odr_types
[i
]->types
->length ())
2154 /* To aid ODR warnings we also mangle integer constants but do
2155 not consinder duplicates there. */
2156 if (TREE_CODE (odr_types
[i
]->type
) == INTEGER_TYPE
)
2159 /* It is normal to have one duplicate and one normal variant. */
2160 if (odr_types
[i
]->types
->length () == 1
2161 && COMPLETE_TYPE_P (odr_types
[i
]->type
)
2162 && !COMPLETE_TYPE_P ((*odr_types
[i
]->types
)[0]))
2168 fprintf (f
, "Duplicate tree types for odr type %i\n", i
);
2169 print_node (f
, "", odr_types
[i
]->type
, 0);
2170 print_node (f
, "", TYPE_NAME (odr_types
[i
]->type
), 0);
2172 for (j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
2176 fprintf (f
, "duplicate #%i\n", j
);
2177 print_node (f
, "", (*odr_types
[i
]->types
)[j
], 0);
2178 t
= (*odr_types
[i
]->types
)[j
];
2179 while (TYPE_P (t
) && TYPE_CONTEXT (t
))
2181 t
= TYPE_CONTEXT (t
);
2182 print_node (f
, "", t
, 0);
2184 print_node (f
, "", TYPE_NAME ((*odr_types
[i
]->types
)[j
]), 0);
2188 fprintf (f
, "Out of %i types there are %i types with duplicates; "
2189 "%i duplicates overall\n", num_all_types
, num_types
, num_duplicates
);
2192 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2194 We free TYPE_VALUES of enums and also make TYPE_DECLs to not point back
2195 to the type (which is needed to keep them in the same SCC and preserve
2196 location information to output warnings) and subsequently we make all
2197 TYPE_DECLS of same assembler name equivalent. */
2200 free_odr_warning_data ()
2202 static bool odr_data_freed
= false;
2204 if (odr_data_freed
|| !flag_wpa
|| !odr_types_ptr
)
2207 odr_data_freed
= true;
2209 for (unsigned int i
= 0; i
< odr_types
.length (); i
++)
2212 tree t
= odr_types
[i
]->type
;
2214 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
2215 TYPE_VALUES (t
) = NULL
;
2216 TREE_TYPE (TYPE_NAME (t
)) = void_type_node
;
2218 if (odr_types
[i
]->types
)
2219 for (unsigned int j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
2221 tree td
= (*odr_types
[i
]->types
)[j
];
2223 if (TREE_CODE (td
) == ENUMERAL_TYPE
)
2224 TYPE_VALUES (td
) = NULL
;
2225 TYPE_NAME (td
) = TYPE_NAME (t
);
2228 odr_data_freed
= true;
2231 /* Initialize IPA devirt and build inheritance tree graph. */
/* NOTE(review): garbled extraction -- the function's return-type line,
   the loop header that iterates N over the symbol table (presumably a
   FOR_EACH_SYMBOL-style walk; the "if"/"else if" on N below imply it),
   the dump flags declaration, and brace lines are missing.  Code text
   is left untouched.  */
2234 build_type_inheritance_graph (void)
2236 struct symtab_node
*n
;
2237 FILE *inheritance_dump_file
;
2242 free_odr_warning_data ();
2245 timevar_push (TV_IPA_INHERITANCE
);
2246 inheritance_dump_file
= dump_begin (TDI_inheritance
, &flags
);
/* Fresh ODR type hash for the (re)build.  */
2247 odr_hash
= new odr_hash_type (23);
2249 /* We reconstruct the graph starting of types of all methods seen in the
/* Record ODR type of every virtual method's class.  */
2252 if (is_a
<cgraph_node
*> (n
)
2253 && DECL_VIRTUAL_P (n
->decl
)
2254 && n
->real_symbol_p ())
2255 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n
->decl
)), true);
2257 /* Look also for virtual tables of types that do not define any methods.
2259 We need it in a case where class B has virtual base of class A
2260 re-defining its virtual method and there is class C with no virtual
2261 methods with B as virtual base.
2263 Here we output B's virtual method in two variant - for non-virtual
2264 and virtual inheritance. B's virtual table has non-virtual version,
2265 while C's has virtual.
2267 For this reason we need to know about C in order to include both
2268 variants of B. More correctly, record_target_from_binfo should
2269 add both variants of the method when walking B, but we have no
2270 link in between them.
2272 We rely on fact that either the method is exported and thus we
2273 assume it is called externally or C is in anonymous namespace and
2274 thus we will see the vtable. */
2276 else if (is_a
<varpool_node
*> (n
)
2277 && DECL_VIRTUAL_P (n
->decl
)
2278 && TREE_CODE (DECL_CONTEXT (n
->decl
)) == RECORD_TYPE
2279 && TYPE_BINFO (DECL_CONTEXT (n
->decl
))
2280 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n
->decl
))))
2281 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n
->decl
)), true);
2282 if (inheritance_dump_file
)
2284 dump_type_inheritance_graph (inheritance_dump_file
);
2285 dump_end (TDI_inheritance
, inheritance_dump_file
);
2287 free_odr_warning_data ();
2288 timevar_pop (TV_IPA_INHERITANCE
);
2291 /* Return true if N has reference from live virtual table
2292 (and thus can be a destination of polymorphic call).
2293 Be conservatively correct when callgraph is not built or
2294 if the method may be referred externally. */
/* NOTE(review): garbled extraction -- the return-type line, the "return
   true/false" statements of the guard branches and of the final loop,
   the declaration of I, and brace lines are missing.  Code text below
   is left untouched.  */
2297 referenced_from_vtable_p (struct cgraph_node
*node
)
2300 struct ipa_ref
*ref
;
/* Conservative: externally reachable methods are assumed referenced.  */
2303 if (node
->externally_visible
2304 || DECL_EXTERNAL (node
->decl
)
2305 || node
->used_from_other_partition
)
2308 /* Keep this test constant time.
2309 It is unlikely this can happen except for the case where speculative
2310 devirtualization introduced many speculative edges to this node.
2311 In this case the target is very likely alive anyway. */
2312 if (node
->ref_list
.referring
.length () > 100)
2315 /* We need references built. */
2316 if (symtab
->state
<= CONSTRUCTION
)
/* Look for an IPA_REF_ADDR reference from a virtual-table variable;
   recurse through aliases.  */
2319 for (i
= 0; node
->iterate_referring (i
, ref
); i
++)
2320 if ((ref
->use
== IPA_REF_ALIAS
2321 && referenced_from_vtable_p (dyn_cast
<cgraph_node
*> (ref
->referring
)))
2322 || (ref
->use
== IPA_REF_ADDR
2323 && VAR_P (ref
->referring
->decl
)
2324 && DECL_VIRTUAL_P (ref
->referring
->decl
)))
2332 /* Return if TARGET is cxa_pure_virtual. */
2335 is_cxa_pure_virtual_p (tree target
)
2337 return target
&& TREE_CODE (TREE_TYPE (target
)) != METHOD_TYPE
2338 && DECL_NAME (target
)
2339 && id_equal (DECL_NAME (target
),
2340 "__cxa_pure_virtual");
2343 /* If TARGET has associated node, record it in the NODES array.
2344 CAN_REFER specify if program can refer to the target directly.
2345 if TARGET is unknown (NULL) or it cannot be inserted (for example because
2346 its body was already removed and there is no way to refer to it), clear
/* NOTE(review): garbled extraction -- the return-type line, the CAN_REFER
   and COMPLETEP parameters' declaration lines, several branch bodies
   (returns / *completep = false assignments), and brace lines are
   missing.  Code text below is left untouched.  */
2350 maybe_record_node (vec
<cgraph_node
*> &nodes
,
2351 tree target
, hash_set
<tree
> *inserted
,
2355 struct cgraph_node
*target_node
, *alias_target
;
2356 enum availability avail
;
/* Pure-virtual placeholder is handled specially throughout.  */
2357 bool pure_virtual
= is_cxa_pure_virtual_p (target
);
2359 /* __builtin_unreachable do not need to be added into
2360 list of targets; the runtime effect of calling them is undefined.
2361 Only "real" virtual methods should be accounted. */
2362 if (target
&& TREE_CODE (TREE_TYPE (target
)) != METHOD_TYPE
&& !pure_virtual
)
2367 /* The only case when method of anonymous namespace becomes unreferable
2368 is when we completely optimized it out. */
2371 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target
)))
2379 target_node
= cgraph_node::get (target
);
2381 /* Prefer alias target over aliases, so we do not get confused by
2385 alias_target
= target_node
->ultimate_alias_target (&avail
);
2386 if (target_node
!= alias_target
2387 && avail
>= AVAIL_AVAILABLE
2388 && target_node
->get_availability ())
2389 target_node
= alias_target
;
2392 /* Method can only be called by polymorphic call if any
2393 of vtables referring to it are alive.
2395 While this holds for non-anonymous functions, too, there are
2396 cases where we want to keep them in the list; for example
2397 inline functions with -fno-weak are static, but we still
2398 may devirtualize them when instance comes from other unit.
2399 The same holds for LTO.
2401 Currently we ignore these functions in speculative devirtualization.
2402 ??? Maybe it would make sense to be more aggressive for LTO even
2406 && type_in_anonymous_namespace_p (DECL_CONTEXT (target
))
2408 || !referenced_from_vtable_p (target_node
)))
2410 /* See if TARGET is useful function we can deal with. */
2411 else if (target_node
!= NULL
2412 && (TREE_PUBLIC (target
)
2413 || DECL_EXTERNAL (target
)
2414 || target_node
->definition
)
2415 && target_node
->real_symbol_p ())
2417 gcc_assert (!target_node
->global
.inlined_to
);
2418 gcc_assert (target_node
->real_symbol_p ());
2419 /* When sanitizing, do not assume that __cxa_pure_virtual is not called
2420 by valid program. */
2421 if (flag_sanitize
& SANITIZE_UNREACHABLE
)
2423 /* Only add pure virtual if it is the only possible target. This way
2424 we will preserve the diagnostics about pure virtual called in many
2425 cases without disabling optimization in other. */
2426 else if (pure_virtual
)
2428 if (nodes
.length ())
2431 /* If we found a real target, take away cxa_pure_virtual. */
2432 else if (!pure_virtual
&& nodes
.length () == 1
2433 && is_cxa_pure_virtual_p (nodes
[0]->decl
))
/* Presumably a "pop the placeholder" action lived here -- dropped by
   extraction; TODO confirm against upstream source.  */
2435 if (pure_virtual
&& nodes
.length ())
/* Deduplicate by decl; record node in cache and the NODES vector.  */
2437 if (!inserted
->add (target
))
2439 cached_polymorphic_call_targets
->add (target_node
);
2440 nodes
.safe_push (target_node
);
2443 else if (!completep
)
2445 /* We have definition of __cxa_pure_virtual that is not accessible (it is
2446 optimized out or partitioned to other unit) so we cannot add it. When
2447 not sanitizing, there is nothing to do.
2448 Otherwise declare the list incomplete. */
2449 else if (pure_virtual
)
2451 if (flag_sanitize
& SANITIZE_UNREACHABLE
)
2454 else if (flag_ltrans
2455 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target
)))
2459 /* See if BINFO's type matches OUTER_TYPE. If so, look up
2460 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2461 method in vtable and insert method to NODES array
2462 or BASES_TO_CONSIDER if this array is non-NULL.
2463 Otherwise recurse to base BINFOs.
2464 This matches what get_binfo_at_offset does, but with offset
2467 TYPE_BINFOS is a stack of BINFOS of types with defined
2468 virtual table seen on way from class type to BINFO.
2470 MATCHED_VTABLES tracks virtual tables we already did lookup
2471 for virtual function in. INSERTED tracks nodes we already
2474 ANONYMOUS is true if BINFO is part of anonymous namespace.
2476 Clear COMPLETEP when we hit unreferable target.
/* NOTE(review): garbled extraction -- the return-type line, several
   parameter lines (binfo, otr_type, outer_type, anonymous, completep),
   local declarations (i, base_binfo, can_refer), early returns, and
   brace lines are missing.  Code text below is left untouched.  */
2480 record_target_from_binfo (vec
<cgraph_node
*> &nodes
,
2481 vec
<tree
> *bases_to_consider
,
2484 vec
<tree
> &type_binfos
,
2485 HOST_WIDE_INT otr_token
,
2487 HOST_WIDE_INT offset
,
2488 hash_set
<tree
> *inserted
,
2489 hash_set
<tree
> *matched_vtables
,
2493 tree type
= BINFO_TYPE (binfo
);
/* Push BINFOs that own a vtable so the matching one at BINFO's offset
   can be found below.  */
2498 if (BINFO_VTABLE (binfo
))
2499 type_binfos
.safe_push (binfo
);
2500 if (types_same_for_odr (type
, outer_type
))
2503 tree type_binfo
= NULL
;
2505 /* Look up BINFO with virtual table. For normal types it is always last
2507 for (i
= type_binfos
.length () - 1; i
>= 0; i
--)
2508 if (BINFO_OFFSET (type_binfos
[i
]) == BINFO_OFFSET (binfo
))
2510 type_binfo
= type_binfos
[i
];
2513 if (BINFO_VTABLE (binfo
))
2515 /* If this is duplicated BINFO for base shared by virtual inheritance,
2516 we may not have its associated vtable. This is not a problem, since
2517 we will walk it on the other path. */
2520 tree inner_binfo
= get_binfo_at_offset (type_binfo
,
2524 gcc_assert (odr_violation_reported
);
2527 /* For types in anonymous namespace first check if the respective vtable
2528 is alive. If not, we know the type can't be called. */
2529 if (!flag_ltrans
&& anonymous
)
2531 tree vtable
= BINFO_VTABLE (inner_binfo
);
2532 varpool_node
*vnode
;
/* Strip the POINTER_PLUS_EXPR wrapper to reach the vtable VAR_DECL.  */
2534 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
2535 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
2536 vnode
= varpool_node::get (vtable
);
2537 if (!vnode
|| !vnode
->definition
)
2540 gcc_assert (inner_binfo
);
/* In the BASES_TO_CONSIDER mode only test membership (the caller will
   add later); otherwise add now and skip already-visited vtables.  */
2541 if (bases_to_consider
2542 ? !matched_vtables
->contains (BINFO_VTABLE (inner_binfo
))
2543 : !matched_vtables
->add (BINFO_VTABLE (inner_binfo
)))
2546 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
2549 if (!bases_to_consider
)
2550 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
2551 /* Destructors are never called via construction vtables. */
2552 else if (!target
|| !DECL_CXX_DESTRUCTOR_P (target
))
2553 bases_to_consider
->safe_push (target
);
/* Not the outer type: recurse into polymorphic bases.  */
2559 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2560 /* Walking bases that have no virtual method is pointless exercise. */
2561 if (polymorphic_type_binfo_p (base_binfo
))
2562 record_target_from_binfo (nodes
, bases_to_consider
, base_binfo
, otr_type
,
2564 otr_token
, outer_type
, offset
, inserted
,
2565 matched_vtables
, anonymous
, completep
);
/* Presumably pops TYPE_BINFOS here to undo the push above -- the pop
   statement was dropped by extraction; TODO confirm.  */
2566 if (BINFO_VTABLE (binfo
))
2570 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2571 of TYPE, insert them to NODES, recurse into derived nodes.
2572 INSERTED is used to avoid duplicate insertions of methods into NODES.
2573 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2574 Clear COMPLETEP if unreferable target is found.
2576 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2577 all cases where BASE_SKIPPED is true (because the base is abstract
/* NOTE(review): garbled extraction -- the return-type line, parameter
   lines for TYPE / OTR_TYPE / OUTER_TYPE / COMPLETEP, the declaration
   of I, and brace lines are missing.  Code text below is untouched.  */
2581 possible_polymorphic_call_targets_1 (vec
<cgraph_node
*> &nodes
,
2582 hash_set
<tree
> *inserted
,
2583 hash_set
<tree
> *matched_vtables
,
2586 HOST_WIDE_INT otr_token
,
2588 HOST_WIDE_INT offset
,
2590 vec
<tree
> &bases_to_consider
,
2591 bool consider_construction
)
2593 tree binfo
= TYPE_BINFO (type
->type
);
2595 auto_vec
<tree
, 8> type_binfos
;
2596 bool possibly_instantiated
= type_possibly_instantiated_p (type
->type
);
2598 /* We may need to consider types w/o instances because of possible derived
2599 types using their methods either directly or via construction vtables.
2600 We are safe to skip them when all derivations are known, since we will
2602 This is done by recording them to BASES_TO_CONSIDER array. */
2603 if (possibly_instantiated
|| consider_construction
)
2605 record_target_from_binfo (nodes
,
2606 (!possibly_instantiated
2607 && type_all_derivations_known_p (type
->type
))
2608 ? &bases_to_consider
: NULL
,
2609 binfo
, otr_type
, type_binfos
, otr_token
,
2611 inserted
, matched_vtables
,
2612 type
->anonymous_namespace
, completep
);
/* Depth-first recursion over the whole derived-type subtree.  */
2614 for (i
= 0; i
< type
->derived_types
.length (); i
++)
2615 possible_polymorphic_call_targets_1 (nodes
, inserted
,
2618 type
->derived_types
[i
],
2619 otr_token
, outer_type
, offset
, completep
,
2620 bases_to_consider
, consider_construction
);
2623 /* Cache of queries for polymorphic call targets.
2625 Enumerating all call targets may get expensive when there are many
2626 polymorphic calls in the program, so we memoize all the previous
2627 queries and avoid duplicated work. */
/* Cache entry: one memoized possible_polymorphic_call_targets () query.
   NOTE(review): extraction dropped several members visible in use later
   (type, speculative, complete, type_warning, decl_warning) and the
   closing brace.  Fields below are left untouched.  */
2629 struct polymorphic_call_target_d
2631 HOST_WIDE_INT otr_token
;
2632 ipa_polymorphic_call_context context
;
2634 vec
<cgraph_node
*> targets
;
/* Snapshot of odr_types.length (); new types can change the answer.  */
2637 unsigned int n_odr_types
;
2642 /* Polymorphic call target cache helpers. */
/* Hash traits for the query cache: hash/equality over the full query key
   (token, context, flags) plus entry removal that releases the targets
   vector.  */
2644 struct polymorphic_call_target_hasher
2645 : pointer_hash
<polymorphic_call_target_d
>
2647 static inline hashval_t
hash (const polymorphic_call_target_d
*);
2648 static inline bool equal (const polymorphic_call_target_d
*,
2649 const polymorphic_call_target_d
*);
2650 static inline void remove (polymorphic_call_target_d
*);
2653 /* Return the computed hashcode for ODR_QUERY. */
/* NOTE(review): garbled extraction -- the "inline hashval_t" line and
   braces are missing.  Every member mixed in here must also be compared
   in ::equal below.  Code text is untouched.  */
2656 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d
*odr_query
)
2658 inchash::hash
hstate (odr_query
->otr_token
);
2660 hstate
.add_hwi (odr_query
->type
->id
);
2661 hstate
.merge_hash (TYPE_UID (odr_query
->context
.outer_type
));
2662 hstate
.add_hwi (odr_query
->context
.offset
);
/* Number of known ODR types is part of the key: adding a type can
   change the target set.  */
2663 hstate
.add_hwi (odr_query
->n_odr_types
);
2665 if (odr_query
->context
.speculative_outer_type
)
2667 hstate
.merge_hash (TYPE_UID (odr_query
->context
.speculative_outer_type
));
2668 hstate
.add_hwi (odr_query
->context
.speculative_offset
);
/* Boolean flags are packed via add_flag and sealed by commit_flag.  */
2670 hstate
.add_flag (odr_query
->speculative
);
2671 hstate
.add_flag (odr_query
->context
.maybe_in_construction
);
2672 hstate
.add_flag (odr_query
->context
.maybe_derived_type
);
2673 hstate
.add_flag (odr_query
->context
.speculative_maybe_derived_type
);
2674 hstate
.commit_flag ();
2675 return hstate
.end ();
2678 /* Compare cache entries T1 and T2. */
/* Structural equality over the full query key; must agree with ::hash
   above.  NOTE(review): the "inline bool" line and braces were dropped
   by extraction; code text is untouched.  */
2681 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d
*t1
,
2682 const polymorphic_call_target_d
*t2
)
2684 return (t1
->type
== t2
->type
&& t1
->otr_token
== t2
->otr_token
2685 && t1
->speculative
== t2
->speculative
2686 && t1
->context
.offset
== t2
->context
.offset
2687 && t1
->context
.speculative_offset
== t2
->context
.speculative_offset
2688 && t1
->context
.outer_type
== t2
->context
.outer_type
2689 && t1
->context
.speculative_outer_type
== t2
->context
.speculative_outer_type
2690 && t1
->context
.maybe_in_construction
2691 == t2
->context
.maybe_in_construction
2692 && t1
->context
.maybe_derived_type
== t2
->context
.maybe_derived_type
2693 && (t1
->context
.speculative_maybe_derived_type
2694 == t2
->context
.speculative_maybe_derived_type
)
2695 /* Adding new type may affect outcome of target search. */
2696 && t1
->n_odr_types
== t2
->n_odr_types
);
2699 /* Remove entry in polymorphic call target cache hash. */
/* Releases the entry's target vector.  NOTE(review): the entry struct
   itself is heap-allocated (XCNEW below in the file); the matching
   free of V appears to have been dropped by extraction -- confirm
   against upstream source.  */
2702 polymorphic_call_target_hasher::remove (polymorphic_call_target_d
*v
)
2704 v
->targets
.release ();
2708 /* Polymorphic call target query cache. */
/* File-scope cache instance; created lazily in
   possible_polymorphic_call_targets and torn down by
   free_polymorphic_call_targets_hash.  */
2710 typedef hash_table
<polymorphic_call_target_hasher
>
2711 polymorphic_call_target_hash_type
;
2712 static polymorphic_call_target_hash_type
*polymorphic_call_target_hash
;
2714 /* Destroy polymorphic call target query cache. */
2717 free_polymorphic_call_targets_hash ()
2719 if (cached_polymorphic_call_targets
)
2721 delete polymorphic_call_target_hash
;
2722 polymorphic_call_target_hash
= NULL
;
2723 delete cached_polymorphic_call_targets
;
2724 cached_polymorphic_call_targets
= NULL
;
2728 /* Force rebuilding type inheritance graph from scratch.
2729 This is use to make sure that we do not keep references to types
2730 which was not visible to free_lang_data. */
/* NOTE(review): garbled extraction -- the return-type line, braces, and
   the statements that tear down odr_hash (presumably "delete odr_hash;
   odr_hash = NULL;" -- confirm upstream) are missing.  The visible
   statements reset the ODR type vector and drop the query cache.  */
2733 rebuild_type_inheritance_graph ()
2739 odr_types_ptr
= NULL
;
2740 free_polymorphic_call_targets_hash ();
2743 /* When virtual function is removed, we may need to flush the cache. */
2746 devirt_node_removal_hook (struct cgraph_node
*n
, void *d ATTRIBUTE_UNUSED
)
2748 if (cached_polymorphic_call_targets
2749 && cached_polymorphic_call_targets
->contains (n
))
2750 free_polymorphic_call_targets_hash ();
2753 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
/* NOTE(review): garbled extraction -- the return-type line, the VTABLE
   parameter line, locals (i, base_binfo), the success/failure "return"
   statements, and braces are missing.  Code text below is untouched.  */
2756 subbinfo_with_vtable_at_offset (tree binfo
, unsigned HOST_WIDE_INT offset
,
2759 tree v
= BINFO_VTABLE (binfo
);
2762 unsigned HOST_WIDE_INT this_offset
;
/* Decode this BINFO's vtable pointer value into (decl, offset).  */
2766 if (!vtable_pointer_value_to_vtable (v
, &v
, &this_offset
))
/* Match by offset and assembler name of the vtable decl.  */
2769 if (offset
== this_offset
2770 && DECL_ASSEMBLER_NAME (v
) == DECL_ASSEMBLER_NAME (vtable
))
/* Otherwise recurse into polymorphic bases.  */
2774 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2775 if (polymorphic_type_binfo_p (base_binfo
))
2777 base_binfo
= subbinfo_with_vtable_at_offset (base_binfo
, offset
, vtable
);
2784 /* T is known constant value of virtual table pointer.
2785 Store virtual table to V and its offset to OFFSET.
2786 Return false if T does not look like virtual table reference. */
/* NOTE(review): garbled extraction -- the "bool" return-type line,
   "return true;" / "return false;" statements, and braces are missing.
   Code text below is untouched.  */
2789 vtable_pointer_value_to_vtable (const_tree t
, tree
*v
,
2790 unsigned HOST_WIDE_INT
*offset
)
2792 /* We expect &MEM[(void *)&virtual_table + 16B].
2793 We obtain object's BINFO from the context of the virtual table.
2794 This one contains pointer to virtual table represented via
2795 POINTER_PLUS_EXPR. Verify that this pointer matches what
2796 we propagated through.
2798 In the case of virtual inheritance, the virtual tables may
2799 be nested, i.e. the offset may be different from 16 and we may
2800 need to dive into the type representation. */
2801 if (TREE_CODE (t
) == ADDR_EXPR
2802 && TREE_CODE (TREE_OPERAND (t
, 0)) == MEM_REF
2803 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == ADDR_EXPR
2804 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 1)) == INTEGER_CST
2805 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0))
2807 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2808 (TREE_OPERAND (t
, 0), 0), 0)))
/* &MEM[&vtable + cst] form: extract decl and constant offset.  */
2810 *v
= TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0);
2811 *offset
= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t
, 0), 1));
2815 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2816 We need to handle it when T comes from static variable initializer or
2818 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
2820 *offset
= tree_to_uhwi (TREE_OPERAND (t
, 1));
2821 t
= TREE_OPERAND (t
, 0);
/* After stripping, T must be the address of the vtable decl.  */
2826 if (TREE_CODE (t
) != ADDR_EXPR
)
2828 *v
= TREE_OPERAND (t
, 0);
2832 /* T is known constant value of virtual table pointer. Return BINFO of the
/* Decodes T into (vtable decl, offset) and maps that back to the BINFO
   of the instantiated type.  NOTE(review): garbled extraction -- the
   "tree" return-type line, the VTABLE declaration, a failure "return
   NULL" and the trailing call arguments are missing; text untouched.  */
2836 vtable_pointer_value_to_binfo (const_tree t
)
2839 unsigned HOST_WIDE_INT offset
;
2841 if (!vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2844 /* FIXME: for stores of construction vtables we return NULL,
2845 because we do not have BINFO for those. Eventually we should fix
2846 our representation to allow this case to be handled, too.
2847 In the case we see store of BINFO we however may assume
2848 that standard folding will be able to cope with it. */
2849 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
2853 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2854 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2855 and insert them in NODES.
2857 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
/* NOTE(review): garbled extraction -- the return-type line, the
   OUTER_TYPE and COMPLETEP parameter lines, locals (fld, base_binfo,
   can_refer), the enclosing "while" loop header that keeps stripping
   bases, and braces are missing.  Code text below is untouched.  */
2860 record_targets_from_bases (tree otr_type
,
2861 HOST_WIDE_INT otr_token
,
2863 HOST_WIDE_INT offset
,
2864 vec
<cgraph_node
*> &nodes
,
2865 hash_set
<tree
> *inserted
,
2866 hash_set
<tree
> *matched_vtables
,
2871 HOST_WIDE_INT pos
, size
;
/* Stop once we descended down to OTR_TYPE itself.  */
2875 if (types_same_for_odr (outer_type
, otr_type
))
/* Find the (artificial base) field covering OFFSET.  */
2878 for (fld
= TYPE_FIELDS (outer_type
); fld
; fld
= DECL_CHAIN (fld
))
2880 if (TREE_CODE (fld
) != FIELD_DECL
)
2883 pos
= int_bit_position (fld
);
2884 size
= tree_to_shwi (DECL_SIZE (fld
));
2885 if (pos
<= offset
&& (pos
+ size
) > offset
2886 /* Do not get confused by zero sized bases. */
2887 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld
))))
2890 /* Within a class type we should always find corresponding fields. */
2891 gcc_assert (fld
&& TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
);
2893 /* Nonbase types should have been stripped by outer_class_type. */
2894 gcc_assert (DECL_ARTIFICIAL (fld
));
/* Descend into the base and continue the walk there.  */
2896 outer_type
= TREE_TYPE (fld
);
2899 base_binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
),
2903 gcc_assert (odr_violation_reported
);
2906 gcc_assert (base_binfo
);
/* Look up the method in this base's vtable unless already done.  */
2907 if (!matched_vtables
->add (BINFO_VTABLE (base_binfo
)))
2910 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
/* Destructors are never called via construction vtables.  */
2913 if (!target
|| ! DECL_CXX_DESTRUCTOR_P (target
))
2914 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
2915 matched_vtables
->add (BINFO_VTABLE (base_binfo
));
2920 /* When virtual table is removed, we may need to flush the cache. */
2923 devirt_variable_node_removal_hook (varpool_node
*n
,
2924 void *d ATTRIBUTE_UNUSED
)
2926 if (cached_polymorphic_call_targets
2927 && DECL_VIRTUAL_P (n
->decl
)
2928 && type_in_anonymous_namespace_p (DECL_CONTEXT (n
->decl
)))
2929 free_polymorphic_call_targets_hash ();
2932 /* Record about how many calls would benefit from given type to be final. */
/* NOTE(review): extraction dropped the remaining members (per upstream:
   the type tree and an integer call count) and the closing brace.  */
2934 struct odr_type_warn_count
/* Accumulated dynamic (profile) count of the affected calls.  */
2938 profile_count dyn_count
;
2941 /* Record about how many calls would benefit from given method to be final. */
/* NOTE(review): extraction dropped the remaining members (per upstream:
   the method decl and an integer call count) and the closing brace.  */
2943 struct decl_warn_count
/* Accumulated dynamic (profile) count of the affected calls.  */
2947 profile_count dyn_count
;
2950 /* Information about type and decl warnings. */
/* Aggregates -Wsuggest-final-types / -Wsuggest-final-methods bookkeeping:
   per-ODR-type counters (indexed by odr_type id) and per-decl counters.  */
2952 struct final_warning_record
2954 /* If needed grow type_warnings vector and initialize new decl_warn_count
2955 to have dyn_count set to profile_count::zero (). */
2956 void grow_type_warnings (unsigned newlen
);
/* Profile count of the call currently being analyzed.  */
2958 profile_count dyn_count
;
2959 auto_vec
<odr_type_warn_count
> type_warnings
;
2960 hash_map
<tree
, decl_warn_count
> decl_warnings
;
/* Grow TYPE_WARNINGS to NEWLEN entries, zero-initializing the profile
   count of each freshly added slot.  NOTE(review): garbled extraction --
   the "void" return-type line, the guard comparing NEWLEN against the
   current length, and braces are missing.  Code text is untouched.  */
2964 final_warning_record::grow_type_warnings (unsigned newlen
)
2966 unsigned len
= type_warnings
.length ();
2969 type_warnings
.safe_grow_cleared (newlen
);
/* safe_grow_cleared zero-fills, but profile_count needs an explicit
   profile_count::zero () to be "initialized".  */
2970 for (unsigned i
= len
; i
< newlen
; i
++)
2971 type_warnings
[i
].dyn_count
= profile_count::zero ();
/* Global collector for final-type/final-method warning statistics; when
   NULL, no such statistics are gathered (see uses below).  */
2975 struct final_warning_record
*final_warning_records
;
2977 /* Return vector containing possible targets of polymorphic call of type
2978 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2979 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2980 OTR_TYPE and include their virtual method. This is useful for types
2981 possibly in construction or destruction where the virtual table may
2982 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
2983 us to walk the inheritance graph for all derivations.
2985 If COMPLETEP is non-NULL, store true if the list is complete.
2986 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2987 in the target cache. If user needs to visit every target list
2988 just once, it can memoize them.
2990 If SPECULATIVE is set, the list will not contain targets that
2991 are not speculatively taken.
2993 Returned vector is placed into cache. It is NOT caller's responsibility
2994 to free it. The vector can be freed on cgraph_remove_node call if
2995 the particular node is a virtual function present in the cache. */
/* NOTE(review): garbled extraction -- the return-type line, trailing
   parameters (completep, cache_token, speculative), several local
   declarations (i, binfo, target, complete), early "return" statements,
   and many brace lines are missing.  Code text below is untouched.  */
2998 possible_polymorphic_call_targets (tree otr_type
,
2999 HOST_WIDE_INT otr_token
,
3000 ipa_polymorphic_call_context context
,
3005 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
3006 vec
<cgraph_node
*> nodes
= vNULL
;
3007 auto_vec
<tree
, 8> bases_to_consider
;
3008 odr_type type
, outer_type
;
3009 polymorphic_call_target_d key
;
3010 polymorphic_call_target_d
**slot
;
3014 bool can_refer
= false;
3015 bool skipped
= false;
/* Queries are canonicalized to the main variant of OTR_TYPE.  */
3017 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
3019 /* If ODR is not initialized or the context is invalid, return empty
3021 if (!odr_hash
|| context
.invalid
|| !TYPE_BINFO (otr_type
))
3024 *completep
= context
.invalid
;
3026 *cache_token
= NULL
;
3030 /* Do not bother to compute speculative info when user do not asks for it. */
3031 if (!speculative
|| !context
.speculative_outer_type
)
3032 context
.clear_speculation ();
3034 type
= get_odr_type (otr_type
, true);
3036 /* Recording type variants would waste results cache. */
3037 gcc_assert (!context
.outer_type
3038 || TYPE_MAIN_VARIANT (context
.outer_type
) == context
.outer_type
);
3040 /* Look up the outer class type we want to walk.
3041 If we fail to do so, the context is invalid. */
3042 if ((context
.outer_type
|| context
.speculative_outer_type
)
3043 && !context
.restrict_to_inner_class (otr_type
))
3048 *cache_token
= NULL
;
3051 gcc_assert (!context
.invalid
);
3053 /* Check that restrict_to_inner_class kept the main variant. */
3054 gcc_assert (!context
.outer_type
3055 || TYPE_MAIN_VARIANT (context
.outer_type
) == context
.outer_type
);
3057 /* We canonicalize our query, so we do not need extra hashtable entries. */
3059 /* Without outer type, we have no use for offset. Just do the
3060 basic search from inner type. */
3061 if (!context
.outer_type
)
3062 context
.clear_outer_type (otr_type
);
3063 /* We need to update our hierarchy if the type does not exist. */
3064 outer_type
= get_odr_type (context
.outer_type
, true);
3065 /* If the type is complete, there are no derivations. */
3066 if (TYPE_FINAL_P (outer_type
->type
))
3067 context
.maybe_derived_type
= false;
3069 /* Initialize query cache. */
3070 if (!cached_polymorphic_call_targets
)
3072 cached_polymorphic_call_targets
= new hash_set
<cgraph_node
*>;
3073 polymorphic_call_target_hash
3074 = new polymorphic_call_target_hash_type (23);
/* Register removal hooks once so the cache gets flushed when symbols
   it refers to disappear.  */
3075 if (!node_removal_hook_holder
)
3077 node_removal_hook_holder
=
3078 symtab
->add_cgraph_removal_hook (&devirt_node_removal_hook
, NULL
);
3079 symtab
->add_varpool_removal_hook (&devirt_variable_node_removal_hook
,
/* Canonicalize outer types to the copies recorded in the ODR table.  */
3086 if (context
.outer_type
!= otr_type
)
3088 = get_odr_type (context
.outer_type
, true)->type
;
3089 if (context
.speculative_outer_type
)
3090 context
.speculative_outer_type
3091 = get_odr_type (context
.speculative_outer_type
, true)->type
;
3094 /* Look up cached answer. */
3096 key
.otr_token
= otr_token
;
3097 key
.speculative
= speculative
;
3098 key
.context
= context
;
3099 key
.n_odr_types
= odr_types
.length ();
3100 slot
= polymorphic_call_target_hash
->find_slot (&key
, INSERT
);
3102 *cache_token
= (void *)*slot
;
/* Cache hit: replay warning bookkeeping and return memoized vector.  */
3106 *completep
= (*slot
)->complete
;
3107 if ((*slot
)->type_warning
&& final_warning_records
)
3109 final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].count
++;
3110 if (!final_warning_records
->type_warnings
3111 [(*slot
)->type_warning
- 1].dyn_count
.initialized_p ())
3112 final_warning_records
->type_warnings
3113 [(*slot
)->type_warning
- 1].dyn_count
= profile_count::zero ();
3114 if (final_warning_records
->dyn_count
> 0)
3115 final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].dyn_count
3116 = final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].dyn_count
3117 + final_warning_records
->dyn_count
;
3119 if (!speculative
&& (*slot
)->decl_warning
&& final_warning_records
)
3121 struct decl_warn_count
*c
=
3122 final_warning_records
->decl_warnings
.get ((*slot
)->decl_warning
);
3124 if (final_warning_records
->dyn_count
> 0)
3125 c
->dyn_count
+= final_warning_records
->dyn_count
;
3127 return (*slot
)->targets
;
3132 /* Do actual search. */
3133 timevar_push (TV_IPA_VIRTUAL_CALL
);
3134 *slot
= XCNEW (polymorphic_call_target_d
);
3136 *cache_token
= (void *)*slot
;
3137 (*slot
)->type
= type
;
3138 (*slot
)->otr_token
= otr_token
;
3139 (*slot
)->context
= context
;
3140 (*slot
)->speculative
= speculative
;
3142 hash_set
<tree
> inserted
;
3143 hash_set
<tree
> matched_vtables
;
3145 /* First insert targets we speculatively identified as likely. */
3146 if (context
.speculative_outer_type
)
3148 odr_type speculative_outer_type
;
3149 bool speculation_complete
= true;
3151 /* First insert target from type itself and check if it may have
3153 speculative_outer_type
= get_odr_type (context
.speculative_outer_type
, true);
3154 if (TYPE_FINAL_P (speculative_outer_type
->type
))
3155 context
.speculative_maybe_derived_type
= false;
3156 binfo
= get_binfo_at_offset (TYPE_BINFO (speculative_outer_type
->type
),
3157 context
.speculative_offset
, otr_type
);
3159 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
3164 /* In the case we get complete method, we don't need
3165 to walk derivations. */
3166 if (target
&& DECL_FINAL_P (target
))
3167 context
.speculative_maybe_derived_type
= false;
3168 if (type_possibly_instantiated_p (speculative_outer_type
->type
))
3169 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &speculation_complete
);
3171 matched_vtables
.add (BINFO_VTABLE (binfo
));
3174 /* Next walk recursively all derived types. */
3175 if (context
.speculative_maybe_derived_type
)
3176 for (i
= 0; i
< speculative_outer_type
->derived_types
.length(); i
++)
3177 possible_polymorphic_call_targets_1 (nodes
, &inserted
,
3180 speculative_outer_type
->derived_types
[i
],
3181 otr_token
, speculative_outer_type
->type
,
3182 context
.speculative_offset
,
3183 &speculation_complete
,
/* Non-speculative search (also done when speculation found nothing).  */
3188 if (!speculative
|| !nodes
.length ())
3190 /* First see virtual method of type itself. */
3191 binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
->type
),
3192 context
.offset
, otr_type
);
3194 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
3198 gcc_assert (odr_violation_reported
);
3202 /* Destructors are never called through construction virtual tables,
3203 because the type is always known. */
3204 if (target
&& DECL_CXX_DESTRUCTOR_P (target
))
3205 context
.maybe_in_construction
= false;
3209 /* In the case we get complete method, we don't need
3210 to walk derivations. */
3211 if (DECL_FINAL_P (target
))
3212 context
.maybe_derived_type
= false;
3215 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3216 if (type_possibly_instantiated_p (outer_type
->type
))
3217 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &complete
);
3222 matched_vtables
.add (BINFO_VTABLE (binfo
));
3224 /* Next walk recursively all derived types. */
3225 if (context
.maybe_derived_type
)
3227 for (i
= 0; i
< outer_type
->derived_types
.length(); i
++)
3228 possible_polymorphic_call_targets_1 (nodes
, &inserted
,
3231 outer_type
->derived_types
[i
],
3232 otr_token
, outer_type
->type
,
3233 context
.offset
, &complete
,
3235 context
.maybe_in_construction
);
/* Maintain -Wsuggest-final-types / -Wsuggest-final-methods statistics
   when exactly one real target was found.  */
3237 if (!outer_type
->all_derivations_known
)
3239 if (!speculative
&& final_warning_records
3240 && nodes
.length () == 1
3241 && TREE_CODE (TREE_TYPE (nodes
[0]->decl
)) == METHOD_TYPE
)
3244 && warn_suggest_final_types
3245 && !outer_type
->derived_types
.length ())
3247 final_warning_records
->grow_type_warnings
3249 final_warning_records
->type_warnings
[outer_type
->id
].count
++;
3250 if (!final_warning_records
->type_warnings
3251 [outer_type
->id
].dyn_count
.initialized_p ())
3252 final_warning_records
->type_warnings
3253 [outer_type
->id
].dyn_count
= profile_count::zero ();
3254 final_warning_records
->type_warnings
[outer_type
->id
].dyn_count
3255 += final_warning_records
->dyn_count
;
3256 final_warning_records
->type_warnings
[outer_type
->id
].type
/* +1 so zero can mean "no warning recorded" in the cache slot.  */
3258 (*slot
)->type_warning
= outer_type
->id
+ 1;
3261 && warn_suggest_final_methods
3262 && types_same_for_odr (DECL_CONTEXT (nodes
[0]->decl
),
3266 struct decl_warn_count
&c
=
3267 final_warning_records
->decl_warnings
.get_or_insert
3268 (nodes
[0]->decl
, &existed
);
3273 c
.dyn_count
+= final_warning_records
->dyn_count
;
3278 c
.dyn_count
= final_warning_records
->dyn_count
;
3279 c
.decl
= nodes
[0]->decl
;
3281 (*slot
)->decl_warning
= nodes
[0]->decl
;
3290 /* Destructors are never called through construction virtual tables,
3291 because the type is always known. One of entries may be
3292 cxa_pure_virtual so look to at least two of them. */
3293 if (context
.maybe_in_construction
)
3294 for (i
=0 ; i
< MIN (nodes
.length (), 2); i
++)
3295 if (DECL_CXX_DESTRUCTOR_P (nodes
[i
]->decl
))
3296 context
.maybe_in_construction
= false;
3297 if (context
.maybe_in_construction
)
3299 if (type
!= outer_type
3301 || (context
.maybe_derived_type
3302 && !type_all_derivations_known_p (outer_type
->type
))))
3303 record_targets_from_bases (otr_type
, otr_token
, outer_type
->type
,
3304 context
.offset
, nodes
, &inserted
,
3305 &matched_vtables
, &complete
);
3307 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &complete
);
/* Targets deferred from abstract bases are merged in at the end.  */
3308 for (i
= 0; i
< bases_to_consider
.length(); i
++)
3309 maybe_record_node (nodes
, bases_to_consider
[i
], &inserted
, can_refer
, &complete
);
/* Memoize the result before returning it.  */
3314 (*slot
)->targets
= nodes
;
3315 (*slot
)->complete
= complete
;
3316 (*slot
)->n_odr_types
= odr_types
.length ();
3318 *completep
= complete
;
3320 timevar_pop (TV_IPA_VIRTUAL_CALL
);
/* hash_map traverse callback: collect a pointer to each decl_warn_count
   VALUE into VEC.  NOTE(review): garbled extraction -- the "bool"
   return-type line, the "return true;" (to continue traversal), and
   braces are missing.  Code text is untouched.  */
3325 add_decl_warning (const tree
&key ATTRIBUTE_UNUSED
, const decl_warn_count
&value
,
3326 vec
<const decl_warn_count
*> *vec
)
3328 vec
->safe_push (&value
);
3332 /* Dump target list TARGETS into FILE. */
/* NOTE(review): garbled extraction -- the return-type line, the
   declarations of I and NAME, the free of the demangled NAME, and
   braces are missing.  Code text below is untouched.  */
3335 dump_targets (FILE *f
, vec
<cgraph_node
*> targets
, bool verbose
)
3339 for (i
= 0; i
< targets
.length (); i
++)
/* Prefer the demangled C++ name when available.  */
3343 name
= cplus_demangle_v3 (targets
[i
]->asm_name (), 0);
3344 fprintf (f
, " %s/%i", name
? name
: targets
[i
]->name (),
3348 if (!targets
[i
]->definition
)
3349 fprintf (f
, " (no definition%s)",
3350 DECL_DECLARED_INLINE_P (targets
[i
]->decl
)
3352 /* With many targets for every call polymorphic dumps are going to
3353 be quadratic in size. */
3354 if (i
> 10 && !verbose
)
3356 fprintf (f
, " ... and %i more targets\n", targets
.length () - i
);
3363 /* Dump all possible targets of a polymorphic call. */
/* Dumps both the full and the speculative target lists for the call
   described by OTR_TYPE/OTR_TOKEN/CTX.  NOTE(review): garbled
   extraction -- the return-type line, parameter lines for OTR_TYPE and
   VERBOSE, locals (final, len), an early return when TYPE is unknown,
   and braces are missing.  Code text below is untouched.  */
3366 dump_possible_polymorphic_call_targets (FILE *f
,
3368 HOST_WIDE_INT otr_token
,
3369 const ipa_polymorphic_call_context
&ctx
,
3372 vec
<cgraph_node
*> targets
;
3374 odr_type type
= get_odr_type (TYPE_MAIN_VARIANT (otr_type
), false);
/* Full (non-speculative) list first.  */
3379 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
3381 &final
, NULL
, false);
3382 fprintf (f
, " Targets of polymorphic call of type %i:", type
->id
);
3383 print_generic_expr (f
, type
->type
, TDF_SLIM
);
3384 fprintf (f
, " token %i\n", (int)otr_token
);
3388 fprintf (f
, " %s%s%s%s\n ",
3389 final
? "This is a complete list." :
3390 "This is partial list; extra targets may be defined in other units.",
3391 ctx
.maybe_in_construction
? " (base types included)" : "",
3392 ctx
.maybe_derived_type
? " (derived types included)" : "",
3393 ctx
.speculative_maybe_derived_type
? " (speculative derived types included)" : "");
3394 len
= targets
.length ();
3395 dump_targets (f
, targets
, verbose
);
/* Speculative list, printed only when it differs in length.  */
3397 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
3399 &final
, NULL
, true);
3400 if (targets
.length () != len
)
3402 fprintf (f
, " Speculative targets:");
3403 dump_targets (f
, targets
, verbose
);
3405 /* Ugly: during callgraph construction the target cache may get populated
3406 before all targets are found. While this is harmless (because all local
3407 types are discovered and only in those case we devirtualize fully and we
3408 don't do speculative devirtualization before IPA stage) it triggers
3409 assert here when dumping at that stage also populates the case with
3410 speculative targets. Quietly ignore this. */
3411 gcc_assert (symtab
->state
< IPA_SSA
|| targets
.length () <= len
);
3416 /* Return true if N can be possibly target of a polymorphic call of
3417 OTR_TYPE/OTR_TOKEN. */
/* CTX is the call's polymorphic context; N the candidate node.  The
   early tests accept stand-in targets (builtin unreachable/trap and
   cxa_pure_virtual) before consulting the real target list.  */
3420 possible_polymorphic_call_target_p (tree otr_type
,
3421 HOST_WIDE_INT otr_token
,
3422 const ipa_polymorphic_call_context
&ctx
,
3423 struct cgraph_node
*n
)
3425 vec
<cgraph_node
*> targets
;
3427 enum built_in_function fcode
;
/* __builtin_unreachable / __builtin_trap are inserted in place of
   impossible targets, so they are always acceptable.  */
3430 if (TREE_CODE (TREE_TYPE (n
->decl
)) == FUNCTION_TYPE
3431 && ((fcode
= DECL_FUNCTION_CODE (n
->decl
)) == BUILT_IN_UNREACHABLE
3432 || fcode
== BUILT_IN_TRAP
))
3435 if (is_cxa_pure_virtual_p (n
->decl
))
/* Scan the computed target list for a node semantically equivalent
   to N (covers aliases/thunks of the same symbol).  */
3440 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
, ctx
, &final
)
;
3441 for (i
= 0; i
< targets
.length (); i
++)
3442 if (n
->semantically_equivalent_p (targets
[i
]))
3445 /* At a moment we allow middle end to dig out new external declarations
3446 as a targets of polymorphic calls. */
3447 if (!final
&& !n
->definition
)
3454 /* Return true if N can be possibly target of a polymorphic call of
3455 OBJ_TYPE_REF expression REF in STMT. */
/* Convenience overload: builds the polymorphic call context from the
   current function, REF, and STMT, then defers to the
   otr_type/otr_token overload above.  */
3458 possible_polymorphic_call_target_p (tree ref
,
3460 struct cgraph_node
*n
)
3462 ipa_polymorphic_call_context
context (current_function_decl
, ref
, stmt
);
/* The OBJ_TYPE_REF carried by the call supplies class and token.  */
3463 tree call_fn
= gimple_call_fn (stmt
);
3465 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn
),
3467 (OBJ_TYPE_REF_TOKEN (call_fn
)),
3473 /* After callgraph construction new external nodes may appear.
3474 Add them into the graph. */
3477 update_type_inheritance_graph (void)
3479 struct cgraph_node
*n
;
/* Cached target lists may be stale once new nodes appeared.  */
3483 free_polymorphic_call_targets_hash ();
3484 timevar_push (TV_IPA_INHERITANCE
);
3485 /* We reconstruct the graph starting from types of all methods seen in the
   unit; for every real virtual method register its base type (creating
   the ODR record — second argument true).  */
3487 FOR_EACH_FUNCTION (n
)
3488 if (DECL_VIRTUAL_P (n
->decl
)
3490 && n
->real_symbol_p ())
3491 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n
->decl
)), true);
3492 timevar_pop (TV_IPA_INHERITANCE
);
3496 /* Return true if N looks like likely target of a polymorphic call.
3497 Rule out cxa_pure_virtual, noreturns, function declared cold and
3498 other obvious cases. */
3501 likely_target_p (struct cgraph_node
*n
)
3504 /* cxa_pure_virtual and similar things are not likely. */
3505 if (TREE_CODE (TREE_TYPE (n
->decl
)) != METHOD_TYPE
)
/* Noreturn targets (throwing stubs etc.) are unlikely call targets.  */
3507 flags
= flags_from_decl_or_type (n
->decl
);
3508 if (flags
& ECF_NORETURN
)
/* Explicitly cold-attributed or low-frequency functions are ruled out.  */
3510 if (lookup_attribute ("cold",
3511 DECL_ATTRIBUTES (n
->decl
)))
3513 if (n
->frequency
< NODE_FREQUENCY_NORMAL
)
3515 /* If there are no live virtual tables referring the target,
3516 the only way the target can be called is an instance coming from other
3517 compilation unit; speculative devirtualization is built around an
3518 assumption that won't happen. */
3519 if (!referenced_from_vtable_p (n
))
3524 /* Compare type warning records P1 and P2 and choose one with larger count;
3525 helper for qsort. */
/* Primary key: dyn_count (profile weight); tie-break: static count,
   descending via the subtraction below.  NOTE(review): the return
   statements for the two dyn_count tests were dropped in extraction —
   presumably sorting larger dyn_count first; confirm upstream.  The
   int subtraction tie-break can overflow for extreme counts.  */
3528 type_warning_cmp (const void *p1
, const void *p2
)
3530 const odr_type_warn_count
*t1
= (const odr_type_warn_count
*)p1
;
3531 const odr_type_warn_count
*t2
= (const odr_type_warn_count
*)p2
;
3533 if (t1
->dyn_count
< t2
->dyn_count
)
3535 if (t1
->dyn_count
> t2
->dyn_count
)
3537 return t2
->count
- t1
->count
;
3540 /* Compare decl warning records P1 and P2 and choose one with larger count;
3541 helper for qsort. */
/* Same ordering as type_warning_cmp, but the qsorted array holds
   POINTERS to records, hence the extra dereference in the casts.
   NOTE(review): return statements of the dyn_count tests were dropped
   in extraction — presumably larger dyn_count sorts first; confirm.  */
3544 decl_warning_cmp (const void *p1
, const void *p2
)
3546 const decl_warn_count
*t1
= *(const decl_warn_count
* const *)p1
;
3547 const decl_warn_count
*t2
= *(const decl_warn_count
* const *)p2
;
3549 if (t1
->dyn_count
< t2
->dyn_count
)
3551 if (t1
->dyn_count
> t2
->dyn_count
)
3553 return t2
->count
- t1
->count
;
3557 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
   context CTX.  Returns the single likely target if one exists and
   passes the sanity checks below, otherwise NULL (on the lines dropped
   by extraction — confirm upstream).  */
3560 struct cgraph_node
*
3561 try_speculative_devirtualization (tree otr_type
, HOST_WIDE_INT otr_token
,
3562 ipa_polymorphic_call_context ctx
)
/* Speculative target query (last argument true).  */
3564 vec
<cgraph_node
*>targets
3565 = possible_polymorphic_call_targets
3566 (otr_type
, otr_token
, ctx
, NULL
, NULL
, true);
3568 struct cgraph_node
*likely_target
= NULL
;
/* Accept only when exactly one target passes likely_target_p; a second
   hit bails out (that branch's lines were dropped in extraction).  */
3570 for (i
= 0; i
< targets
.length (); i
++)
3571 if (likely_target_p (targets
[i
]))
3575 likely_target
= targets
[i
];
/* Reject targets without a local definition or that are external.  */
3578 ||!likely_target
->definition
3579 || DECL_EXTERNAL (likely_target
->decl
))
3582 /* Don't use an implicitly-declared destructor (c++/58678). */
3583 struct cgraph_node
*non_thunk_target
3584 = likely_target
->function_symbol ();
3585 if (DECL_ARTIFICIAL (non_thunk_target
->decl
))
/* Interposable + discardable targets may vanish or change at link
   time; speculation against them is unsafe.  */
3587 if (likely_target
->get_availability () <= AVAIL_INTERPOSABLE
3588 && likely_target
->can_be_discarded_p ())
3590 return likely_target
;
3593 /* The ipa-devirt pass.
3594 When polymorphic call has only one likely target in the unit,
3595 turn it into a speculative call. */
/* Also emits -Wsuggest-final-types / -Wsuggest-final-methods warnings
   and drops polymorphic info ipa-prop cannot use.  Returns
   TODO_remove_functions when calls were devirtualized or info dropped,
   else 0.  The n* counters below feed the summary dump at the end.  */
3600 struct cgraph_node
*n
;
3601 hash_set
<void *> bad_call_targets
;
3602 struct cgraph_edge
*e
;
3604 int npolymorphic
= 0, nspeculated
= 0, nconverted
= 0, ncold
= 0;
3605 int nmultiple
= 0, noverwritable
= 0, ndevirtualized
= 0, nnotdefined
= 0;
3606 int nwrong
= 0, nok
= 0, nexternal
= 0, nartificial
= 0;
3613 dump_type_inheritance_graph (dump_file
);
3615 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3616 This is implemented by setting up final_warning_records that are updated
3617 by get_polymorphic_call_targets.
3618 We need to clear cache in this case to trigger recomputation of all
   entries.  */
3620 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3622 final_warning_records
= new (final_warning_record
);
3623 final_warning_records
->dyn_count
= profile_count::zero ();
3624 final_warning_records
->grow_type_warnings (odr_types
.length ());
3625 free_polymorphic_call_targets_hash ();
/* Main walk: every polymorphic indirect call of every defined function
   with -fdevirtualize enabled.  */
3628 FOR_EACH_DEFINED_FUNCTION (n
)
3630 bool update
= false;
3631 if (!opt_for_fn (n
->decl
, flag_devirtualize
))
3633 if (dump_file
&& n
->indirect_calls
)
3634 fprintf (dump_file
, "\n\nProcesing function %s\n",
3636 for (e
= n
->indirect_calls
; e
; e
= e
->next_callee
)
3637 if (e
->indirect_info
->polymorphic
)
3639 struct cgraph_node
*likely_target
= NULL
;
/* Let warning machinery weight this call by its profile count.  */
3643 if (final_warning_records
)
3644 final_warning_records
->dyn_count
= e
->count
.ipa ();
3646 vec
<cgraph_node
*>targets
3647 = possible_polymorphic_call_targets
3648 (e
, &final
, &cache_token
, true);
3651 /* Trigger warnings by calculating non-speculative targets. */
3652 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3653 possible_polymorphic_call_targets (e
);
3656 dump_possible_polymorphic_call_targets
3657 (dump_file
, e
, (dump_flags
& TDF_DETAILS
));
3661 /* See if the call can be devirtualized by means of ipa-prop's
3662 polymorphic call context propagation. If not, we can just
3663 forget about this call being polymorphic and avoid some heavy
3664 lifting in remove_unreachable_nodes that will otherwise try to
3665 keep all possible targets alive until inlining and in the inliner
3668 This may need to be revisited once we add further ways to use
3669 the may edges, but it is a reasonable thing to do right now. */
3671 if ((e
->indirect_info
->param_index
== -1
3672 || (!opt_for_fn (n
->decl
, flag_devirtualize_speculatively
)
3673 && e
->indirect_info
->vptr_changed
))
3674 && !flag_ltrans_devirtualize
)
3676 e
->indirect_info
->polymorphic
= false;
3679 fprintf (dump_file
, "Dropping polymorphic call info;"
3680 " it cannot be used by ipa-prop\n");
3683 if (!opt_for_fn (n
->decl
, flag_devirtualize_speculatively
))
/* Skip cold calls — speculation would not pay off.  */
3686 if (!e
->maybe_hot_p ())
3689 fprintf (dump_file
, "Call is cold\n\n");
3696 fprintf (dump_file
, "Call is already speculated\n\n");
3699 /* When dumping see if we agree with speculation. */
/* Reuse of a cache token known to yield no usable target.  */
3703 if (bad_call_targets
.contains (cache_token
))
3706 fprintf (dump_file
, "Target list is known to be useless\n\n");
/* Find the unique likely target; two hits reset it to NULL.  */
3710 for (i
= 0; i
< targets
.length (); i
++)
3711 if (likely_target_p (targets
[i
]))
3715 likely_target
= NULL
;
3717 fprintf (dump_file
, "More than one likely target\n\n");
3721 likely_target
= targets
[i
];
/* Remember fruitless target lists so later calls sharing the same
   cache token skip the scan.  */
3725 bad_call_targets
.add (cache_token
);
3728 /* This is reached only when dumping; check if we agree or disagree
3729 with the speculation. */
3732 struct cgraph_edge
*e2
;
3733 struct ipa_ref
*ref
;
3734 e
->speculative_call_info (e2
, e
, ref
);
3735 if (e2
->callee
->ultimate_alias_target ()
3736 == likely_target
->ultimate_alias_target ())
3738 fprintf (dump_file
, "We agree with speculation\n\n");
3743 fprintf (dump_file
, "We disagree with speculation\n\n");
/* Same sanity checks as try_speculative_devirtualization: require a
   local definition, reject external/artificial/interposable targets.  */
3748 if (!likely_target
->definition
)
3751 fprintf (dump_file
, "Target is not a definition\n\n");
3755 /* Do not introduce new references to external symbols. While we
3756 can handle these just well, it is common for programs to
3757 incorrectly with headers defining methods they are linked
3759 if (DECL_EXTERNAL (likely_target
->decl
))
3762 fprintf (dump_file
, "Target is external\n\n");
3766 /* Don't use an implicitly-declared destructor (c++/58678). */
3767 struct cgraph_node
*non_thunk_target
3768 = likely_target
->function_symbol ();
3769 if (DECL_ARTIFICIAL (non_thunk_target
->decl
))
3772 fprintf (dump_file
, "Target is artificial\n\n");
3776 if (likely_target
->get_availability () <= AVAIL_INTERPOSABLE
3777 && likely_target
->can_be_discarded_p ())
3780 fprintf (dump_file
, "Target is overwritable\n\n");
/* All checks passed (modulo the debug counter): perform the
   speculative devirtualization.  */
3784 else if (dbg_cnt (devirt
))
3786 if (dump_enabled_p ())
3788 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, e
->call_stmt
,
3789 "speculatively devirtualizing call "
3792 likely_target
->dump_name ());
/* Redirect to a noninterposable alias when the target itself could
   be discarded/interposed at link time.  */
3794 if (!likely_target
->can_be_discarded_p ())
3797 alias
= dyn_cast
<cgraph_node
*> (likely_target
->noninterposable_alias ());
3799 likely_target
= alias
;
/* Attach the speculative edge with 80% of the call count.  */
3804 (likely_target
, e
->count
.apply_scale (8, 10));
3808 ipa_update_overall_fn_summary (n
);
/* Reporting phase: sort accumulated warning records and emit the
   -Wsuggest-final-types / -Wsuggest-final-methods diagnostics, with
   profile counts appended when available.  */
3810 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3812 if (warn_suggest_final_types
)
3814 final_warning_records
->type_warnings
.qsort (type_warning_cmp
);
3815 for (unsigned int i
= 0;
3816 i
< final_warning_records
->type_warnings
.length (); i
++)
3817 if (final_warning_records
->type_warnings
[i
].count
)
3819 tree type
= final_warning_records
->type_warnings
[i
].type
;
3820 int count
= final_warning_records
->type_warnings
[i
].count
;
3821 profile_count dyn_count
3822 = final_warning_records
->type_warnings
[i
].dyn_count
;
/* Without profile data emit the plain form; with it, include the
   execution count.  */
3824 if (!(dyn_count
> 0))
3825 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type
)),
3826 OPT_Wsuggest_final_types
, count
,
3827 "Declaring type %qD final "
3828 "would enable devirtualization of %i call",
3829 "Declaring type %qD final "
3830 "would enable devirtualization of %i calls",
3834 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type
)),
3835 OPT_Wsuggest_final_types
, count
,
3836 "Declaring type %qD final "
3837 "would enable devirtualization of %i call "
3838 "executed %lli times",
3839 "Declaring type %qD final "
3840 "would enable devirtualization of %i calls "
3841 "executed %lli times",
3844 (long long) dyn_count
.to_gcov_type ());
3848 if (warn_suggest_final_methods
)
3850 auto_vec
<const decl_warn_count
*> decl_warnings_vec
;
/* Flatten the hash map into a vector (add_decl_warning) so it can
   be sorted with decl_warning_cmp.  */
3852 final_warning_records
->decl_warnings
.traverse
3853 <vec
<const decl_warn_count
*> *, add_decl_warning
> (&decl_warnings_vec
);
3854 decl_warnings_vec
.qsort (decl_warning_cmp
);
3855 for (unsigned int i
= 0; i
< decl_warnings_vec
.length (); i
++)
3857 tree decl
= decl_warnings_vec
[i
]->decl
;
3858 int count
= decl_warnings_vec
[i
]->count
;
3859 profile_count dyn_count
3860 = decl_warnings_vec
[i
]->dyn_count
;
/* Four variants: destructor vs. plain method, with and without a
   profile-derived execution count.  */
3862 if (!(dyn_count
> 0))
3863 if (DECL_CXX_DESTRUCTOR_P (decl
))
3864 warning_n (DECL_SOURCE_LOCATION (decl
),
3865 OPT_Wsuggest_final_methods
, count
,
3866 "Declaring virtual destructor of %qD final "
3867 "would enable devirtualization of %i call",
3868 "Declaring virtual destructor of %qD final "
3869 "would enable devirtualization of %i calls",
3870 DECL_CONTEXT (decl
), count
);
3872 warning_n (DECL_SOURCE_LOCATION (decl
),
3873 OPT_Wsuggest_final_methods
, count
,
3874 "Declaring method %qD final "
3875 "would enable devirtualization of %i call",
3876 "Declaring method %qD final "
3877 "would enable devirtualization of %i calls",
3879 else if (DECL_CXX_DESTRUCTOR_P (decl
))
3880 warning_n (DECL_SOURCE_LOCATION (decl
),
3881 OPT_Wsuggest_final_methods
, count
,
3882 "Declaring virtual destructor of %qD final "
3883 "would enable devirtualization of %i call "
3884 "executed %lli times",
3885 "Declaring virtual destructor of %qD final "
3886 "would enable devirtualization of %i calls "
3887 "executed %lli times",
3888 DECL_CONTEXT (decl
), count
,
3889 (long long)dyn_count
.to_gcov_type ());
3891 warning_n (DECL_SOURCE_LOCATION (decl
),
3892 OPT_Wsuggest_final_methods
, count
,
3893 "Declaring method %qD final "
3894 "would enable devirtualization of %i call "
3895 "executed %lli times",
3896 "Declaring method %qD final "
3897 "would enable devirtualization of %i calls "
3898 "executed %lli times",
3900 (long long)dyn_count
.to_gcov_type ());
/* Warning records are only needed for this pass invocation.  */
3904 delete (final_warning_records
);
3905 final_warning_records
= 0;
/* Summary statistics dump and the pass's TODO result.  */
3910 "%i polymorphic calls, %i devirtualized,"
3911 " %i speculatively devirtualized, %i cold\n"
3912 "%i have multiple targets, %i overwritable,"
3913 " %i already speculated (%i agree, %i disagree),"
3914 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3915 npolymorphic
, ndevirtualized
, nconverted
, ncold
,
3916 nmultiple
, noverwritable
, nspeculated
, nok
, nwrong
,
3917 nexternal
, nnotdefined
, nartificial
, ndropped
);
3918 return ndevirtualized
|| ndropped
? TODO_remove_functions
: 0;
/* Pass metadata for the "devirt" IPA pass: timed under TV_IPA_DEVIRT,
   no property requirements, dumps the symbol table when finished.  */
3923 const pass_data pass_data_ipa_devirt
=
3925 IPA_PASS
, /* type */
3926 "devirt", /* name */
3927 OPTGROUP_NONE
, /* optinfo_flags */
3928 TV_IPA_DEVIRT
, /* tv_id */
3929 0, /* properties_required */
3930 0, /* properties_provided */
3931 0, /* properties_destroyed */
3932 0, /* todo_flags_start */
3933 ( TODO_dump_symtab
), /* todo_flags_finish */
/* Pass wrapper: plain IPA pass with no summaries or per-function
   transform hooks — all work happens in execute () -> ipa_devirt ().  */
3936 class pass_ipa_devirt
: public ipa_opt_pass_d
3939 pass_ipa_devirt (gcc::context
*ctxt
)
3940 : ipa_opt_pass_d (pass_data_ipa_devirt
, ctxt
,
3941 NULL
, /* generate_summary */
3942 NULL
, /* write_summary */
3943 NULL
, /* read_summary */
3944 NULL
, /* write_optimization_summary */
3945 NULL
, /* read_optimization_summary */
3946 NULL
, /* stmt_fixup */
3947 0, /* function_transform_todo_flags_start */
3948 NULL
, /* function_transform */
3949 NULL
) /* variable_transform */
3952 /* opt_pass methods: */
/* Gate: run when devirtualization is enabled and either speculative
   devirtualization or one of the -Wsuggest-final-* warnings is on.  */
3953 virtual bool gate (function
*)
3955 /* In LTO, always run the IPA passes and decide on function basis if the
   pass is relevant.  */
3959 return (flag_devirtualize
3960 && (flag_devirtualize_speculatively
3961 || (warn_suggest_final_methods
3962 || warn_suggest_final_types
))
3966 virtual unsigned int execute (function
*) { return ipa_devirt (); }
3968 }; // class pass_ipa_devirt
/* Factory used by the pass manager to instantiate the devirt pass.
   NOTE(review): the return-type line was dropped in extraction.  */
3973 make_pass_ipa_devirt (gcc::context
*ctxt
)
3975 return new pass_ipa_devirt (ctxt
);
3978 #include "gt-ipa-devirt.h"