1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
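
     As a small illustration (not taken from the GCC sources), the
     following two translation units violate rule 3, because the two
     definitions of S are not identical:

	// translation unit 1
	struct S { int i; };
	// translation unit 2 -- ODR violation: the field type differs
	struct S { long i; };

     ipa-devirt detects a subset of such violations at link time and
     reports them via -Wodr.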
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
 42      otr_type is the type of the class whose method is called.
 43      otr_token is the index into the virtual table where the address is taken.
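
     For example (an illustrative sketch, not an exact dump), for

	struct A { virtual int f (); };
	int call (A *a) { return a->f (); }

     the GIMPLE call is represented roughly as
     OBJ_TYPE_REF (tmp; (struct A *) a -> 0) (a), where otr_type is A
     and otr_token is 0 because f occupies the first slot of A's
     virtual table.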
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
 55      vector.  Members of this vector are not the BINFOs associated
 56      with the base types.  Rather they are new copies of BINFOs
 57      (base BINFOs).  Their virtual tables may differ from the
 58      virtual table of the base type.  Also, BINFO_OFFSET specifies the
 59      offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
 62      and the BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
 63      inheritance the individual virtual tables are pointed to by the
 64      BINFO_VTABLE of the base binfos (which differs from the BINFO_VTABLE of
 65      the binfo associated with the base type).
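
     For instance (illustration only), given

	struct A { virtual void f (); };
	struct B { virtual void g (); };
	struct C : A, B { };

     TYPE_BINFO (C) has two base BINFOs, new copies made for A and B.
     The copy for B has a non-zero BINFO_OFFSET (B's subobject does not
     start at offset 0 in C) and its BINFO_VTABLE refers to the part of
     C's virtual table used when a C object is viewed as a B, which
     differs from the virtual table of the standalone type B.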
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
 73      This is the index of a virtual method in the virtual table associated
 74      with the type defining it.  The token can be looked up from an OBJ_TYPE_REF
 75      or from the DECL_VINDEX of a given virtual method.
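
     For example (illustration only), with

	struct A { virtual void f (); virtual void g (); };

     f gets token 0 and g gets token 1: DECL_VINDEX of g is 1, and a
     call a->g () is represented with otr_token 1.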
77 polymorphic (indirect) call
 78      This is the callgraph representation of a virtual method call.  Every
 79      polymorphic call contains the otr_type and otr_token taken from the
 80      original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
 94     Vertices are odr_type structures.  Every odr_type may correspond
 95     to one or more tree type nodes that are equivalent by the ODR.
 96     (Multiple type nodes appear only with link-time optimization.)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
 102    possible_polymorphic_call_targets returns, given the parameters found in an
 103    indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
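
    As a rough sketch (not the exact implementation), when the analysis
    finds a single likely target A::f for a polymorphic call obj->f (),
    the call is turned into a speculative direct call that is expanded
    later along the lines of

	if (obj->vptr[token] == &A::f)	// guard inserted by the compiler
	  A::f (obj);			// direct, inlinable call
	else
	  obj->f ();			// fallback indirect call

    which later passes can then inline and optimize.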
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "hash-set.h"
113 #include "machmode.h"
114 #include "hash-map.h"
115 #include "vec.h"
116 #include "double-int.h"
117 #include "input.h"
118 #include "alias.h"
119 #include "symtab.h"
120 #include "wide-int.h"
121 #include "inchash.h"
122 #include "tree.h"
123 #include "fold-const.h"
124 #include "print-tree.h"
125 #include "calls.h"
126 #include "predict.h"
127 #include "basic-block.h"
128 #include "is-a.h"
129 #include "plugin-api.h"
130 #include "hard-reg-set.h"
131 #include "function.h"
132 #include "ipa-ref.h"
133 #include "cgraph.h"
134 #include "hashtab.h"
135 #include "rtl.h"
136 #include "flags.h"
137 #include "statistics.h"
138 #include "real.h"
139 #include "fixed-value.h"
140 #include "insn-config.h"
141 #include "expmed.h"
142 #include "dojump.h"
143 #include "explow.h"
144 #include "emit-rtl.h"
145 #include "varasm.h"
146 #include "stmt.h"
147 #include "expr.h"
148 #include "tree-pass.h"
149 #include "target.h"
150 #include "hash-table.h"
151 #include "tree-pretty-print.h"
152 #include "ipa-utils.h"
153 #include "tree-ssa-alias.h"
154 #include "internal-fn.h"
155 #include "gimple-fold.h"
156 #include "gimple-expr.h"
157 #include "gimple.h"
158 #include "alloc-pool.h"
159 #include "symbol-summary.h"
160 #include "ipa-prop.h"
161 #include "ipa-inline.h"
162 #include "diagnostic.h"
163 #include "tree-dfa.h"
164 #include "demangle.h"
165 #include "dbgcnt.h"
166 #include "gimple-pretty-print.h"
167 #include "stor-layout.h"
168 #include "intl.h"
170 /* Hash based set of pairs of types. */
171 typedef struct
173 tree first;
174 tree second;
175 } type_pair;
177 struct pair_traits : default_hashset_traits
179 static hashval_t
180 hash (type_pair p)
182 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
184 static bool
185 is_empty (type_pair p)
187 return p.first == NULL;
189 static bool
190 is_deleted (type_pair p ATTRIBUTE_UNUSED)
192 return false;
194 static bool
195 equal (const type_pair &a, const type_pair &b)
197 return a.first==b.first && a.second == b.second;
199 static void
200 mark_empty (type_pair &e)
202 e.first = NULL;
206 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
207 hash_set<type_pair,pair_traits> *);
209 static bool odr_violation_reported = false;
212 /* Pointer set of all call targets appearing in the cache. */
213 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
215 /* The node of type inheritance graph. For each type unique in
216 One Definition Rule (ODR) sense, we produce one node linking all
217 main variants of types equivalent to it, bases and derived types. */
219 struct GTY(()) odr_type_d
221 /* leader type. */
222 tree type;
223 /* All bases; built only for main variants of types. */
224 vec<odr_type> GTY((skip)) bases;
225 /* All derived types with virtual methods seen in unit;
226 built only for main variants of types. */
227 vec<odr_type> GTY((skip)) derived_types;
229 /* All equivalent types, if more than one. */
230 vec<tree, va_gc> *types;
231 /* Set of all equivalent types, if NON-NULL. */
232 hash_set<tree> * GTY((skip)) types_set;
234 /* Unique ID indexing the type in odr_types array. */
235 int id;
236 /* Is it in anonymous namespace? */
237 bool anonymous_namespace;
238 /* Do we know about all derivations of given type? */
239 bool all_derivations_known;
240 /* Did we report ODR violation here? */
241 bool odr_violated;
244 /* Return TRUE if all derived types of T are known and thus
245 we may consider the walk of derived type complete.
247 This is typically true only for final anonymous namespace types and types
248 defined within functions (that may be COMDAT and thus shared across units,
249 but with the same set of derived types). */
251 bool
252 type_all_derivations_known_p (const_tree t)
254 if (TYPE_FINAL_P (t))
255 return true;
256 if (flag_ltrans)
257 return false;
258 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
259 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
260 return true;
261 if (type_in_anonymous_namespace_p (t))
262 return true;
263 return (decl_function_context (TYPE_NAME (t)) != NULL);
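/* For illustration (not from the original sources): with the logic above,
   all derivations are considered known for

	struct A final { virtual void f (); };		// TYPE_FINAL_P
	namespace { struct B { virtual void f (); }; }	// anonymous namespace
	void fn () { struct C { virtual void f (); }; }	// function-local type

   but not for an ordinary exported polymorphic class; in the ltrans stage
   of LTO only final types qualify.  */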
266 /* Return TRUE if type's constructors are all visible. */
268 static bool
269 type_all_ctors_visible_p (tree t)
271 return !flag_ltrans
272 && symtab->state >= CONSTRUCTION
 273 /* We cannot always use type_all_derivations_known_p.
 274 For function-local types we must assume the case where
 275 the function is COMDAT and shared between units.
277 TODO: These cases are quite easy to get, but we need
278 to keep track of C++ privatizing via -Wno-weak
279 as well as the IPA privatizing. */
280 && type_in_anonymous_namespace_p (t);
283 /* Return TRUE if type may have instance. */
285 static bool
286 type_possibly_instantiated_p (tree t)
288 tree vtable;
289 varpool_node *vnode;
291 /* TODO: Add abstract types here. */
292 if (!type_all_ctors_visible_p (t))
293 return true;
295 vtable = BINFO_VTABLE (TYPE_BINFO (t));
296 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
297 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
298 vnode = varpool_node::get (vtable);
299 return vnode && vnode->definition;
302 /* One Definition Rule hashtable helpers. */
304 struct odr_hasher
306 typedef odr_type_d value_type;
307 typedef union tree_node compare_type;
308 static inline hashval_t hash (const value_type *);
309 static inline bool equal (const value_type *, const compare_type *);
310 static inline void remove (value_type *);
 313 /* Return the type that was declared with T's name, so that T is a
 314 qualified variant of it.  */
316 static inline tree
317 main_odr_variant (const_tree t)
319 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
320 return TREE_TYPE (TYPE_NAME (t));
321 /* Unnamed types and non-C++ produced types can be compared by variants. */
322 else
323 return TYPE_MAIN_VARIANT (t);
326 /* Produce hash based on type name. */
328 static hashval_t
329 hash_type_name (tree t)
331 gcc_checking_assert (main_odr_variant (t) == t);
333 /* If not in LTO, all main variants are unique, so we can do
334 pointer hash. */
335 if (!in_lto_p)
336 return htab_hash_pointer (t);
338 /* Anonymous types are unique. */
339 if (type_in_anonymous_namespace_p (t))
340 return htab_hash_pointer (t);
342 /* ODR types have name specified. */
343 if (TYPE_NAME (t)
344 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
345 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
 347 /* For polymorphic types that were compiled with -fno-lto-odr-type-merging
348 we can simply hash the virtual table. */
349 if (TREE_CODE (t) == RECORD_TYPE
350 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
352 tree v = BINFO_VTABLE (TYPE_BINFO (t));
353 hashval_t hash = 0;
355 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
357 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
358 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
361 v = DECL_ASSEMBLER_NAME (v);
362 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
363 return hash;
366 /* Builtin types may appear as main variants of ODR types and are unique.
367 Sanity check we do not get anything that looks non-builtin. */
368 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
369 || TREE_CODE (t) == VOID_TYPE
370 || TREE_CODE (t) == COMPLEX_TYPE
371 || TREE_CODE (t) == REAL_TYPE
372 || TREE_CODE (t) == POINTER_TYPE);
373 return htab_hash_pointer (t);
376 /* Return the computed hashcode for ODR_TYPE. */
378 inline hashval_t
379 odr_hasher::hash (const value_type *odr_type)
381 return hash_type_name (odr_type->type);
384 /* For languages with One Definition Rule, work out if
385 types are the same based on their name.
387 This is non-trivial for LTO where minor differences in
 388 the type representation may have prevented type merging
 389 from merging two copies of an otherwise equivalent type.
391 Until we start streaming mangled type names, this function works
392 only for polymorphic types. */
394 bool
395 types_same_for_odr (const_tree type1, const_tree type2)
397 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
399 type1 = main_odr_variant (type1);
400 type2 = main_odr_variant (type2);
402 if (type1 == type2)
403 return true;
405 if (!in_lto_p)
406 return false;
408 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
409 on the corresponding TYPE_STUB_DECL. */
410 if (type_in_anonymous_namespace_p (type1)
411 || type_in_anonymous_namespace_p (type2))
412 return false;
415 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
417 Ideally we should never need types without ODR names here. It can however
418 happen in two cases:
420 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
421 Here testing for equivalence is safe, since their MAIN_VARIANTs are
422 unique.
423 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
424 establish precise ODR equivalency, but for correctness we care only
425 about equivalency on complete polymorphic types. For these we can
426 compare assembler names of their virtual tables. */
427 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
428 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
430 /* See if types are obviously different (i.e. different codes
431 or polymorphic wrt non-polymorphic). This is not strictly correct
432 for ODR violating programs, but we can't do better without streaming
433 ODR names. */
434 if (TREE_CODE (type1) != TREE_CODE (type2))
435 return false;
436 if (TREE_CODE (type1) == RECORD_TYPE
 437 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
438 return false;
439 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
440 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
441 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
442 return false;
444 /* At the moment we have no way to establish ODR equivalence at LTO
445 other than comparing virtual table pointers of polymorphic types.
446 Eventually we should start saving mangled names in TYPE_NAME.
447 Then this condition will become non-trivial. */
449 if (TREE_CODE (type1) == RECORD_TYPE
450 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
451 && BINFO_VTABLE (TYPE_BINFO (type1))
452 && BINFO_VTABLE (TYPE_BINFO (type2)))
454 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
455 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
456 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
457 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
458 return (operand_equal_p (TREE_OPERAND (v1, 1),
459 TREE_OPERAND (v2, 1), 0)
460 && DECL_ASSEMBLER_NAME
461 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
462 == DECL_ASSEMBLER_NAME
463 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
465 gcc_unreachable ();
467 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
468 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
471 /* Return true if we can decide on ODR equivalency.
 473 In non-LTO this is always decidable; in LTO, however, it depends on whether
 474 the type has ODR info attached.  */
476 bool
477 types_odr_comparable (tree t1, tree t2)
479 return (!in_lto_p
480 || main_odr_variant (t1) == main_odr_variant (t2)
481 || (odr_type_p (t1) && odr_type_p (t2))
482 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
483 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
484 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
485 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
488 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
489 known, be conservative and return false. */
491 bool
492 types_must_be_same_for_odr (tree t1, tree t2)
494 if (types_odr_comparable (t1, t2))
495 return types_same_for_odr (t1, t2);
496 else
497 return main_odr_variant (t1) == main_odr_variant (t2);
500 /* Compare types T1 and T2 and return true if they are
501 equivalent. */
503 inline bool
504 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
506 tree t2 = const_cast <tree> (ct2);
508 gcc_checking_assert (main_odr_variant (t2) == t2);
509 if (t1->type == t2)
510 return true;
511 if (!in_lto_p)
512 return false;
513 return types_same_for_odr (t1->type, t2);
516 /* Free ODR type V. */
518 inline void
519 odr_hasher::remove (value_type *v)
521 v->bases.release ();
522 v->derived_types.release ();
523 if (v->types_set)
524 delete v->types_set;
525 ggc_free (v);
528 /* ODR type hash used to look up ODR type based on tree type node. */
530 typedef hash_table<odr_hasher> odr_hash_type;
531 static odr_hash_type *odr_hash;
533 /* ODR types are also stored into ODR_TYPE vector to allow consistent
534 walking. Bases appear before derived types. Vector is garbage collected
535 so we won't end up visiting empty types. */
537 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
538 #define odr_types (*odr_types_ptr)
540 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
541 void
542 set_type_binfo (tree type, tree binfo)
544 for (; type; type = TYPE_NEXT_VARIANT (type))
545 if (COMPLETE_TYPE_P (type))
546 TYPE_BINFO (type) = binfo;
547 else
548 gcc_assert (!TYPE_BINFO (type));
 551 /* Compare T1 and T2 based on name or structure.  */
553 static bool
554 odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<type_pair,pair_traits> *visited)
556 bool an1, an2;
558 /* This can happen in incomplete types that should be handled earlier. */
559 gcc_assert (t1 && t2);
561 t1 = main_odr_variant (t1);
562 t2 = main_odr_variant (t2);
563 if (t1 == t2)
564 return true;
566 /* Anonymous namespace types must match exactly. */
567 an1 = type_in_anonymous_namespace_p (t1);
568 an2 = type_in_anonymous_namespace_p (t2);
569 if (an1 != an2 || an1)
570 return false;
572 /* For ODR types be sure to compare their names.
573 To support -wno-odr-type-merging we allow one type to be non-ODR
 574 and the other ODR even though it is a violation.  */
575 if (types_odr_comparable (t1, t2))
577 if (!types_same_for_odr (t1, t2))
578 return false;
579 /* Limit recursion: If subtypes are ODR types and we know
580 that they are same, be happy. */
581 if (!get_odr_type (t1, true)->odr_violated)
582 return true;
585 /* Component types, builtins and possibly violating ODR types
586 have to be compared structurally. */
587 if (TREE_CODE (t1) != TREE_CODE (t2))
588 return false;
589 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
590 return false;
591 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
592 return false;
594 type_pair pair={t1,t2};
595 if (TYPE_UID (t1) > TYPE_UID (t2))
597 pair.first = t2;
598 pair.second = t1;
600 if (visited->add (pair))
601 return true;
602 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
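/* For illustration (not from the original sources): the VISITED set of
   type pairs bounds the recursion for self-referential types such as

	struct node { struct node *next; int payload; };

   Comparing two copies of node recurses into the pointed-to type, meets
   the pair (node, node) again, and the add () above returns true, so the
   comparison terminates.  */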
605 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
606 violation warnings. */
608 void
609 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
611 int n1, n2;
612 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
614 odr_violation_reported = true;
615 if (DECL_VIRTUAL_P (prevailing->decl))
617 varpool_node *tmp = prevailing;
618 prevailing = vtable;
619 vtable = tmp;
621 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
622 OPT_Wodr,
623 "virtual table of type %qD violates one definition rule",
624 DECL_CONTEXT (vtable->decl)))
625 inform (DECL_SOURCE_LOCATION (prevailing->decl),
626 "variable of same assembler name as the virtual table is "
627 "defined in another translation unit");
628 return;
630 if (!prevailing->definition || !vtable->definition)
631 return;
632 for (n1 = 0, n2 = 0; true; n1++, n2++)
634 struct ipa_ref *ref1, *ref2;
635 bool end1, end2;
636 end1 = !prevailing->iterate_reference (n1, ref1);
637 end2 = !vtable->iterate_reference (n2, ref2);
638 if (end1 && end2)
639 return;
640 if (!end1 && !end2
641 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
642 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
643 && !n2
644 && !DECL_VIRTUAL_P (ref2->referred->decl)
645 && DECL_VIRTUAL_P (ref1->referred->decl))
647 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
648 "virtual table of type %qD contains RTTI information",
649 DECL_CONTEXT (vtable->decl)))
651 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
652 "but is prevailed by one without from other translation unit");
653 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
654 "RTTI will not work on this type");
656 n2++;
657 end2 = !vtable->iterate_reference (n2, ref2);
659 if (!end1 && !end2
660 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
661 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
662 && !n1
663 && !DECL_VIRTUAL_P (ref1->referred->decl)
664 && DECL_VIRTUAL_P (ref2->referred->decl))
666 n1++;
 667 end1 = !prevailing->iterate_reference (n1, ref1);
669 if (end1 || end2)
671 if (end1)
673 varpool_node *tmp = prevailing;
674 prevailing = vtable;
675 vtable = tmp;
676 ref1 = ref2;
678 if (warning_at (DECL_SOURCE_LOCATION
679 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
680 "virtual table of type %qD violates "
681 "one definition rule",
682 DECL_CONTEXT (vtable->decl)))
684 inform (DECL_SOURCE_LOCATION
685 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
686 "the conflicting type defined in another translation "
687 "unit");
688 inform (DECL_SOURCE_LOCATION
689 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
690 "contains additional virtual method %qD",
691 ref1->referred->decl);
693 return;
695 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
696 != DECL_ASSEMBLER_NAME (ref2->referred->decl))
698 if (warning_at (DECL_SOURCE_LOCATION
699 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
700 "virtual table of type %qD violates "
701 "one definition rule ",
702 DECL_CONTEXT (vtable->decl)))
704 inform (DECL_SOURCE_LOCATION
705 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
706 "the conflicting type defined in another translation "
707 "unit");
708 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
709 "virtual method %qD", ref1->referred->decl);
710 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
711 "ought to match virtual method %qD but does not",
712 ref2->referred->decl);
713 return;
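/* For illustration (not from the original sources): a mismatch the
   function above diagnoses is two units compiling the same class with
   different sets of virtual methods, e.g.

	// translation unit 1
	struct S { virtual void f (); };
	// translation unit 2
	struct S { virtual void f (); virtual void g (); };

   One virtual table then references an extra virtual method, and the
   walk above reports "virtual table of type S violates one definition
   rule" and points at the extra method.  */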
719 /* Output ODR violation warning about T1 and T2 with REASON.
720 Display location of ST1 and ST2 if REASON speaks about field or
721 method of the type.
722 If WARN is false, do nothing. Set WARNED if warning was indeed
723 output. */
725 void
726 warn_odr (tree t1, tree t2, tree st1, tree st2,
727 bool warn, bool *warned, const char *reason)
729 tree decl2 = TYPE_NAME (t2);
731 if (!warn)
732 return;
733 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
734 "type %qT violates one definition rule",
735 t1))
736 return;
737 if (!st1 && !st2)
 739 /* For FIELD_DECLs also support the case where one of the fields is
 740 NULL - this is used when the structures have a mismatching number of
 741 elements.  */
742 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
744 inform (DECL_SOURCE_LOCATION (decl2),
745 "a different type is defined in another translation unit");
746 if (!st1)
748 st1 = st2;
749 st2 = NULL;
751 inform (DECL_SOURCE_LOCATION (st1),
752 "the first difference of corresponding definitions is field %qD",
753 st1);
754 if (st2)
755 decl2 = st2;
757 else if (TREE_CODE (st1) == FUNCTION_DECL)
759 inform (DECL_SOURCE_LOCATION (decl2),
760 "a different type is defined in another translation unit");
761 inform (DECL_SOURCE_LOCATION (st1),
762 "the first difference of corresponding definitions is method %qD",
763 st1);
764 decl2 = st2;
766 else
767 return;
768 inform (DECL_SOURCE_LOCATION (decl2), reason);
770 if (warned)
771 *warned = true;
774 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
 775 because they are used in the same place in ODR-matching types.
776 They are not; inform the user. */
778 void
779 warn_types_mismatch (tree t1, tree t2)
781 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
782 return;
 783 /* In Firefox it is a common bug to have the same types but in
 784 different namespaces.  Be a bit more informative about
 785 this.  */
786 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
787 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
788 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
789 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
790 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
791 DECL_NAME (TYPE_CONTEXT (t2))))))
792 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
793 "type %qT should match type %qT but is defined "
794 "in different namespace ",
795 t1, t2);
796 else
797 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
798 "type %qT should match type %qT",
799 t1, t2);
800 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
801 "the incompatible type is defined here");
804 /* Compare T1 and T2, report ODR violations if WARN is true and set
805 WARNED to true if anything is reported. Return true if types match.
806 If true is returned, the types are also compatible in the sense of
807 gimple_canonical_types_compatible_p. */
809 static bool
810 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
811 hash_set<type_pair,pair_traits> *visited)
813 /* Check first for the obvious case of pointer identity. */
814 if (t1 == t2)
815 return true;
816 gcc_assert (!type_in_anonymous_namespace_p (t1));
817 gcc_assert (!type_in_anonymous_namespace_p (t2));
819 /* Can't be the same type if the types don't have the same code. */
820 if (TREE_CODE (t1) != TREE_CODE (t2))
822 warn_odr (t1, t2, NULL, NULL, warn, warned,
823 G_("a different type is defined in another translation unit"));
824 return false;
827 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
829 warn_odr (t1, t2, NULL, NULL, warn, warned,
830 G_("a type with different qualifiers is defined in another "
831 "translation unit"));
832 return false;
835 if (comp_type_attributes (t1, t2) != 1)
837 warn_odr (t1, t2, NULL, NULL, warn, warned,
838 G_("a type with attributes "
839 "is defined in another translation unit"));
840 return false;
843 if (TREE_CODE (t1) == ENUMERAL_TYPE)
845 tree v1, v2;
846 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
847 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
849 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
851 warn_odr (t1, t2, NULL, NULL, warn, warned,
852 G_("an enum with different value name"
853 " is defined in another translation unit"));
854 return false;
856 if (TREE_VALUE (v1) != TREE_VALUE (v2)
857 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
858 DECL_INITIAL (TREE_VALUE (v2)), 0))
860 warn_odr (t1, t2, NULL, NULL, warn, warned,
861 G_("an enum with different values is defined"
862 " in another translation unit"));
863 return false;
866 if (v1 || v2)
868 warn_odr (t1, t2, NULL, NULL, warn, warned,
869 G_("an enum with mismatching number of values "
870 "is defined in another translation unit"));
871 return false;
875 /* Non-aggregate types can be handled cheaply. */
876 if (INTEGRAL_TYPE_P (t1)
877 || SCALAR_FLOAT_TYPE_P (t1)
878 || FIXED_POINT_TYPE_P (t1)
879 || TREE_CODE (t1) == VECTOR_TYPE
880 || TREE_CODE (t1) == COMPLEX_TYPE
881 || TREE_CODE (t1) == OFFSET_TYPE
882 || POINTER_TYPE_P (t1))
884 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
886 warn_odr (t1, t2, NULL, NULL, warn, warned,
887 G_("a type with different precision is defined "
888 "in another translation unit"));
889 return false;
891 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
893 warn_odr (t1, t2, NULL, NULL, warn, warned,
894 G_("a type with different signedness is defined "
895 "in another translation unit"));
896 return false;
899 if (TREE_CODE (t1) == INTEGER_TYPE
900 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
902 /* char WRT uint_8? */
903 warn_odr (t1, t2, NULL, NULL, warn, warned,
904 G_("a different type is defined in another "
905 "translation unit"));
906 return false;
909 /* For canonical type comparisons we do not want to build SCCs
910 so we cannot compare pointed-to types. But we can, for now,
911 require the same pointed-to type kind and match what
912 useless_type_conversion_p would do. */
913 if (POINTER_TYPE_P (t1))
915 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
916 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
918 warn_odr (t1, t2, NULL, NULL, warn, warned,
919 G_("it is defined as a pointer in different address "
920 "space in another translation unit"));
921 return false;
924 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
926 warn_odr (t1, t2, NULL, NULL, warn, warned,
927 G_("it is defined as a pointer to different type "
928 "in another translation unit"));
929 if (warn && warned)
930 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
931 return false;
935 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
936 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
938 /* Probably specific enough. */
939 warn_odr (t1, t2, NULL, NULL, warn, warned,
940 G_("a different type is defined "
941 "in another translation unit"));
942 if (warn && warned)
943 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
944 return false;
947 /* Do type-specific comparisons. */
948 else switch (TREE_CODE (t1))
950 case ARRAY_TYPE:
952 /* Array types are the same if the element types are the same and
 953 the number of elements is the same.  */
954 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
956 warn_odr (t1, t2, NULL, NULL, warn, warned,
957 G_("a different type is defined in another "
958 "translation unit"));
959 if (warn && warned)
960 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
962 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
963 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
964 == TYPE_NONALIASED_COMPONENT (t2));
966 tree i1 = TYPE_DOMAIN (t1);
967 tree i2 = TYPE_DOMAIN (t2);
969 /* For an incomplete external array, the type domain can be
970 NULL_TREE. Check this condition also. */
971 if (i1 == NULL_TREE || i2 == NULL_TREE)
972 return true;
974 tree min1 = TYPE_MIN_VALUE (i1);
975 tree min2 = TYPE_MIN_VALUE (i2);
976 tree max1 = TYPE_MAX_VALUE (i1);
977 tree max2 = TYPE_MAX_VALUE (i2);
 979 /* In C++, minimums should always be 0.  */
980 gcc_assert (min1 == min2);
981 if (!operand_equal_p (max1, max2, 0))
983 warn_odr (t1, t2, NULL, NULL, warn, warned,
984 G_("an array of different size is defined "
985 "in another translation unit"));
986 return false;
989 break;
991 case METHOD_TYPE:
992 case FUNCTION_TYPE:
 993 /* Function types are the same if the return type and argument types
994 are the same. */
995 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
997 warn_odr (t1, t2, NULL, NULL, warn, warned,
998 G_("has different return value "
999 "in another translation unit"));
1000 if (warn && warned)
1001 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1002 return false;
1005 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
1006 return true;
1007 else
1009 tree parms1, parms2;
1011 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1012 parms1 && parms2;
1013 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1015 if (!odr_subtypes_equivalent_p
1016 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1018 warn_odr (t1, t2, NULL, NULL, warn, warned,
1019 G_("has different parameters in another "
1020 "translation unit"));
1021 if (warn && warned)
1022 warn_types_mismatch (TREE_VALUE (parms1),
1023 TREE_VALUE (parms2));
1024 return false;
1028 if (parms1 || parms2)
1030 warn_odr (t1, t2, NULL, NULL, warn, warned,
1031 G_("has different parameters "
1032 "in another translation unit"));
1033 return false;
1036 return true;
1039 case RECORD_TYPE:
1040 case UNION_TYPE:
1041 case QUAL_UNION_TYPE:
1043 tree f1, f2;
1045 /* For aggregate types, all the fields must be the same. */
1046 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1048 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1049 f1 || f2;
1050 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1052 /* Skip non-fields. */
1053 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1054 f1 = TREE_CHAIN (f1);
1055 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1056 f2 = TREE_CHAIN (f2);
1057 if (!f1 || !f2)
1058 break;
1059 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1060 break;
1061 if (DECL_NAME (f1) != DECL_NAME (f2)
1062 && !DECL_ARTIFICIAL (f1))
1064 warn_odr (t1, t2, f1, f2, warn, warned,
1065 G_("a field with different name is defined "
1066 "in another translation unit"));
1067 return false;
1069 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1071 /* Do not warn about artificial fields and just go into generic
1072 field mismatch warning. */
1073 if (DECL_ARTIFICIAL (f1))
1074 break;
1076 warn_odr (t1, t2, f1, f2, warn, warned,
1077 G_("a field of same name but different type "
1078 "is defined in another translation unit"));
1079 if (warn && warned)
1080 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1081 return false;
1083 if (!gimple_compare_field_offset (f1, f2))
1085 /* Do not warn about artificial fields and just go into generic
1086 field mismatch warning. */
1087 if (DECL_ARTIFICIAL (f1))
1088 break;
1089 warn_odr (t1, t2, t1, t2, warn, warned,
1090 G_("fields has different layout "
1091 "in another translation unit"));
1092 return false;
1094 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1095 == DECL_NONADDRESSABLE_P (f2));
1098 /* If one aggregate has more fields than the other, they
1099 are not the same. */
1100 if (f1 || f2)
1102 if (f1 && DECL_ARTIFICIAL (f1))
1103 f1 = NULL;
1104 if (f2 && DECL_ARTIFICIAL (f2))
1105 f2 = NULL;
1106 if (f1 || f2)
1107 warn_odr (t1, t2, f1, f2, warn, warned,
1108 G_("a type with different number of fields "
1109 "is defined in another translation unit"));
1110 /* Ideally we should never get this generic message. */
1111 else
1112 warn_odr (t1, t2, f1, f2, warn, warned,
1113 G_("a type with different memory representation "
1114 "is defined in another translation unit"));
1116 return false;
1118 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1119 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1120 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1122 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1123 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1124 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1126 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1128 warn_odr (t1, t2, f1, f2, warn, warned,
1129 G_("a different method of same type "
1130 "is defined in another translation unit"));
1131 return false;
1133 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1135 warn_odr (t1, t2, f1, f2, warn, warned,
1136 G_("s definition that differs by virtual "
1137 "keyword in another translation unit"));
1138 return false;
1140 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1142 warn_odr (t1, t2, f1, f2, warn, warned,
1143 G_("virtual table layout differs in another "
1144 "translation unit"));
1145 return false;
 1147 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1149 warn_odr (t1, t2, f1, f2, warn, warned,
1150 G_("method with incompatible type is defined "
1151 "in another translation unit"));
1152 return false;
1155 if (f1 || f2)
1157 warn_odr (t1, t2, NULL, NULL, warn, warned,
1158 G_("a type with different number of methods "
1159 "is defined in another translation unit"));
1160 return false;
1164 break;
1166 case VOID_TYPE:
1167 break;
1169 default:
1170 debug_tree (t1);
1171 gcc_unreachable ();
1174 /* Those are better to come last as they are utterly uninformative. */
1175 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1176 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1178 warn_odr (t1, t2, NULL, NULL, warn, warned,
1179 G_("a type with different size "
1180 "is defined in another translation unit"));
1181 return false;
1183 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1184 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1186 warn_odr (t1, t2, NULL, NULL, warn, warned,
1187 G_("a type with different alignment "
1188 "is defined in another translation unit"));
1189 return false;
1191 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1192 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1193 TYPE_SIZE_UNIT (t2), 0));
1194 return true;
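/* For illustration (not from the original sources): a typical mismatch
   caught by odr_types_equivalent_p is

	// translation unit 1
	struct S { int a; };
	// translation unit 2
	struct S { int a; short b; };

   The field walk above runs out of fields in one copy first, so this
   would be reported as "a type with different number of fields is
   defined in another translation unit".  */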
1197 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1198 from VAL->type. This may happen in LTO where tree merging did not merge
 1199 all variants of the same type.  It may or may not mean an ODR violation.
1200 Add it to the list of duplicates and warn on some violations. */
1202 static bool
1203 add_type_duplicate (odr_type val, tree type)
1205 bool build_bases = false;
1206 if (!val->types_set)
1207 val->types_set = new hash_set<tree>;
1209 /* Always prefer complete type to be the leader. */
1210 if ((!COMPLETE_TYPE_P (val->type) || !TYPE_BINFO (val->type))
 1211 && (COMPLETE_TYPE_P (type) && TYPE_BINFO (type)))
1213 tree tmp = type;
1215 build_bases = true;
1216 type = val->type;
1217 val->type = tmp;
1220 /* See if this duplicate is new. */
1221 if (!val->types_set->add (type))
1223 bool merge = true;
1224 bool base_mismatch = false;
1225 unsigned int i,j;
1226 bool warned = false;
1227 hash_set<type_pair,pair_traits> visited;
1229 gcc_assert (in_lto_p);
1230 vec_safe_push (val->types, type);
1232 /* First we compare memory layout. */
1233 if (!odr_types_equivalent_p (val->type, type,
1234 !flag_ltrans && !val->odr_violated,
1235 &warned, &visited))
1237 merge = false;
1238 odr_violation_reported = true;
1239 val->odr_violated = true;
1240 if (symtab->dump_file)
1242 fprintf (symtab->dump_file, "ODR violation\n");
1244 print_node (symtab->dump_file, "", val->type, 0);
1245 putc ('\n',symtab->dump_file);
1246 print_node (symtab->dump_file, "", type, 0);
1247 putc ('\n',symtab->dump_file);
1251 /* Next sanity check that bases are the same. If not, we will end
1252 up producing wrong answers. */
1253 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1254 && TREE_CODE (val->type) == RECORD_TYPE
1255 && TREE_CODE (type) == RECORD_TYPE
1256 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1258 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1259 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1261 if (!warned && !val->odr_violated)
1263 tree extra_base;
1264 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1265 "a type with the same name but different "
1266 "number of polymorphic bases is "
1267 "defined in another translation unit");
1268 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1269 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1270 extra_base = BINFO_BASE_BINFO
1271 (TYPE_BINFO (type),
1272 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1273 else
1274 extra_base = BINFO_BASE_BINFO
1275 (TYPE_BINFO (val->type),
1276 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1277 inform (DECL_SOURCE_LOCATION
1278 (TYPE_NAME (DECL_CONTEXT (extra_base))),
1279 "the extra base is defined here ");
1281 base_mismatch = true;
1283 else
1284 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1286 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1287 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1288 tree type1 = BINFO_TYPE (base1);
1289 tree type2 = BINFO_TYPE (base2);
1291 if (types_odr_comparable (type1, type2))
1293 if (!types_same_for_odr (type1, type2))
1294 base_mismatch = true;
1296 else
1298 hash_set<type_pair,pair_traits> visited;
1299 if (!odr_types_equivalent_p (type1, type2, false, NULL,
1300 &visited))
1301 base_mismatch = true;
1303 if (base_mismatch)
1305 if (!warned && !val->odr_violated)
1306 warn_odr (type, val->type, NULL, NULL,
1307 !warned, &warned,
1308 "a type with the same name but different base "
1309 "type is defined in another translation unit");
1310 warn_types_mismatch (type1, type2);
1311 break;
1313 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1315 base_mismatch = true;
1316 if (!warned && !val->odr_violated)
1317 warn_odr (type, val->type, NULL, NULL,
1318 !warned, &warned,
1319 "a type with the same name but different base "
1320 "layout is defined in another translation unit");
1321 break;
 1324 /* Sanity check that all bases will be built the same way again.  */
1325 if (!base_mismatch && val->bases.length ())
1327 unsigned int num_poly_bases = 0;
1329 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1330 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1331 (TYPE_BINFO (type), i)))
1332 num_poly_bases++;
1333 gcc_assert (num_poly_bases == val->bases.length ());
1334 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1335 i++)
1336 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1337 (TYPE_BINFO (type), i)))
1339 odr_type base = get_odr_type
1340 (BINFO_TYPE
1341 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1342 i)),
1343 true);
1344 gcc_assert (val->bases[j] == base);
1345 j++;
1348 if (base_mismatch)
1350 merge = false;
1351 odr_violation_reported = true;
1352 val->odr_violated = true;
1354 if (symtab->dump_file)
1356 fprintf (symtab->dump_file, "ODR base violation\n");
1358 print_node (symtab->dump_file, "", val->type, 0);
1359 putc ('\n',symtab->dump_file);
1360 print_node (symtab->dump_file, "", type, 0);
1361 putc ('\n',symtab->dump_file);
 1366 /* Regularize things a little.  During LTO the same type may come with
 1367 different BINFOs, either because its virtual table was
 1368 not merged by tree merging and only later at decl merging, or
 1369 because one copy comes with an external vtable while the other
 1370 comes with an internal one.  We want to merge equivalent binfos to
 1371 conserve memory and streaming overhead.
 1373 The external vtables are more harmful: they contain references
 1374 to external declarations of methods that may be defined in the
 1375 merged LTO unit.  For this reason we absolutely need to remove
 1376 them and replace them by internal variants.  Not doing so will lead
 1377 to incomplete answers from possible_polymorphic_call_targets.
 1379 FIXME: disabled for now; because ODR types are now built during
 1380 streaming in, the variants do not need to be linked to the type
 1381 yet.  We need to do the merging in a cleanup pass, to be implemented
 1382 soon.  */
1383 if (!flag_ltrans && merge
1384 && 0
1385 && TREE_CODE (val->type) == RECORD_TYPE
1386 && TREE_CODE (type) == RECORD_TYPE
1387 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1388 && TYPE_MAIN_VARIANT (type) == type
1389 && TYPE_MAIN_VARIANT (val->type) == val->type
1390 && BINFO_VTABLE (TYPE_BINFO (val->type))
1391 && BINFO_VTABLE (TYPE_BINFO (type)))
1393 tree master_binfo = TYPE_BINFO (val->type);
1394 tree v1 = BINFO_VTABLE (master_binfo);
1395 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1397 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1399 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1400 && operand_equal_p (TREE_OPERAND (v1, 1),
1401 TREE_OPERAND (v2, 1), 0));
1402 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1403 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1405 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1406 == DECL_ASSEMBLER_NAME (v2));
1408 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1410 unsigned int i;
1412 set_type_binfo (val->type, TYPE_BINFO (type));
1413 for (i = 0; i < val->types->length (); i++)
1415 if (TYPE_BINFO ((*val->types)[i])
1416 == master_binfo)
1417 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1419 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1421 else
1422 set_type_binfo (type, master_binfo);
1425 return build_bases;
1428 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1429 possibly new entry. */
1431 odr_type
1432 get_odr_type (tree type, bool insert)
1434 odr_type_d **slot;
1435 odr_type val;
1436 hashval_t hash;
1437 bool build_bases = false;
1438 bool insert_to_odr_array = false;
1439 int base_id = -1;
1441 type = main_odr_variant (type);
1443 hash = hash_type_name (type);
1444 slot = odr_hash->find_slot_with_hash (type, hash,
1445 insert ? INSERT : NO_INSERT);
1446 if (!slot)
1447 return NULL;
1449 /* See if we already have entry for type. */
1450 if (*slot)
1452 val = *slot;
1454 /* With LTO we need to support multiple tree representation of
1455 the same ODR type. */
1456 if (val->type != type)
1457 build_bases = add_type_duplicate (val, type);
1459 else
1461 val = ggc_cleared_alloc<odr_type_d> ();
1462 val->type = type;
1463 val->bases = vNULL;
1464 val->derived_types = vNULL;
1465 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1466 build_bases = COMPLETE_TYPE_P (val->type);
1467 insert_to_odr_array = true;
1470 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1471 && type == TYPE_MAIN_VARIANT (type))
1473 tree binfo = TYPE_BINFO (type);
1474 unsigned int i;
 1476 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1478 val->all_derivations_known = type_all_derivations_known_p (type);
1479 *slot = val;
1480 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
 1481 /* For now record only polymorphic types; others are
 1482 pointless for devirtualization and we cannot precisely
 1483 determine their ODR equivalency during LTO.  */
1484 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1486 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1487 i)),
1488 true);
1489 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1490 base->derived_types.safe_push (val);
1491 val->bases.safe_push (base);
1492 if (base->id > base_id)
1493 base_id = base->id;
1496 /* Ensure that type always appears after bases. */
1497 if (insert_to_odr_array)
1499 if (odr_types_ptr)
1500 val->id = odr_types.length ();
1501 vec_safe_push (odr_types_ptr, val);
1503 else if (base_id > val->id)
1505 odr_types[val->id] = 0;
 1506 /* Be sure we did not record any derived types; these may need
1507 renumbering too. */
1508 gcc_assert (val->derived_types.length() == 0);
1509 if (odr_types_ptr)
1510 val->id = odr_types.length ();
1511 vec_safe_push (odr_types_ptr, val);
1513 return val;
 1516 /* Add TYPE to the ODR type hash.  */
1518 void
1519 register_odr_type (tree type)
1521 if (!odr_hash)
1522 odr_hash = new odr_hash_type (23);
1523 /* Arrange things to be nicer and insert main variants first. */
1524 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
1525 get_odr_type (TYPE_MAIN_VARIANT (type), true);
1526 if (TYPE_MAIN_VARIANT (type) != type)
1527 get_odr_type (type, true);
1530 /* Return true if type is known to have no derivations. */
1532 bool
1533 type_known_to_have_no_deriavations_p (tree t)
1535 return (type_all_derivations_known_p (t)
1536 && (TYPE_FINAL_P (t)
1537 || (odr_hash
1538 && !get_odr_type (t, true)->derived_types.length())));
1541 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
1542 recursive printing. */
1544 static void
1545 dump_odr_type (FILE *f, odr_type t, int indent=0)
1547 unsigned int i;
1548 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1549 print_generic_expr (f, t->type, TDF_SLIM);
1550 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1551 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1552 if (TYPE_NAME (t->type))
1554 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1555 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1556 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1558 if (t->bases.length ())
1560 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1561 for (i = 0; i < t->bases.length (); i++)
1562 fprintf (f, " %i", t->bases[i]->id);
1563 fprintf (f, "\n");
1565 if (t->derived_types.length ())
1567 fprintf (f, "%*s derived types:\n", indent * 2, "");
1568 for (i = 0; i < t->derived_types.length (); i++)
1569 dump_odr_type (f, t->derived_types[i], indent + 1);
1571 fprintf (f, "\n");
1574 /* Dump the type inheritance graph. */
1576 static void
1577 dump_type_inheritance_graph (FILE *f)
1579 unsigned int i;
1580 if (!odr_types_ptr)
1581 return;
1582 fprintf (f, "\n\nType inheritance graph:\n");
1583 for (i = 0; i < odr_types.length (); i++)
1585 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1586 dump_odr_type (f, odr_types[i]);
1588 for (i = 0; i < odr_types.length (); i++)
1590 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1592 unsigned int j;
1593 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1594 print_node (f, "", odr_types[i]->type, 0);
1595 for (j = 0; j < odr_types[i]->types->length (); j++)
1597 tree t;
1598 fprintf (f, "duplicate #%i\n", j);
1599 print_node (f, "", (*odr_types[i]->types)[j], 0);
1600 t = (*odr_types[i]->types)[j];
1601 while (TYPE_P (t) && TYPE_CONTEXT (t))
1603 t = TYPE_CONTEXT (t);
1604 print_node (f, "", t, 0);
1606 putc ('\n',f);
 1612 /* Given method type T, return the type of the class it belongs to.
 1613 Look up the `this' pointer and get its type.  */
1615 tree
1616 method_class_type (const_tree t)
1618 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1619 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1621 return TREE_TYPE (first_parm_type);
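/* For example (illustration only): for the METHOD_TYPE of A::f in

	struct A { int f (int); };

   the first element of TYPE_ARG_TYPES is the type of the implicit
   `this' parameter (A *), so method_class_type returns A.  */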
1624 /* Initialize IPA devirt and build inheritance tree graph. */
1626 void
1627 build_type_inheritance_graph (void)
1629 struct symtab_node *n;
1630 FILE *inheritance_dump_file;
1631 int flags;
1633 if (odr_hash)
1634 return;
1635 timevar_push (TV_IPA_INHERITANCE);
1636 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
1637 odr_hash = new odr_hash_type (23);
 1639 /* We reconstruct the graph starting from the types of all methods seen
 1640 in the unit.  */
1641 FOR_EACH_SYMBOL (n)
1642 if (is_a <cgraph_node *> (n)
1643 && DECL_VIRTUAL_P (n->decl)
1644 && n->real_symbol_p ())
1645 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1646 true);
1648 /* Look also for virtual tables of types that do not define any methods.
 1650 We need it in the case where class B has a virtual base of class A
 1651 re-defining its virtual method and there is a class C with no virtual
 1652 methods that has B as a virtual base.
 1654 Here we output B's virtual method in two variants - for non-virtual
 1655 and virtual inheritance.  B's virtual table has the non-virtual version,
 1656 while C's has the virtual one.
 1658 For this reason we need to know about C in order to include both
 1659 variants of B.  More correctly, record_target_from_binfo should
 1660 add both variants of the method when walking B, but we have no
 1661 link between them.
 1663 We rely on the fact that either the method is exported and thus we
 1664 assume it is called externally, or C is in an anonymous namespace and
 1665 thus we will see the vtable.  */
1667 else if (is_a <varpool_node *> (n)
1668 && DECL_VIRTUAL_P (n->decl)
1669 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1670 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1671 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
1672 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
1673 if (inheritance_dump_file)
1675 dump_type_inheritance_graph (inheritance_dump_file);
1676 dump_end (TDI_inheritance, inheritance_dump_file);
1678 timevar_pop (TV_IPA_INHERITANCE);
 1681 /* Return true if N has a reference from a live virtual table
 1682 (and thus can be a destination of a polymorphic call).
 1683 Be conservatively correct when the callgraph is not built or
 1684 if the method may be referred to externally.  */
1686 static bool
1687 referenced_from_vtable_p (struct cgraph_node *node)
1689 int i;
1690 struct ipa_ref *ref;
1691 bool found = false;
1693 if (node->externally_visible
1694 || DECL_EXTERNAL (node->decl)
1695 || node->used_from_other_partition)
1696 return true;
1698 /* Keep this test constant time.
1699 It is unlikely this can happen except for the case where speculative
1700 devirtualization introduced many speculative edges to this node.
1701 In this case the target is very likely alive anyway. */
1702 if (node->ref_list.referring.length () > 100)
1703 return true;
1705 /* We need references built. */
1706 if (symtab->state <= CONSTRUCTION)
1707 return true;
1709 for (i = 0; node->iterate_referring (i, ref); i++)
1711 if ((ref->use == IPA_REF_ALIAS
1712 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1713 || (ref->use == IPA_REF_ADDR
1714 && TREE_CODE (ref->referring->decl) == VAR_DECL
1715 && DECL_VIRTUAL_P (ref->referring->decl)))
1717 found = true;
1718 break;
1720 return found;
 1723 /* If TARGET has an associated node, record it in the NODES array.
 1724 CAN_REFER specifies whether the program can refer to the target directly.
 1725 If TARGET is unknown (NULL) or it cannot be inserted (for example because
 1726 its body was already removed and there is no way to refer to it), clear
1727 COMPLETEP. */
1729 static void
1730 maybe_record_node (vec <cgraph_node *> &nodes,
1731 tree target, hash_set<tree> *inserted,
1732 bool can_refer,
1733 bool *completep)
1735 struct cgraph_node *target_node, *alias_target;
1736 enum availability avail;
 1738 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
 1739 the list of targets; the runtime effect of calling them is undefined.
 1740 Only "real" virtual methods should be accounted for.  */
1741 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1742 return;
1744 if (!can_refer)
 1746 /* The only case when a method of an anonymous namespace becomes unreferable
 1747 is when we have completely optimized it out.  */
1748 if (flag_ltrans
1749 || !target
1750 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
1751 *completep = false;
1752 return;
1755 if (!target)
1756 return;
1758 target_node = cgraph_node::get (target);
1760 /* Prefer alias target over aliases, so we do not get confused by
1761 fake duplicates. */
1762 if (target_node)
1764 alias_target = target_node->ultimate_alias_target (&avail);
1765 if (target_node != alias_target
1766 && avail >= AVAIL_AVAILABLE
1767 && target_node->get_availability ())
1768 target_node = alias_target;
1771 /* Method can only be called by polymorphic call if any
1772 of vtables referring to it are alive.
1774 While this holds for non-anonymous functions, too, there are
1775 cases where we want to keep them in the list; for example
1776 inline functions with -fno-weak are static, but we still
1777 may devirtualize them when instance comes from other unit.
1778 The same holds for LTO.
1780 Currently we ignore these functions in speculative devirtualization.
1781 ??? Maybe it would make sense to be more aggressive for LTO even
1782 elsewhere. */
1783 if (!flag_ltrans
1784 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1785 && (!target_node
1786 || !referenced_from_vtable_p (target_node)))
1788 /* See if TARGET is useful function we can deal with. */
1789 else if (target_node != NULL
1790 && (TREE_PUBLIC (target)
1791 || DECL_EXTERNAL (target)
1792 || target_node->definition)
1793 && target_node->real_symbol_p ())
1795 gcc_assert (!target_node->global.inlined_to);
1796 gcc_assert (target_node->real_symbol_p ());
1797 if (!inserted->add (target))
1799 cached_polymorphic_call_targets->add (target_node);
1800 nodes.safe_push (target_node);
1803 else if (completep
1804 && (!type_in_anonymous_namespace_p
1805 (DECL_CONTEXT (target))
1806 || flag_ltrans))
1807 *completep = false;
 1810 /* See if BINFO's type matches OUTER_TYPE.  If so, look up the
 1811 BINFO of the subtype of OTR_TYPE at OFFSET, and in that BINFO find the
 1812 method in the vtable and insert the method into the NODES array,
 1813 or into BASES_TO_CONSIDER if that array is non-NULL.
 1814 Otherwise recurse to base BINFOs.
 1815 This matches what get_binfo_at_offset does, but with the offset
 1816 being unknown.
 1818 TYPE_BINFOS is a stack of BINFOs of types with a defined
 1819 virtual table seen on the way from the class type to BINFO.
 1821 MATCHED_VTABLES tracks virtual tables in which we already looked up
 1822 the virtual function.  INSERTED tracks nodes we already
 1823 inserted.
 1825 ANONYMOUS is true if BINFO is part of an anonymous namespace.
 1827 Clear COMPLETEP when we hit an unreferable target.
1830 static void
1831 record_target_from_binfo (vec <cgraph_node *> &nodes,
1832 vec <tree> *bases_to_consider,
1833 tree binfo,
1834 tree otr_type,
1835 vec <tree> &type_binfos,
1836 HOST_WIDE_INT otr_token,
1837 tree outer_type,
1838 HOST_WIDE_INT offset,
1839 hash_set<tree> *inserted,
1840 hash_set<tree> *matched_vtables,
1841 bool anonymous,
1842 bool *completep)
1844 tree type = BINFO_TYPE (binfo);
1845 int i;
1846 tree base_binfo;
1849 if (BINFO_VTABLE (binfo))
1850 type_binfos.safe_push (binfo);
1851 if (types_same_for_odr (type, outer_type))
1853 int i;
1854 tree type_binfo = NULL;
1856 /* Look up the BINFO with a virtual table. For normal types it is always the
1857 last binfo on the stack. */
1858 for (i = type_binfos.length () - 1; i >= 0; i--)
1859 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1861 type_binfo = type_binfos[i];
1862 break;
1864 if (BINFO_VTABLE (binfo))
1865 type_binfos.pop ();
1866 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
1867 we may not have its associated vtable. This is not a problem, since
1868 we will walk it on the other path. */
1869 if (!type_binfo)
1870 return;
1871 tree inner_binfo = get_binfo_at_offset (type_binfo,
1872 offset, otr_type);
1873 if (!inner_binfo)
1875 gcc_assert (odr_violation_reported);
1876 return;
1878 /* For types in an anonymous namespace, first check whether the respective
1879 vtable is alive. If not, we know the type can't be called. */
1880 if (!flag_ltrans && anonymous)
1882 tree vtable = BINFO_VTABLE (inner_binfo);
1883 varpool_node *vnode;
1885 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1886 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1887 vnode = varpool_node::get (vtable);
1888 if (!vnode || !vnode->definition)
1889 return;
1891 gcc_assert (inner_binfo);
1892 if (bases_to_consider
1893 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
1894 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
1896 bool can_refer;
1897 tree target = gimple_get_virt_method_for_binfo (otr_token,
1898 inner_binfo,
1899 &can_refer);
1900 if (!bases_to_consider)
1901 maybe_record_node (nodes, target, inserted, can_refer, completep);
1902 /* Destructors are never called via construction vtables. */
1903 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1904 bases_to_consider->safe_push (target);
1906 return;
1909 /* Walk bases. */
1910 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1911 /* Walking bases that have no virtual method is a pointless exercise. */
1912 if (polymorphic_type_binfo_p (base_binfo))
1913 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
1914 type_binfos,
1915 otr_token, outer_type, offset, inserted,
1916 matched_vtables, anonymous, completep);
1917 if (BINFO_VTABLE (binfo))
1918 type_binfos.pop ();
1921 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1922 of TYPE, insert them into NODES, and recurse into derived types.
1923 INSERTED is used to avoid duplicate insertions of methods into NODES.
1924 MATCHED_VTABLES is used to avoid walking the same vtable twice.
1925 Clear COMPLETEP if an unreferable target is found.
1927 If CONSIDER_CONSTRUCTION is true, record in BASES_TO_CONSIDER
1928 all cases where BASE_SKIPPED is true (because the base is an abstract
1929 class). */
1931 static void
1932 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1933 hash_set<tree> *inserted,
1934 hash_set<tree> *matched_vtables,
1935 tree otr_type,
1936 odr_type type,
1937 HOST_WIDE_INT otr_token,
1938 tree outer_type,
1939 HOST_WIDE_INT offset,
1940 bool *completep,
1941 vec <tree> &bases_to_consider,
1942 bool consider_construction)
1944 tree binfo = TYPE_BINFO (type->type);
1945 unsigned int i;
1946 auto_vec <tree, 8> type_binfos;
1947 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1949 /* We may need to consider types without instances because of possible derived
1950 types using their methods either directly or via construction vtables.
1951 It is safe to skip them when all derivations are known, since we will
1952 handle them later.
1953 This is done by recording them in the BASES_TO_CONSIDER array. */
1954 if (possibly_instantiated || consider_construction)
1956 record_target_from_binfo (nodes,
1957 (!possibly_instantiated
1958 && type_all_derivations_known_p (type->type))
1959 ? &bases_to_consider : NULL,
1960 binfo, otr_type, type_binfos, otr_token,
1961 outer_type, offset,
1962 inserted, matched_vtables,
1963 type->anonymous_namespace, completep);
1965 for (i = 0; i < type->derived_types.length (); i++)
1966 possible_polymorphic_call_targets_1 (nodes, inserted,
1967 matched_vtables,
1968 otr_type,
1969 type->derived_types[i],
1970 otr_token, outer_type, offset, completep,
1971 bases_to_consider, consider_construction);
1974 /* Cache of queries for polymorphic call targets.
1976 Enumerating all call targets may get expensive when there are many
1977 polymorphic calls in the program, so we memoize all the previous
1978 queries and avoid duplicated work. */
1980 struct polymorphic_call_target_d
1982 HOST_WIDE_INT otr_token;
1983 ipa_polymorphic_call_context context;
1984 odr_type type;
1985 vec <cgraph_node *> targets;
1986 tree decl_warning;
1987 int type_warning;
1988 bool complete;
1989 bool speculative;
1992 /* Polymorphic call target cache helpers. */
1994 struct polymorphic_call_target_hasher
1996 typedef polymorphic_call_target_d value_type;
1997 typedef polymorphic_call_target_d compare_type;
1998 static inline hashval_t hash (const value_type *);
1999 static inline bool equal (const value_type *, const compare_type *);
2000 static inline void remove (value_type *);
2003 /* Return the computed hashcode for ODR_QUERY. */
2005 inline hashval_t
2006 polymorphic_call_target_hasher::hash (const value_type *odr_query)
2008 inchash::hash hstate (odr_query->otr_token);
2010 hstate.add_wide_int (odr_query->type->id);
2011 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2012 hstate.add_wide_int (odr_query->context.offset);
2014 if (odr_query->context.speculative_outer_type)
2016 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2017 hstate.add_wide_int (odr_query->context.speculative_offset);
2019 hstate.add_flag (odr_query->speculative);
2020 hstate.add_flag (odr_query->context.maybe_in_construction);
2021 hstate.add_flag (odr_query->context.maybe_derived_type);
2022 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2023 hstate.commit_flag ();
2024 return hstate.end ();
2027 /* Compare cache entries T1 and T2. */
2029 inline bool
2030 polymorphic_call_target_hasher::equal (const value_type *t1,
2031 const compare_type *t2)
2033 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2034 && t1->speculative == t2->speculative
2035 && t1->context.offset == t2->context.offset
2036 && t1->context.speculative_offset == t2->context.speculative_offset
2037 && t1->context.outer_type == t2->context.outer_type
2038 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2039 && t1->context.maybe_in_construction
2040 == t2->context.maybe_in_construction
2041 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2042 && (t1->context.speculative_maybe_derived_type
2043 == t2->context.speculative_maybe_derived_type));
2046 /* Remove entry in polymorphic call target cache hash. */
2048 inline void
2049 polymorphic_call_target_hasher::remove (value_type *v)
2051 v->targets.release ();
2052 free (v);
2055 /* Polymorphic call target query cache. */
2057 typedef hash_table<polymorphic_call_target_hasher>
2058 polymorphic_call_target_hash_type;
2059 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
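/* An illustrative sketch (mirroring what possible_polymorphic_call_targets
   does further below) of how a query is looked up in this cache; the names
   type, otr_token, speculative and context stand for an already canonicalized
   query:

     polymorphic_call_target_d key;
     polymorphic_call_target_d **slot;
     key.type = type;
     key.otr_token = otr_token;
     key.speculative = speculative;
     key.context = context;
     slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
     if (*slot)
       return (*slot)->targets;

   A hit returns the memoized target vector; on a miss the caller fills the
   slot with a freshly computed entry.  */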
2061 /* Destroy polymorphic call target query cache. */
2063 static void
2064 free_polymorphic_call_targets_hash ()
2066 if (cached_polymorphic_call_targets)
2068 delete polymorphic_call_target_hash;
2069 polymorphic_call_target_hash = NULL;
2070 delete cached_polymorphic_call_targets;
2071 cached_polymorphic_call_targets = NULL;
2075 /* When a virtual function is removed, we may need to flush the cache. */
2077 static void
2078 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2080 if (cached_polymorphic_call_targets
2081 && cached_polymorphic_call_targets->contains (n))
2082 free_polymorphic_call_targets_hash ();
2085 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
2087 tree
2088 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2089 tree vtable)
2091 tree v = BINFO_VTABLE (binfo);
2092 int i;
2093 tree base_binfo;
2094 unsigned HOST_WIDE_INT this_offset;
2096 if (v)
2098 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2099 gcc_unreachable ();
2101 if (offset == this_offset
2102 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2103 return binfo;
2106 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2107 if (polymorphic_type_binfo_p (base_binfo))
2109 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2110 if (base_binfo)
2111 return base_binfo;
2113 return NULL;
2116 /* T is a known constant value of a virtual table pointer.
2117 Store the virtual table to V and its offset to OFFSET.
2118 Return false if T does not look like a virtual table reference. */
2120 bool
2121 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2122 unsigned HOST_WIDE_INT *offset)
2124 /* We expect &MEM[(void *)&virtual_table + 16B].
2125 We obtain the object's BINFO from the context of the virtual table.
2126 That BINFO contains a pointer to the virtual table represented via
2127 POINTER_PLUS_EXPR. Verify that this pointer matches what
2128 we propagated through.
2130 In the case of virtual inheritance, the virtual tables may
2131 be nested, i.e. the offset may be different from 16 and we may
2132 need to dive into the type representation. */
2133 if (TREE_CODE (t) == ADDR_EXPR
2134 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2135 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2136 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2137 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2138 == VAR_DECL)
2139 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2140 (TREE_OPERAND (t, 0), 0), 0)))
2142 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2143 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2144 return true;
2147 /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR.
2148 We need to handle it when T comes from a static variable initializer or
2149 a BINFO. */
2150 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2152 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2153 t = TREE_OPERAND (t, 0);
2155 else
2156 *offset = 0;
2158 if (TREE_CODE (t) != ADDR_EXPR)
2159 return false;
2160 *v = TREE_OPERAND (t, 0);
2161 return true;
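/* A hedged example of the first form handled above: for a constant of the
   shape &MEM[(void *)&_ZTV1B + 16B], where _ZTV1B stands for some virtual
   table VAR_DECL, the decomposition would be

     tree vtable;
     unsigned HOST_WIDE_INT offset;
     if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
       {
         gcc_assert (DECL_VIRTUAL_P (vtable));
         gcc_assert (offset == 16);
       }

   The POINTER_PLUS_EXPR form handled above yields the same pair, with
   OFFSET taken from the second operand (or 0 for a plain ADDR_EXPR).  */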
2164 /* T is a known constant value of a virtual table pointer. Return the BINFO
2165 of the instance type. */
2167 tree
2168 vtable_pointer_value_to_binfo (const_tree t)
2170 tree vtable;
2171 unsigned HOST_WIDE_INT offset;
2173 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2174 return NULL_TREE;
2176 /* FIXME: for stores of construction vtables we return NULL,
2177 because we do not have BINFO for those. Eventually we should fix
2178 our representation to allow this case to be handled, too.
2179 In the case where we see a store of a BINFO, however, we may assume
2180 that standard folding will be able to cope with it. */
2181 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2182 offset, vtable);
2185 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2186 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2187 and insert them in NODES.
2189 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
2191 static void
2192 record_targets_from_bases (tree otr_type,
2193 HOST_WIDE_INT otr_token,
2194 tree outer_type,
2195 HOST_WIDE_INT offset,
2196 vec <cgraph_node *> &nodes,
2197 hash_set<tree> *inserted,
2198 hash_set<tree> *matched_vtables,
2199 bool *completep)
2201 while (true)
2203 HOST_WIDE_INT pos, size;
2204 tree base_binfo;
2205 tree fld;
2207 if (types_same_for_odr (outer_type, otr_type))
2208 return;
2210 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2212 if (TREE_CODE (fld) != FIELD_DECL)
2213 continue;
2215 pos = int_bit_position (fld);
2216 size = tree_to_shwi (DECL_SIZE (fld));
2217 if (pos <= offset && (pos + size) > offset
2218 /* Do not get confused by zero sized bases. */
2219 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2220 break;
2222 /* Within a class type we should always find the corresponding field. */
2223 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2225 /* Nonbase types should have been stripped by outer_class_type. */
2226 gcc_assert (DECL_ARTIFICIAL (fld));
2228 outer_type = TREE_TYPE (fld);
2229 offset -= pos;
2231 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2232 offset, otr_type);
2233 if (!base_binfo)
2235 gcc_assert (odr_violation_reported);
2236 return;
2238 gcc_assert (base_binfo);
2239 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2241 bool can_refer;
2242 tree target = gimple_get_virt_method_for_binfo (otr_token,
2243 base_binfo,
2244 &can_refer);
2245 if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2246 maybe_record_node (nodes, target, inserted, can_refer, completep);
2247 matched_vtables->add (BINFO_VTABLE (base_binfo));
2252 /* When a virtual table is removed, we may need to flush the cache. */
2254 static void
2255 devirt_variable_node_removal_hook (varpool_node *n,
2256 void *d ATTRIBUTE_UNUSED)
2258 if (cached_polymorphic_call_targets
2259 && DECL_VIRTUAL_P (n->decl)
2260 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2261 free_polymorphic_call_targets_hash ();
2264 /* Records how many calls would benefit from a given type being final. */
2266 struct odr_type_warn_count
2268 tree type;
2269 int count;
2270 gcov_type dyn_count;
2273 /* Records how many calls would benefit from a given method being final. */
2275 struct decl_warn_count
2277 tree decl;
2278 int count;
2279 gcov_type dyn_count;
2282 /* Information about type and decl warnings. */
2284 struct final_warning_record
2286 gcov_type dyn_count;
2287 vec<odr_type_warn_count> type_warnings;
2288 hash_map<tree, decl_warn_count> decl_warnings;
2290 struct final_warning_record *final_warning_records;
2292 /* Return a vector containing possible targets of a polymorphic call of type
2293 OTR_TYPE calling the method at OTR_TOKEN within OTR_OUTER_TYPE at OFFSET.
2294 If INCLUDE_BASES is true, also walk base types of OUTER_TYPE containing
2295 OTR_TYPE and include their virtual methods. This is useful for types
2296 possibly in construction or destruction, where the virtual table may
2297 temporarily change to that of one of the base types. INCLUDE_DERIVED_TYPES
2298 makes us walk the inheritance graph for all derivations.
2300 If COMPLETEP is non-NULL, store true there if the list is complete.
2301 CACHE_TOKEN (if non-NULL) will be set to a unique ID of the entry
2302 in the target cache. If the user needs to visit every target list
2303 just once, they can use it to memoize visited lists.
2305 If SPECULATIVE is set, the list will not contain targets that
2306 are not speculatively taken.
2308 The returned vector is placed into the cache. It is NOT the caller's
2309 responsibility to free it. The vector can be freed on a cgraph_remove_node
2310 call if the particular node is a virtual function present in the cache. */
2312 vec <cgraph_node *>
2313 possible_polymorphic_call_targets (tree otr_type,
2314 HOST_WIDE_INT otr_token,
2315 ipa_polymorphic_call_context context,
2316 bool *completep,
2317 void **cache_token,
2318 bool speculative)
2320 static struct cgraph_node_hook_list *node_removal_hook_holder;
2321 vec <cgraph_node *> nodes = vNULL;
2322 auto_vec <tree, 8> bases_to_consider;
2323 odr_type type, outer_type;
2324 polymorphic_call_target_d key;
2325 polymorphic_call_target_d **slot;
2326 unsigned int i;
2327 tree binfo, target;
2328 bool complete;
2329 bool can_refer = false;
2330 bool skipped = false;
2332 otr_type = TYPE_MAIN_VARIANT (otr_type);
2334 /* If the ODR machinery is not initialized or the context is invalid, return an
2335 empty, incomplete list. */
2336 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
2338 if (completep)
2339 *completep = context.invalid;
2340 if (cache_token)
2341 *cache_token = NULL;
2342 return nodes;
2345 /* Do not bother to compute speculative info when the user does not ask for it. */
2346 if (!speculative || !context.speculative_outer_type)
2347 context.clear_speculation ();
2349 type = get_odr_type (otr_type, true);
2351 /* Recording type variants would waste the results cache. */
2352 gcc_assert (!context.outer_type
2353 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2355 /* Look up the outer class type we want to walk.
2356 If we fail to do so, the context is invalid. */
2357 if ((context.outer_type || context.speculative_outer_type)
2358 && !context.restrict_to_inner_class (otr_type))
2360 if (completep)
2361 *completep = true;
2362 if (cache_token)
2363 *cache_token = NULL;
2364 return nodes;
2366 gcc_assert (!context.invalid);
2368 /* Check that restrict_to_inner_class kept the main variant. */
2369 gcc_assert (!context.outer_type
2370 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2372 /* We canonicalize our query, so we do not need extra hashtable entries. */
2374 /* Without an outer type, we have no use for the offset. Just do the
2375 basic search from the inner type. */
2376 if (!context.outer_type)
2377 context.clear_outer_type (otr_type);
2378 /* We need to update our hierarchy if the type does not exist. */
2379 outer_type = get_odr_type (context.outer_type, true);
2380 /* If the type is final, there are no derivations. */
2381 if (TYPE_FINAL_P (outer_type->type))
2382 context.maybe_derived_type = false;
2384 /* Initialize query cache. */
2385 if (!cached_polymorphic_call_targets)
2387 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
2388 polymorphic_call_target_hash
2389 = new polymorphic_call_target_hash_type (23);
2390 if (!node_removal_hook_holder)
2392 node_removal_hook_holder =
2393 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
2394 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
2395 NULL);
2399 if (in_lto_p)
2401 if (context.outer_type != otr_type)
2402 context.outer_type
2403 = get_odr_type (context.outer_type, true)->type;
2404 if (context.speculative_outer_type)
2405 context.speculative_outer_type
2406 = get_odr_type (context.speculative_outer_type, true)->type;
2409 /* Look up cached answer. */
2410 key.type = type;
2411 key.otr_token = otr_token;
2412 key.speculative = speculative;
2413 key.context = context;
2414 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
2415 if (cache_token)
2416 *cache_token = (void *)*slot;
2417 if (*slot)
2419 if (completep)
2420 *completep = (*slot)->complete;
2421 if ((*slot)->type_warning && final_warning_records)
2423 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
2424 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
2425 += final_warning_records->dyn_count;
2427 if (!speculative && (*slot)->decl_warning && final_warning_records)
2429 struct decl_warn_count *c =
2430 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
2431 c->count++;
2432 c->dyn_count += final_warning_records->dyn_count;
2434 return (*slot)->targets;
2437 complete = true;
2439 /* Do actual search. */
2440 timevar_push (TV_IPA_VIRTUAL_CALL);
2441 *slot = XCNEW (polymorphic_call_target_d);
2442 if (cache_token)
2443 *cache_token = (void *)*slot;
2444 (*slot)->type = type;
2445 (*slot)->otr_token = otr_token;
2446 (*slot)->context = context;
2447 (*slot)->speculative = speculative;
2449 hash_set<tree> inserted;
2450 hash_set<tree> matched_vtables;
2452 /* First insert targets we speculatively identified as likely. */
2453 if (context.speculative_outer_type)
2455 odr_type speculative_outer_type;
2456 bool speculation_complete = true;
2458 /* First insert the target from the type itself and check whether it may have
2459 derived types. */
2460 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
2461 if (TYPE_FINAL_P (speculative_outer_type->type))
2462 context.speculative_maybe_derived_type = false;
2463 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
2464 context.speculative_offset, otr_type);
2465 if (binfo)
2466 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2467 &can_refer);
2468 else
2469 target = NULL;
2471 /* In case we get a final method, we don't need
2472 to walk derivations. */
2473 if (target && DECL_FINAL_P (target))
2474 context.speculative_maybe_derived_type = false;
2475 if (type_possibly_instantiated_p (speculative_outer_type->type))
2476 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
2477 if (binfo)
2478 matched_vtables.add (BINFO_VTABLE (binfo));
2481 /* Next, recursively walk all derived types. */
2482 if (context.speculative_maybe_derived_type)
2483 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
2484 possible_polymorphic_call_targets_1 (nodes, &inserted,
2485 &matched_vtables,
2486 otr_type,
2487 speculative_outer_type->derived_types[i],
2488 otr_token, speculative_outer_type->type,
2489 context.speculative_offset,
2490 &speculation_complete,
2491 bases_to_consider,
2492 false);
2495 if (!speculative || !nodes.length ())
2497 /* First look at the virtual method of the type itself. */
2498 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2499 context.offset, otr_type);
2500 if (binfo)
2501 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2502 &can_refer);
2503 else
2505 gcc_assert (odr_violation_reported);
2506 target = NULL;
2509 /* Destructors are never called through construction virtual tables,
2510 because the type is always known. */
2511 if (target && DECL_CXX_DESTRUCTOR_P (target))
2512 context.maybe_in_construction = false;
2514 if (target)
2516 /* In case we get a final method, we don't need
2517 to walk derivations. */
2518 if (DECL_FINAL_P (target))
2519 context.maybe_derived_type = false;
2522 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2523 if (type_possibly_instantiated_p (outer_type->type))
2524 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2525 else
2526 skipped = true;
2528 if (binfo)
2529 matched_vtables.add (BINFO_VTABLE (binfo));
2531 /* Next, recursively walk all derived types. */
2532 if (context.maybe_derived_type)
2534 for (i = 0; i < outer_type->derived_types.length(); i++)
2535 possible_polymorphic_call_targets_1 (nodes, &inserted,
2536 &matched_vtables,
2537 otr_type,
2538 outer_type->derived_types[i],
2539 otr_token, outer_type->type,
2540 context.offset, &complete,
2541 bases_to_consider,
2542 context.maybe_in_construction);
2544 if (!outer_type->all_derivations_known)
2546 if (!speculative && final_warning_records)
2548 if (complete
2549 && nodes.length () == 1
2550 && warn_suggest_final_types
2551 && !outer_type->derived_types.length ())
2553 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
2554 final_warning_records->type_warnings.safe_grow_cleared
2555 (odr_types.length ());
2556 final_warning_records->type_warnings[outer_type->id].count++;
2557 final_warning_records->type_warnings[outer_type->id].dyn_count
2558 += final_warning_records->dyn_count;
2559 final_warning_records->type_warnings[outer_type->id].type
2560 = outer_type->type;
2561 (*slot)->type_warning = outer_type->id + 1;
2563 if (complete
2564 && warn_suggest_final_methods
2565 && nodes.length () == 1
2566 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
2567 outer_type->type))
2569 bool existed;
2570 struct decl_warn_count &c =
2571 final_warning_records->decl_warnings.get_or_insert
2572 (nodes[0]->decl, &existed);
2574 if (existed)
2576 c.count++;
2577 c.dyn_count += final_warning_records->dyn_count;
2579 else
2581 c.count = 1;
2582 c.dyn_count = final_warning_records->dyn_count;
2583 c.decl = nodes[0]->decl;
2585 (*slot)->decl_warning = nodes[0]->decl;
2588 complete = false;
2592 if (!speculative)
2594 /* Destructors are never called through construction virtual tables,
2595 because the type is always known. One of the entries may be
2596 cxa_pure_virtual, so look at at least two of them. */
2597 if (context.maybe_in_construction)
2598 for (i = 0; i < MIN (nodes.length (), 2); i++)
2599 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2600 context.maybe_in_construction = false;
2601 if (context.maybe_in_construction)
2603 if (type != outer_type
2604 && (!skipped
2605 || (context.maybe_derived_type
2606 && !type_all_derivations_known_p (outer_type->type))))
2607 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2608 context.offset, nodes, &inserted,
2609 &matched_vtables, &complete);
2610 if (skipped)
2611 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2612 for (i = 0; i < bases_to_consider.length(); i++)
2613 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
2618 (*slot)->targets = nodes;
2619 (*slot)->complete = complete;
2620 if (completep)
2621 *completep = complete;
2623 timevar_pop (TV_IPA_VIRTUAL_CALL);
2624 return nodes;
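/* An illustrative sketch of a typical query (variable names are placeholders,
   in the spirit of the use in ipa_devirt below):

     bool final;
     void *cache_token;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token, ctx,
                                            &final, &cache_token, true);
     cgraph_node *unique_target
       = (final && targets.length () == 1) ? targets[0] : NULL;

   The vector is owned by the cache and must not be released by the caller;
   CACHE_TOKEN identifies the cache entry, so a caller that wants to process
   each distinct target list only once can key off it.  */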
2627 bool
2628 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
2629 vec<const decl_warn_count*> *vec)
2631 vec->safe_push (&value);
2632 return true;
2635 /* Dump target list TARGETS into FILE. */
2637 static void
2638 dump_targets (FILE *f, vec <cgraph_node *> targets)
2640 unsigned int i;
2642 for (i = 0; i < targets.length (); i++)
2644 char *name = NULL;
2645 if (in_lto_p)
2646 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2647 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2648 if (in_lto_p)
2649 free (name);
2650 if (!targets[i]->definition)
2651 fprintf (f, " (no definition%s)",
2652 DECL_DECLARED_INLINE_P (targets[i]->decl)
2653 ? " inline" : "");
2655 fprintf (f, "\n");
2658 /* Dump all possible targets of a polymorphic call. */
2660 void
2661 dump_possible_polymorphic_call_targets (FILE *f,
2662 tree otr_type,
2663 HOST_WIDE_INT otr_token,
2664 const ipa_polymorphic_call_context &ctx)
2666 vec <cgraph_node *> targets;
2667 bool final;
2668 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2669 unsigned int len;
2671 if (!type)
2672 return;
2673 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2674 ctx,
2675 &final, NULL, false);
2676 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2677 print_generic_expr (f, type->type, TDF_SLIM);
2678 fprintf (f, " token %i\n", (int)otr_token);
2680 ctx.dump (f);
2682 fprintf (f, " %s%s%s%s\n ",
2683 final ? "This is a complete list." :
2684 "This is partial list; extra targets may be defined in other units.",
2685 ctx.maybe_in_construction ? " (base types included)" : "",
2686 ctx.maybe_derived_type ? " (derived types included)" : "",
2687 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
2688 len = targets.length ();
2689 dump_targets (f, targets);
2691 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2692 ctx,
2693 &final, NULL, true);
2694 gcc_assert (targets.length () <= len);
2695 if (targets.length () != len)
2697 fprintf (f, " Speculative targets:");
2698 dump_targets (f, targets);
2700 fprintf (f, "\n");
2704 /* Return true if N can possibly be a target of a polymorphic call of
2705 OTR_TYPE/OTR_TOKEN. */
2707 bool
2708 possible_polymorphic_call_target_p (tree otr_type,
2709 HOST_WIDE_INT otr_token,
2710 const ipa_polymorphic_call_context &ctx,
2711 struct cgraph_node *n)
2713 vec <cgraph_node *> targets;
2714 unsigned int i;
2715 enum built_in_function fcode;
2716 bool final;
2718 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2719 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2720 == BUILT_IN_UNREACHABLE
2721 || fcode == BUILT_IN_TRAP))
2722 return true;
2724 if (!odr_hash)
2725 return true;
2726 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2727 for (i = 0; i < targets.length (); i++)
2728 if (n->semantically_equivalent_p (targets[i]))
2729 return true;
2731 /* At the moment we allow the middle end to dig out new external declarations
2732 as targets of polymorphic calls. */
2733 if (!final && !n->definition)
2734 return true;
2735 return false;
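/* A hedged usage sketch: code that has computed a devirtualization target N
   for a call with known OTR_TYPE, OTR_TOKEN and context CTX can sanity-check
   the decision with

     gcc_checking_assert
       (possible_polymorphic_call_target_p (otr_type, otr_token, ctx, n));

   which, as implemented above, conservatively accepts builtin_unreachable
   and builtin_trap targets and accepts anything when the ODR machinery is
   not initialized.  */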
2740 /* Return true if N can possibly be a target of a polymorphic call of
2741 OBJ_TYPE_REF expression REF in STMT. */
2743 bool
2744 possible_polymorphic_call_target_p (tree ref,
2745 gimple stmt,
2746 struct cgraph_node *n)
2748 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
2749 tree call_fn = gimple_call_fn (stmt);
2751 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
2752 tree_to_uhwi
2753 (OBJ_TYPE_REF_TOKEN (call_fn)),
2754 context,
2755 n);
2759 /* After callgraph construction, new external nodes may appear.
2760 Add them to the graph. */
2762 void
2763 update_type_inheritance_graph (void)
2765 struct cgraph_node *n;
2767 if (!odr_hash)
2768 return;
2769 free_polymorphic_call_targets_hash ();
2770 timevar_push (TV_IPA_INHERITANCE);
2771 /* We reconstruct the graph starting from types of all methods seen in the
2772 unit. */
2773 FOR_EACH_FUNCTION (n)
2774 if (DECL_VIRTUAL_P (n->decl)
2775 && !n->definition
2776 && n->real_symbol_p ())
2777 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2778 true);
2779 timevar_pop (TV_IPA_INHERITANCE);
2783 /* Return true if N looks like a likely target of a polymorphic call.
2784 Rule out cxa_pure_virtual, noreturn functions, functions declared cold, and
2785 other obvious cases. */
2787 bool
2788 likely_target_p (struct cgraph_node *n)
2790 int flags;
2791 /* cxa_pure_virtual and similar things are not likely. */
2792 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2793 return false;
2794 flags = flags_from_decl_or_type (n->decl);
2795 if (flags & ECF_NORETURN)
2796 return false;
2797 if (lookup_attribute ("cold",
2798 DECL_ATTRIBUTES (n->decl)))
2799 return false;
2800 if (n->frequency < NODE_FREQUENCY_NORMAL)
2801 return false;
2802 /* If there are no live virtual tables referring to the target,
2803 the only way the target can be called is via an instance coming from another
2804 compilation unit; speculative devirtualization is built around the
2805 assumption that this won't happen. */
2806 if (!referenced_from_vtable_p (n))
2807 return false;
2808 return true;
2811 /* Compare type warning records P1 and P2 and choose the one with the larger count;
2812 helper for qsort. */
2814 static int
2815 type_warning_cmp (const void *p1, const void *p2)
2817 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
2818 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
2820 if (t1->dyn_count < t2->dyn_count)
2821 return 1;
2822 if (t1->dyn_count > t2->dyn_count)
2823 return -1;
2824 return t2->count - t1->count;
2827 /* Compare decl warning records P1 and P2 and choose the one with the larger count;
2828 helper for qsort. */
2830 static int
2831 decl_warning_cmp (const void *p1, const void *p2)
2833 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
2834 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
2836 if (t1->dyn_count < t2->dyn_count)
2837 return 1;
2838 if (t1->dyn_count > t2->dyn_count)
2839 return -1;
2840 return t2->count - t1->count;
2844 /* Try to speculatively devirtualize a call to OTR_TYPE with OTR_TOKEN in
2845 context CTX. */
2847 struct cgraph_node *
2848 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
2849 ipa_polymorphic_call_context ctx)
2851 vec <cgraph_node *>targets
2852 = possible_polymorphic_call_targets
2853 (otr_type, otr_token, ctx, NULL, NULL, true);
2854 unsigned int i;
2855 struct cgraph_node *likely_target = NULL;
2857 for (i = 0; i < targets.length (); i++)
2858 if (likely_target_p (targets[i]))
2860 if (likely_target)
2861 return NULL;
2862 likely_target = targets[i];
2864 if (!likely_target
2865 || !likely_target->definition
2866 || DECL_EXTERNAL (likely_target->decl))
2867 return NULL;
2869 /* Don't use an implicitly-declared destructor (c++/58678). */
2870 struct cgraph_node *non_thunk_target
2871 = likely_target->function_symbol ();
2872 if (DECL_ARTIFICIAL (non_thunk_target->decl))
2873 return NULL;
2874 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
2875 && likely_target->can_be_discarded_p ())
2876 return NULL;
2877 return likely_target;
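/* Sketch of a possible caller, modelled on the logic in ipa_devirt below;
   E is assumed to be a polymorphic indirect call edge:

     struct cgraph_node *target
       = try_speculative_devirtualization (otr_type, otr_token, ctx);
     if (target)
       e->make_speculative (target, e->count * 8 / 10, e->frequency * 8 / 10);

   That is, this helper only picks the unique likely target; committing to
   the speculation is left to the caller.  */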
2880 /* The ipa-devirt pass.
2881 When a polymorphic call has only one likely target in the unit,
2882 turn it into a speculative call. */
2884 static unsigned int
2885 ipa_devirt (void)
2887 struct cgraph_node *n;
2888 hash_set<void *> bad_call_targets;
2889 struct cgraph_edge *e;
2891 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2892 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2893 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2894 int ndropped = 0;
2896 if (!odr_types_ptr)
2897 return 0;
2899 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
2900 This is implemented by setting up final_warning_records that are updated
2901 by possible_polymorphic_call_targets.
2902 We need to clear the cache in this case to trigger recomputation of all
2903 entries. */
2904 if (warn_suggest_final_methods || warn_suggest_final_types)
2906 final_warning_records = new (final_warning_record);
2907 final_warning_records->type_warnings = vNULL;
2908 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
2909 free_polymorphic_call_targets_hash ();
2912 FOR_EACH_DEFINED_FUNCTION (n)
2914 bool update = false;
2915 if (!opt_for_fn (n->decl, flag_devirtualize))
2916 continue;
2917 if (dump_file && n->indirect_calls)
2918 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
2919 n->name (), n->order);
2920 for (e = n->indirect_calls; e; e = e->next_callee)
2921 if (e->indirect_info->polymorphic)
2923 struct cgraph_node *likely_target = NULL;
2924 void *cache_token;
2925 bool final;
2927 if (final_warning_records)
2928 final_warning_records->dyn_count = e->count;
2930 vec <cgraph_node *>targets
2931 = possible_polymorphic_call_targets
2932 (e, &final, &cache_token, true);
2933 unsigned int i;
2935 /* Trigger warnings by calculating non-speculative targets. */
2936 if (warn_suggest_final_methods || warn_suggest_final_types)
2937 possible_polymorphic_call_targets (e);
2939 if (dump_file)
2940 dump_possible_polymorphic_call_targets
2941 (dump_file, e);
2943 npolymorphic++;
2945 /* See if the call can be devirtualized by means of ipa-prop's
2946 polymorphic call context propagation. If not, we can just
2947 forget about this call being polymorphic and avoid some heavy
2948 lifting in remove_unreachable_nodes that will otherwise try to
2949 keep all possible targets alive until inlining and in the inliner
2950 itself.
2952 This may need to be revisited once we add further ways to use
2953 the may edges, but it is a reasonable thing to do right now. */
2955 if ((e->indirect_info->param_index == -1
2956 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
2957 && e->indirect_info->vptr_changed))
2958 && !flag_ltrans_devirtualize)
2960 e->indirect_info->polymorphic = false;
2961 ndropped++;
2962 if (dump_file)
2963 fprintf (dump_file, "Dropping polymorphic call info;"
2964 " it can not be used by ipa-prop\n");
2967 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
2968 continue;
2970 if (!e->maybe_hot_p ())
2972 if (dump_file)
2973 fprintf (dump_file, "Call is cold\n\n");
2974 ncold++;
2975 continue;
2977 if (e->speculative)
2979 if (dump_file)
2980 fprintf (dump_file, "Call is already speculated\n\n");
2981 nspeculated++;
2983 /* When dumping, see if we agree with the speculation. */
2984 if (!dump_file)
2985 continue;
2987 if (bad_call_targets.contains (cache_token))
2989 if (dump_file)
2990 fprintf (dump_file, "Target list is known to be useless\n\n");
2991 nmultiple++;
2992 continue;
2994 for (i = 0; i < targets.length (); i++)
2995 if (likely_target_p (targets[i]))
2997 if (likely_target)
2999 likely_target = NULL;
3000 if (dump_file)
3001 fprintf (dump_file, "More than one likely target\n\n");
3002 nmultiple++;
3003 break;
3005 likely_target = targets[i];
3007 if (!likely_target)
3009 bad_call_targets.add (cache_token);
3010 continue;
3012 /* This is reached only when dumping; check if we agree or disagree
3013 with the speculation. */
3014 if (e->speculative)
3016 struct cgraph_edge *e2;
3017 struct ipa_ref *ref;
3018 e->speculative_call_info (e2, e, ref);
3019 if (e2->callee->ultimate_alias_target ()
3020 == likely_target->ultimate_alias_target ())
3022 fprintf (dump_file, "We agree with speculation\n\n");
3023 nok++;
3025 else
3027 fprintf (dump_file, "We disagree with speculation\n\n");
3028 nwrong++;
3030 continue;
3032 if (!likely_target->definition)
3034 if (dump_file)
3035 fprintf (dump_file, "Target is not a definition\n\n");
3036 nnotdefined++;
3037 continue;
3039 /* Do not introduce new references to external symbols. While we
3040 can handle these just fine, it is common for programs to be built
3041 incorrectly, with headers declaring methods that the program is not
3042 actually linked with. */
3043 if (DECL_EXTERNAL (likely_target->decl))
3045 if (dump_file)
3046 fprintf (dump_file, "Target is external\n\n");
3047 nexternal++;
3048 continue;
3050 /* Don't use an implicitly-declared destructor (c++/58678). */
3051 struct cgraph_node *non_thunk_target
3052 = likely_target->function_symbol ();
3053 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3055 if (dump_file)
3056 fprintf (dump_file, "Target is artificial\n\n");
3057 nartificial++;
3058 continue;
3060 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3061 && likely_target->can_be_discarded_p ())
3063 if (dump_file)
3064 fprintf (dump_file, "Target is overwritable\n\n");
3065 noverwritable++;
3066 continue;
3068 else if (dbg_cnt (devirt))
3070 if (dump_enabled_p ())
3072 location_t locus = gimple_location_safe (e->call_stmt);
3073 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3074 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3075 n->name (), n->order,
3076 likely_target->name (),
3077 likely_target->order);
3079 if (!likely_target->can_be_discarded_p ())
3081 cgraph_node *alias;
3082 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3083 if (alias)
3084 likely_target = alias;
3086 nconverted++;
3087 update = true;
3088 e->make_speculative
3089 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3092 if (update)
3093 inline_update_overall_summary (n);
3095 if (warn_suggest_final_methods || warn_suggest_final_types)
3097 if (warn_suggest_final_types)
3099 final_warning_records->type_warnings.qsort (type_warning_cmp);
3100 for (unsigned int i = 0;
3101 i < final_warning_records->type_warnings.length (); i++)
3102 if (final_warning_records->type_warnings[i].count)
3104 tree type = final_warning_records->type_warnings[i].type;
3105 int count = final_warning_records->type_warnings[i].count;
3106 long long dyn_count
3107 = final_warning_records->type_warnings[i].dyn_count;
3109 if (!dyn_count)
3110 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3111 OPT_Wsuggest_final_types, count,
3112 "Declaring type %qD final "
3113 "would enable devirtualization of %i call",
3114 "Declaring type %qD final "
3115 "would enable devirtualization of %i calls",
3116 type,
3117 count);
3118 else
3119 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3120 OPT_Wsuggest_final_types, count,
3121 "Declaring type %qD final "
3122 "would enable devirtualization of %i call "
3123 "executed %lli times",
3124 "Declaring type %qD final "
3125 "would enable devirtualization of %i calls "
3126 "executed %lli times",
3127 type,
3128 count,
3129 dyn_count);
3133 if (warn_suggest_final_methods)
3135 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3137 final_warning_records->decl_warnings.traverse
3138 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3139 decl_warnings_vec.qsort (decl_warning_cmp);
3140 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3142 tree decl = decl_warnings_vec[i]->decl;
3143 int count = decl_warnings_vec[i]->count;
3144 long long dyn_count = decl_warnings_vec[i]->dyn_count;
3146 if (!dyn_count)
3147 if (DECL_CXX_DESTRUCTOR_P (decl))
3148 warning_n (DECL_SOURCE_LOCATION (decl),
3149 OPT_Wsuggest_final_methods, count,
3150 "Declaring virtual destructor of %qD final "
3151 "would enable devirtualization of %i call",
3152 "Declaring virtual destructor of %qD final "
3153 "would enable devirtualization of %i calls",
3154 DECL_CONTEXT (decl), count);
3155 else
3156 warning_n (DECL_SOURCE_LOCATION (decl),
3157 OPT_Wsuggest_final_methods, count,
3158 "Declaring method %qD final "
3159 "would enable devirtualization of %i call",
3160 "Declaring method %qD final "
3161 "would enable devirtualization of %i calls",
3162 decl, count);
3163 else if (DECL_CXX_DESTRUCTOR_P (decl))
3164 warning_n (DECL_SOURCE_LOCATION (decl),
3165 OPT_Wsuggest_final_methods, count,
3166 "Declaring virtual destructor of %qD final "
3167 "would enable devirtualization of %i call "
3168 "executed %lli times",
3169 "Declaring virtual destructor of %qD final "
3170 "would enable devirtualization of %i calls "
3171 "executed %lli times",
3172 DECL_CONTEXT (decl), count, dyn_count);
3173 else
3174 warning_n (DECL_SOURCE_LOCATION (decl),
3175 OPT_Wsuggest_final_methods, count,
3176 "Declaring method %qD final "
3177 "would enable devirtualization of %i call "
3178 "executed %lli times",
3179 "Declaring method %qD final "
3180 "would enable devirtualization of %i calls "
3181 "executed %lli times",
3182 decl, count, dyn_count);
3186 delete (final_warning_records);
3187 final_warning_records = 0;
3190 if (dump_file)
3191 fprintf (dump_file,
3192 "%i polymorphic calls, %i devirtualized,"
3193 " %i speculatively devirtualized, %i cold\n"
3194 "%i have multiple targets, %i overwritable,"
3195 " %i already speculated (%i agree, %i disagree),"
3196 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3197 npolymorphic, ndevirtualized, nconverted, ncold,
3198 nmultiple, noverwritable, nspeculated, nok, nwrong,
3199 nexternal, nnotdefined, nartificial, ndropped);
3200 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3203 namespace {
3205 const pass_data pass_data_ipa_devirt =
3207 IPA_PASS, /* type */
3208 "devirt", /* name */
3209 OPTGROUP_NONE, /* optinfo_flags */
3210 TV_IPA_DEVIRT, /* tv_id */
3211 0, /* properties_required */
3212 0, /* properties_provided */
3213 0, /* properties_destroyed */
3214 0, /* todo_flags_start */
3215 ( TODO_dump_symtab ), /* todo_flags_finish */
3218 class pass_ipa_devirt : public ipa_opt_pass_d
3220 public:
3221 pass_ipa_devirt (gcc::context *ctxt)
3222 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3223 NULL, /* generate_summary */
3224 NULL, /* write_summary */
3225 NULL, /* read_summary */
3226 NULL, /* write_optimization_summary */
3227 NULL, /* read_optimization_summary */
3228 NULL, /* stmt_fixup */
3229 0, /* function_transform_todo_flags_start */
3230 NULL, /* function_transform */
3231 NULL) /* variable_transform */
3234 /* opt_pass methods: */
3235 virtual bool gate (function *)
3237 /* In LTO, always run the IPA passes and decide on a per-function basis whether
3238 the pass is enabled. */
3239 if (in_lto_p)
3240 return true;
3241 return (flag_devirtualize
3242 && (flag_devirtualize_speculatively
3243 || (warn_suggest_final_methods
3244 || warn_suggest_final_types))
3245 && optimize);
3248 virtual unsigned int execute (function *) { return ipa_devirt (); }
3250 }; // class pass_ipa_devirt
3252 } // anon namespace
3254 ipa_opt_pass_d *
3255 make_pass_ipa_devirt (gcc::context *ctxt)
3257 return new pass_ipa_devirt (ctxt);
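/* The pass object is created by the pass manager; in GCC's passes.def the
   IPA pass list typically contains an entry of the form

     NEXT_PASS (pass_ipa_devirt);

   which is how make_pass_ipa_devirt above gets invoked (shown here only as
   an illustration).  */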
3260 #include "gt-ipa-devirt.h"