Fix previous commit.
[official-gcc.git] / gcc / ipa-devirt.c
blobbc94a79d0380b7c8ec7913eff83451b2302d484c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
     RECORD_TYPE by the C++ frontend.  It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
     inheritance the individual virtual tables are pointed to by
     BINFO_VTABLE of base binfos (that differs from BINFO_VTABLE of
     the binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
     This is the callgraph representation of a virtual method call.  Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "hash-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "inchash.h"
122 #include "tree-pretty-print.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-alias.h"
125 #include "internal-fn.h"
126 #include "gimple-fold.h"
127 #include "gimple-expr.h"
128 #include "gimple.h"
129 #include "ipa-inline.h"
130 #include "diagnostic.h"
131 #include "tree-dfa.h"
132 #include "demangle.h"
133 #include "dbgcnt.h"
134 #include "gimple-pretty-print.h"
135 #include "stor-layout.h"
136 #include "intl.h"
137 #include "hash-map.h"
138 #include "data-streamer.h"
139 #include "lto-streamer.h"
140 #include "streamer-hooks.h"
/* Hash based set of pairs of types.  Used as the key type when
   memoizing which pairs of types were already structurally compared,
   so recursive type graphs do not cause infinite recursion.  */
typedef struct
{
  tree first;
  tree second;
} type_pair;
149 struct pair_traits : default_hashset_traits
151 static hashval_t
152 hash (type_pair p)
154 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
156 static bool
157 is_empty (type_pair p)
159 return p.first == NULL;
161 static bool
162 is_deleted (type_pair p ATTRIBUTE_UNUSED)
164 return false;
166 static bool
167 equal (const type_pair &a, const type_pair &b)
169 return a.first==b.first && a.second == b.second;
171 static void
172 mark_empty (type_pair &e)
174 e.first = NULL;
/* Forward declaration; the definition appears later in this file.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair,pair_traits> *);

/* Set once any One Definition Rule violation has been diagnosed.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here? */
  bool odr_violated;
};
/* Forward declaration; defined later in this file.  */
static bool contains_type_p (tree, HOST_WIDE_INT, tree);
219 /* Return true if BINFO corresponds to a type with virtual methods.
221 Every type has several BINFOs. One is the BINFO associated by the type
222 while other represents bases of derived types. The BINFOs representing
223 bases do not have BINFO_VTABLE pointer set when this is the single
224 inheritance (because vtables are shared). Look up the BINFO of type
225 and check presence of its vtable. */
227 static inline bool
228 polymorphic_type_binfo_p (tree binfo)
230 /* See if BINFO's type has an virtual table associtated with it. */
231 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
234 /* Return TRUE if all derived types of T are known and thus
235 we may consider the walk of derived type complete.
237 This is typically true only for final anonymous namespace types and types
238 defined within functions (that may be COMDAT and thus shared across units,
239 but with the same set of derived types). */
241 static bool
242 type_all_derivations_known_p (tree t)
244 if (TYPE_FINAL_P (t))
245 return true;
246 if (flag_ltrans)
247 return false;
248 if (type_in_anonymous_namespace_p (t))
249 return true;
250 return (decl_function_context (TYPE_NAME (t)) != NULL);
253 /* Return TURE if type's constructors are all visible. */
255 static bool
256 type_all_ctors_visible_p (tree t)
258 return !flag_ltrans
259 && symtab->state >= CONSTRUCTION
260 /* We can not always use type_all_derivations_known_p.
261 For function local types we must assume case where
262 the function is COMDAT and shared in between units.
264 TODO: These cases are quite easy to get, but we need
265 to keep track of C++ privatizing via -Wno-weak
266 as well as the IPA privatizing. */
267 && type_in_anonymous_namespace_p (t);
270 /* Return TRUE if type may have instance. */
272 static bool
273 type_possibly_instantiated_p (tree t)
275 tree vtable;
276 varpool_node *vnode;
278 /* TODO: Add abstract types here. */
279 if (!type_all_ctors_visible_p (t))
280 return true;
282 vtable = BINFO_VTABLE (TYPE_BINFO (t));
283 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
284 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
285 vnode = varpool_node::get (vtable);
286 return vnode && vnode->definition;
/* One Definition Rule hashtable helpers.  Keys are odr_type_d nodes;
   lookups are done by tree type node.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
300 /* Return type that was declared with T's name so that T is an
301 qualified variant of it. */
303 static inline tree
304 main_odr_variant (const_tree t)
306 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
307 return TREE_TYPE (TYPE_NAME (t));
308 /* Unnamed types and non-C++ produced types can be compared by variants. */
309 else
310 return TYPE_MAIN_VARIANT (t);
/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* ODR types have name specified.  */
  if (TYPE_NAME (t)
      && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
    return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));

  /* For polymorphic types that were compiled with -fno-lto-odr-type-merging
     we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      /* BINFO_VTABLE may be the vtable decl itself or an offset into
	 it expressed as POINTER_PLUS_EXPR; fold the offset in.  */
      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Builtin types may appear as main variants of ODR types and are unique.
     Sanity check we do not get anything that looks non-builtin.  */
  gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
		       || TREE_CODE (t) == VOID_TYPE
		       || TREE_CODE (t) == COMPLEX_TYPE
		       || TREE_CODE (t) == REAL_TYPE
		       || TREE_CODE (t) == POINTER_TYPE);
  return htab_hash_pointer (t);
}
363 /* Return the computed hashcode for ODR_TYPE. */
365 inline hashval_t
366 odr_hasher::hash (const value_type *odr_type)
368 return hash_type_name (odr_type->type);
371 /* For languages with One Definition Rule, work out if
372 types are the same based on their name.
374 This is non-trivial for LTO where minnor differences in
375 the type representation may have prevented type merging
376 to merge two copies of otherwise equivalent type.
378 Until we start streaming mangled type names, this function works
379 only for polymorphic types. */
381 bool
382 types_same_for_odr (const_tree type1, const_tree type2)
384 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
386 type1 = main_odr_variant (type1);
387 type2 = main_odr_variant (type2);
389 if (type1 == type2)
390 return true;
392 if (!in_lto_p)
393 return false;
395 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
396 on the corresponding TYPE_STUB_DECL. */
397 if (type_in_anonymous_namespace_p (type1)
398 || type_in_anonymous_namespace_p (type2))
399 return false;
402 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
404 Ideally we should never meed types without ODR names here. It can however
405 happen in two cases:
407 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
408 Here testing for equivalence is safe, since their MAIN_VARIANTs are
409 unique.
410 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
411 establish precise ODR equivalency, but for correctness we care only
412 about equivalency on complete polymorphic types. For these we can
413 compare assembler names of their virtual tables. */
414 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
415 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
417 /* See if types are obvoiusly different (i.e. different codes
418 or polymorphis wrt non-polymorphic). This is not strictly correct
419 for ODR violating programs, but we can't do better without streaming
420 ODR names. */
421 if (TREE_CODE (type1) != TREE_CODE (type2))
422 return false;
423 if (TREE_CODE (type1) == RECORD_TYPE
424 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
425 return false;
426 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
427 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
428 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
429 return false;
431 /* At the moment we have no way to establish ODR equivlaence at LTO
432 other than comparing virtual table pointrs of polymorphic types.
433 Eventually we should start saving mangled names in TYPE_NAME.
434 Then this condition will become non-trivial. */
436 if (TREE_CODE (type1) == RECORD_TYPE
437 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
438 && BINFO_VTABLE (TYPE_BINFO (type1))
439 && BINFO_VTABLE (TYPE_BINFO (type2)))
441 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
442 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
443 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
444 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
445 return (operand_equal_p (TREE_OPERAND (v1, 1),
446 TREE_OPERAND (v2, 1), 0)
447 && DECL_ASSEMBLER_NAME
448 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
449 == DECL_ASSEMBLER_NAME
450 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
452 gcc_unreachable ();
454 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
455 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
458 /* Return true if we can decide on ODR equivalency.
460 In non-LTO it is always decide, in LTO however it depends in the type has
461 ODR info attached. */
463 static bool
464 types_odr_comparable (tree t1, tree t2)
466 return (!in_lto_p
467 || main_odr_variant (t1) == main_odr_variant (t2)
468 || (odr_type_p (t1) && odr_type_p (t2))
469 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
470 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
471 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
472 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
475 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
476 known, be conservative and return false. */
478 static bool
479 types_must_be_same_for_odr (tree t1, tree t2)
481 if (types_odr_comparable (t1, t2))
482 return types_same_for_odr (t1, t2);
483 else
484 return main_odr_variant (t1) == main_odr_variant (t2);
487 /* Compare types T1 and T2 and return true if they are
488 equivalent. */
490 inline bool
491 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
493 tree t2 = const_cast <tree> (ct2);
495 gcc_checking_assert (main_odr_variant (t2) == t2);
496 if (t1->type == t2)
497 return true;
498 if (!in_lto_p)
499 return false;
500 return types_same_for_odr (t1->type, t2);
503 /* Free ODR type V. */
505 inline void
506 odr_hasher::remove (value_type *v)
508 v->bases.release ();
509 v->derived_types.release ();
510 if (v->types_set)
511 delete v->types_set;
512 ggc_free (v);
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
527 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
528 void
529 set_type_binfo (tree type, tree binfo)
531 for (; type; type = TYPE_NEXT_VARIANT (type))
532 if (COMPLETE_TYPE_P (type))
533 TYPE_BINFO (type) = binfo;
534 else
535 gcc_assert (!TYPE_BINFO (type));
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<type_pair,pair_traits> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For ODR types be sure to compare their names.
     To support -wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      if (!types_same_for_odr (t1, t2))
        return false;
      /* Limit recursion: if subtypes are ODR types and we know
         that they are same, be happy.  */
      if (!get_odr_type (t1, true)->odr_violated)
        return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Canonicalize the pair by TYPE_UID so each unordered pair is visited
     at most once; if it was already seen, assume equivalence to break
     recursion cycles.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited);
}
592 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
593 violation warings. */
595 void
596 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
598 int n1, n2;
599 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
601 odr_violation_reported = true;
602 if (DECL_VIRTUAL_P (prevailing->decl))
604 varpool_node *tmp = prevailing;
605 prevailing = vtable;
606 vtable = tmp;
608 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
609 OPT_Wodr,
610 "virtual table of type %qD violates one definition rule",
611 DECL_CONTEXT (vtable->decl)))
612 inform (DECL_SOURCE_LOCATION (prevailing->decl),
613 "variable of same assembler name as the virtual table is "
614 "defined in another translation unit");
615 return;
617 if (!prevailing->definition || !vtable->definition)
618 return;
619 for (n1 = 0, n2 = 0; true; n1++, n2++)
621 struct ipa_ref *ref1, *ref2;
622 bool end1, end2;
623 end1 = !prevailing->iterate_reference (n1, ref1);
624 end2 = !vtable->iterate_reference (n2, ref2);
625 if (end1 && end2)
626 return;
627 if (!end1 && !end2
628 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
629 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
630 && !n2
631 && !DECL_VIRTUAL_P (ref2->referred->decl)
632 && DECL_VIRTUAL_P (ref1->referred->decl))
634 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
635 "virtual table of type %qD contains RTTI information",
636 DECL_CONTEXT (vtable->decl)))
638 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
639 "but is prevailed by one without from other translation unit");
640 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
641 "RTTI will not work on this type");
643 n2++;
644 end2 = !vtable->iterate_reference (n2, ref2);
646 if (!end1 && !end2
647 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
648 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
649 && !n1
650 && !DECL_VIRTUAL_P (ref1->referred->decl)
651 && DECL_VIRTUAL_P (ref2->referred->decl))
653 n1++;
654 end1 = !vtable->iterate_reference (n1, ref1);
656 if (end1 || end2)
658 if (end1)
660 varpool_node *tmp = prevailing;
661 prevailing = vtable;
662 vtable = tmp;
663 ref1 = ref2;
665 if (warning_at (DECL_SOURCE_LOCATION
666 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
667 "virtual table of type %qD violates "
668 "one definition rule",
669 DECL_CONTEXT (vtable->decl)))
671 inform (DECL_SOURCE_LOCATION
672 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
673 "the conflicting type defined in another translation "
674 "unit");
675 inform (DECL_SOURCE_LOCATION
676 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
677 "contains additional virtual method %qD",
678 ref1->referred->decl);
680 return;
682 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
683 != DECL_ASSEMBLER_NAME (ref2->referred->decl))
685 if (warning_at (DECL_SOURCE_LOCATION
686 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
687 "virtual table of type %qD violates "
688 "one definition rule ",
689 DECL_CONTEXT (vtable->decl)))
691 inform (DECL_SOURCE_LOCATION
692 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
693 "the conflicting type defined in another translation "
694 "unit");
695 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
696 "virtual method %qD", ref1->referred->decl);
697 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
698 "ought to match virtual method %qD but does not",
699 ref2->referred->decl);
700 return;
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Normalize so ST1 is the non-NULL field.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Point the final REASON note at the other unit's field when known.  */
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used on same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in different namespace ",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}
791 /* Compare T1 and T2, report ODR violations if WARN is true and set
792 WARNED to true if anything is reported. Return true if types match.
793 If true is returned, the types are also compatible in the sense of
794 gimple_canonical_types_compatible_p. */
796 static bool
797 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<type_pair,pair_traits> *visited)
799 /* Check first for the obvious case of pointer identity. */
800 if (t1 == t2)
801 return true;
802 gcc_assert (!type_in_anonymous_namespace_p (t1));
803 gcc_assert (!type_in_anonymous_namespace_p (t2));
805 /* Can't be the same type if the types don't have the same code. */
806 if (TREE_CODE (t1) != TREE_CODE (t2))
808 warn_odr (t1, t2, NULL, NULL, warn, warned,
809 G_("a different type is defined in another translation unit"));
810 return false;
813 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
815 warn_odr (t1, t2, NULL, NULL, warn, warned,
816 G_("a type with different qualifiers is defined in another "
817 "translation unit"));
818 return false;
821 if (comp_type_attributes (t1, t2) != 1)
823 warn_odr (t1, t2, NULL, NULL, warn, warned,
824 G_("a type with attributes "
825 "is defined in another translation unit"));
826 return false;
829 if (TREE_CODE (t1) == ENUMERAL_TYPE)
831 tree v1, v2;
832 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
833 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
835 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
837 warn_odr (t1, t2, NULL, NULL, warn, warned,
838 G_("an enum with different value name"
839 " is defined in another translation unit"));
840 return false;
842 if (TREE_VALUE (v1) != TREE_VALUE (v2)
843 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
844 DECL_INITIAL (TREE_VALUE (v2)), 0))
846 warn_odr (t1, t2, NULL, NULL, warn, warned,
847 G_("an enum with different values is defined"
848 " in another translation unit"));
849 return false;
852 if (v1 || v2)
854 warn_odr (t1, t2, NULL, NULL, warn, warned,
855 G_("an enum with mismatching number of values "
856 "is defined in another translation unit"));
857 return false;
861 /* Non-aggregate types can be handled cheaply. */
862 if (INTEGRAL_TYPE_P (t1)
863 || SCALAR_FLOAT_TYPE_P (t1)
864 || FIXED_POINT_TYPE_P (t1)
865 || TREE_CODE (t1) == VECTOR_TYPE
866 || TREE_CODE (t1) == COMPLEX_TYPE
867 || TREE_CODE (t1) == OFFSET_TYPE
868 || POINTER_TYPE_P (t1))
870 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
872 warn_odr (t1, t2, NULL, NULL, warn, warned,
873 G_("a type with different precision is defined "
874 "in another translation unit"));
875 return false;
877 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
879 warn_odr (t1, t2, NULL, NULL, warn, warned,
880 G_("a type with different signedness is defined "
881 "in another translation unit"));
882 return false;
885 if (TREE_CODE (t1) == INTEGER_TYPE
886 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
888 /* char WRT uint_8? */
889 warn_odr (t1, t2, NULL, NULL, warn, warned,
890 G_("a different type is defined in another "
891 "translation unit"));
892 return false;
895 /* For canonical type comparisons we do not want to build SCCs
896 so we cannot compare pointed-to types. But we can, for now,
897 require the same pointed-to type kind and match what
898 useless_type_conversion_p would do. */
899 if (POINTER_TYPE_P (t1))
901 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
902 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
904 warn_odr (t1, t2, NULL, NULL, warn, warned,
905 G_("it is defined as a pointer in different address "
906 "space in another translation unit"));
907 return false;
910 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
912 warn_odr (t1, t2, NULL, NULL, warn, warned,
913 G_("it is defined as a pointer to different type "
914 "in another translation unit"));
915 if (warn && warned)
916 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
917 return false;
921 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
922 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
924 /* Probably specific enough. */
925 warn_odr (t1, t2, NULL, NULL, warn, warned,
926 G_("a different type is defined "
927 "in another translation unit"));
928 if (warn && warned)
929 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
930 return false;
933 /* Do type-specific comparisons. */
934 else switch (TREE_CODE (t1))
936 case ARRAY_TYPE:
938 /* Array types are the same if the element types are the same and
939 the number of elements are the same. */
940 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
942 warn_odr (t1, t2, NULL, NULL, warn, warned,
943 G_("a different type is defined in another "
944 "translation unit"));
945 if (warn && warned)
946 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
948 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
949 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
950 == TYPE_NONALIASED_COMPONENT (t2));
952 tree i1 = TYPE_DOMAIN (t1);
953 tree i2 = TYPE_DOMAIN (t2);
955 /* For an incomplete external array, the type domain can be
956 NULL_TREE. Check this condition also. */
957 if (i1 == NULL_TREE || i2 == NULL_TREE)
958 return true;
960 tree min1 = TYPE_MIN_VALUE (i1);
961 tree min2 = TYPE_MIN_VALUE (i2);
962 tree max1 = TYPE_MAX_VALUE (i1);
963 tree max2 = TYPE_MAX_VALUE (i2);
965 /* In C++, minimums should be always 0. */
966 gcc_assert (min1 == min2);
967 if (!operand_equal_p (max1, max2, 0))
969 warn_odr (t1, t2, NULL, NULL, warn, warned,
970 G_("an array of different size is defined "
971 "in another translation unit"));
972 return false;
975 break;
977 case METHOD_TYPE:
978 case FUNCTION_TYPE:
979 /* Function types are the same if the return type and arguments types
980 are the same. */
981 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
983 warn_odr (t1, t2, NULL, NULL, warn, warned,
984 G_("has different return value "
985 "in another translation unit"));
986 if (warn && warned)
987 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
988 return false;
991 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
992 return true;
993 else
995 tree parms1, parms2;
997 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
998 parms1 && parms2;
999 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1001 if (!odr_subtypes_equivalent_p
1002 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1004 warn_odr (t1, t2, NULL, NULL, warn, warned,
1005 G_("has different parameters in another "
1006 "translation unit"));
1007 if (warn && warned)
1008 warn_types_mismatch (TREE_VALUE (parms1),
1009 TREE_VALUE (parms2));
1010 return false;
1014 if (parms1 || parms2)
1016 warn_odr (t1, t2, NULL, NULL, warn, warned,
1017 G_("has different parameters "
1018 "in another translation unit"));
1019 return false;
1022 return true;
1025 case RECORD_TYPE:
1026 case UNION_TYPE:
1027 case QUAL_UNION_TYPE:
1029 tree f1, f2;
1031 /* For aggregate types, all the fields must be the same. */
1032 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1034 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1035 f1 || f2;
1036 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1038 /* Skip non-fields. */
1039 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1040 f1 = TREE_CHAIN (f1);
1041 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1042 f2 = TREE_CHAIN (f2);
1043 if (!f1 || !f2)
1044 break;
1045 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1046 break;
1047 if (DECL_NAME (f1) != DECL_NAME (f2)
1048 && !DECL_ARTIFICIAL (f1))
1050 warn_odr (t1, t2, f1, f2, warn, warned,
1051 G_("a field with different name is defined "
1052 "in another translation unit"));
1053 return false;
1055 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1057 /* Do not warn about artificial fields and just go into generic
1058 field mismatch warning. */
1059 if (DECL_ARTIFICIAL (f1))
1060 break;
1062 warn_odr (t1, t2, f1, f2, warn, warned,
1063 G_("a field of same name but different type "
1064 "is defined in another translation unit"));
1065 if (warn && warned)
1066 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1067 return false;
1069 if (!gimple_compare_field_offset (f1, f2))
1071 /* Do not warn about artificial fields and just go into generic
1072 field mismatch warning. */
1073 if (DECL_ARTIFICIAL (f1))
1074 break;
1075 warn_odr (t1, t2, t1, t2, warn, warned,
1076 G_("fields has different layout "
1077 "in another translation unit"));
1078 return false;
1080 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1081 == DECL_NONADDRESSABLE_P (f2));
1084 /* If one aggregate has more fields than the other, they
1085 are not the same. */
1086 if (f1 || f2)
1088 if (f1 && DECL_ARTIFICIAL (f1))
1089 f1 = NULL;
1090 if (f2 && DECL_ARTIFICIAL (f2))
1091 f2 = NULL;
1092 if (f1 || f2)
1093 warn_odr (t1, t2, f1, f2, warn, warned,
1094 G_("a type with different number of fields "
1095 "is defined in another translation unit"));
1096 /* Ideally we should never get this generic message. */
1097 else
1098 warn_odr (t1, t2, f1, f2, warn, warned,
1099 G_("a type with different memory representation "
1100 "is defined in another translation unit"));
1102 return false;
1104 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1105 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1106 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1108 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1109 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1110 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1112 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1114 warn_odr (t1, t2, f1, f2, warn, warned,
1115 G_("a different method of same type "
1116 "is defined in another translation unit"));
1117 return false;
1119 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1121 warn_odr (t1, t2, f1, f2, warn, warned,
1122 G_("s definition that differs by virtual "
1123 "keyword in another translation unit"));
1124 return false;
1126 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1128 warn_odr (t1, t2, f1, f2, warn, warned,
1129 G_("virtual table layout differs in another "
1130 "translation unit"));
1131 return false;
1133 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1135 warn_odr (t1, t2, f1, f2, warn, warned,
1136 G_("method with incompatible type is defined "
1137 "in another translation unit"));
1138 return false;
1141 if (f1 || f2)
1143 warn_odr (t1, t2, NULL, NULL, warn, warned,
1144 G_("a type with different number of methods "
1145 "is defined in another translation unit"));
1146 return false;
1150 break;
1152 case VOID_TYPE:
1153 break;
1155 default:
1156 debug_tree (t1);
1157 gcc_unreachable ();
1160 /* Those are better to come last as they are utterly uninformative. */
1161 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1162 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1164 warn_odr (t1, t2, NULL, NULL, warn, warned,
1165 G_("a type with different size "
1166 "is defined in another translation unit"));
1167 return false;
1169 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1170 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1172 warn_odr (t1, t2, NULL, NULL, warn, warned,
1173 G_("a type with different alignment "
1174 "is defined in another translation unit"));
1175 return false;
1177 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1178 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1179 TYPE_SIZE_UNIT (t2), 0));
1180 return true;
1183 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1184 from VAL->type. This may happen in LTO where tree merging did not merge
1185 all variants of the same type. It may or may not mean the ODR violation.
1186 Add it to the list of duplicates and warn on some violations. */
/* Returns true when VAL's leader changed to a newly seen complete type,
   i.e. the caller (get_odr_type) must (re)build VAL's base list.  */
1188 static bool
1189 add_type_duplicate (odr_type val, tree type)
1191 bool build_bases = false;
/* Lazily allocate the per-ODR-type set used to de-duplicate variants.  */
1192 if (!val->types_set)
1193 val->types_set = new hash_set<tree>;
1195 /* Always prefer complete type to be the leader. */
/* Swap so VAL->type becomes the complete representative; the old
   (incomplete) leader is then recorded as a duplicate below.  */
1196 if (!COMPLETE_TYPE_P (val->type)
1197 && COMPLETE_TYPE_P (type))
1199 tree tmp = type;
1201 build_bases = true;
1202 type = val->type;
1203 val->type = tmp;
1206 /* See if this duplicate is new. */
/* hash_set::add returns false when TYPE was not yet in the set.  */
1207 if (!val->types_set->add (type))
1209 bool merge = true;
1210 bool base_mismatch = false;
1211 unsigned int i,j;
1212 bool warned = false;
1213 hash_set<type_pair,pair_traits> visited;
/* Multiple tree representations of one ODR type only arise in LTO.  */
1215 gcc_assert (in_lto_p);
1216 vec_safe_push (val->types, type);
1218 /* First we compare memory layout. */
/* Warn at most once per ODR type: suppress during ltrans and when a
   violation was already reported for VAL.  */
1219 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
1220 &warned, &visited))
1222 merge = false;
1223 odr_violation_reported = true;
1224 val->odr_violated = true;
1225 if (symtab->dump_file)
1227 fprintf (symtab->dump_file, "ODR violation\n");
1229 print_node (symtab->dump_file, "", val->type, 0);
1230 putc ('\n',symtab->dump_file);
1231 print_node (symtab->dump_file, "", type, 0);
1232 putc ('\n',symtab->dump_file);
1236 /* Next sanity check that bases are the same. If not, we will end
1237 up producing wrong answers. */
1238 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1239 && TREE_CODE (val->type) == RECORD_TYPE
1240 && TREE_CODE (type) == RECORD_TYPE
1241 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
/* Walk polymorphic bases of TYPE and compare them pairwise, in order,
   against VAL->bases.  I indexes TYPE's binfos, J indexes VAL->bases.  */
1243 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1244 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
1246 odr_type base = get_odr_type
1247 (BINFO_TYPE
1248 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1249 i)),
1250 true);
1251 if (val->bases.length () <= j || val->bases[j] != base)
1252 base_mismatch = true;
1253 j++;
1255 if (base_mismatch)
1257 merge = false;
1258 odr_violation_reported = true;
1260 if (!warned && !val->odr_violated)
1261 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1262 "a type with the same name but different bases is "
1263 "defined in another translation unit");
1264 val->odr_violated = true;
1265 if (symtab->dump_file)
/* NOTE(review): "bse" in the dump-only message below looks like a typo
   for "base".  */
1267 fprintf (symtab->dump_file, "ODR bse violation or merging bug?\n");
1269 print_node (symtab->dump_file, "", val->type, 0);
1270 putc ('\n',symtab->dump_file);
1271 print_node (symtab->dump_file, "", type, 0);
1272 putc ('\n',symtab->dump_file);
1277 /* Regularize things a little. During LTO same types may come with
1278 different BINFOs. Either because their virtual table was
1279 not merged by tree merging and only later at decl merging or
1280 because one type comes with external vtable, while other
1281 with internal. We want to merge equivalent binfos to conserve
1282 memory and streaming overhead.
1284 The external vtables are more harmful: they contain references
1285 to external declarations of methods that may be defined in the
1286 merged LTO unit. For this reason we absolutely need to remove
1287 them and replace by internal variants. Not doing so will lead
1288 to incomplete answers from possible_polymorphic_call_targets.
1290 FIXME: disable for now; because ODR types are now build during
1291 streaming in, the variants do not need to be linked to the type,
1292 yet. We need to do the merging in cleanup pass to be implemented
1293 soon. */
/* The "&& 0" below deliberately disables this merging path per the
   FIXME above; the code is kept for the future cleanup pass.  */
1294 if (!flag_ltrans && merge
1295 && 0
1296 && TREE_CODE (val->type) == RECORD_TYPE
1297 && TREE_CODE (type) == RECORD_TYPE
1298 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1299 && TYPE_MAIN_VARIANT (type) == type
1300 && TYPE_MAIN_VARIANT (val->type) == val->type
1301 && BINFO_VTABLE (TYPE_BINFO (val->type))
1302 && BINFO_VTABLE (TYPE_BINFO (type))
1304 tree master_binfo = TYPE_BINFO (val->type);
1305 tree v1 = BINFO_VTABLE (master_binfo);
1306 tree v2 = BINFO_VTABLE (TYPE_BINFO (type))
/* Vtable references may be POINTER_PLUS_EXPRs (address + offset);
   strip to the underlying VAR_DECLs and check the offsets agree.  */
1308 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1310 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1311 && operand_equal_p (TREE_OPERAND (v1, 1),
1312 TREE_OPERAND (v2, 1), 0));
1313 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1314 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1316 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1317 == DECL_ASSEMBLER_NAME (v2));
/* Prefer the binfo whose vtable is defined in this unit; rewrite all
   duplicates that shared the old master binfo.  */
1319 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1321 unsigned int i;
1323 set_type_binfo (val->type, TYPE_BINFO (type));
1324 for (i = 0; i < val->types->length (); i++)
1326 if (TYPE_BINFO ((*val->types)[i])
1327 == master_binfo)
1328 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1330 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1332 else
1333 set_type_binfo (type, master_binfo);
1336 return build_bases;
1339 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1340 possibly new entry. */
1342 odr_type
1343 get_odr_type (tree type, bool insert)
1345 odr_type_d **slot;
1346 odr_type val;
1347 hashval_t hash;
1348 bool build_bases = false;
1349 bool insert_to_odr_array = false;
1350 int base_id = -1;
1352 type = main_odr_variant (type);
1354 hash = hash_type_name (type);
1355 slot = odr_hash->find_slot_with_hash (type, hash,
1356 insert ? INSERT : NO_INSERT);
1357 if (!slot)
1358 return NULL;
1360 /* See if we already have entry for type. */
1361 if (*slot)
1363 val = *slot;
1365 /* With LTO we need to support multiple tree representation of
1366 the same ODR type. */
1367 if (val->type != type)
1368 build_bases = add_type_duplicate (val, type);
1370 else
1372 val = ggc_cleared_alloc<odr_type_d> ();
1373 val->type = type;
1374 val->bases = vNULL;
1375 val->derived_types = vNULL;
1376 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1377 build_bases = COMPLETE_TYPE_P (val->type);
1378 insert_to_odr_array = true;
1381 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1382 && type == TYPE_MAIN_VARIANT (type))
1384 tree binfo = TYPE_BINFO (type);
1385 unsigned int i;
1387 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1389 val->all_derivations_known = type_all_derivations_known_p (type);
1390 *slot = val;
1391 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1392 /* For now record only polymorphic types. other are
1393 pointless for devirtualization and we can not precisely
1394 determine ODR equivalency of these during LTO. */
1395 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1397 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1398 i)),
1399 true);
1400 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1401 base->derived_types.safe_push (val);
1402 val->bases.safe_push (base);
1403 if (base->id > base_id)
1404 base_id = base->id;
1407 /* Ensure that type always appears after bases. */
1408 if (insert_to_odr_array)
1410 if (odr_types_ptr)
1411 val->id = odr_types.length ();
1412 vec_safe_push (odr_types_ptr, val);
1414 else if (base_id > val->id)
1416 odr_types[val->id] = 0;
1417 /* Be sure we did not recorded any derived types; these may need
1418 renumbering too. */
1419 gcc_assert (val->derived_types.length() == 0);
1420 if (odr_types_ptr)
1421 val->id = odr_types.length ();
1422 vec_safe_push (odr_types_ptr, val);
1424 return val;
1427 /* Add TYPE od ODR type hash. */
1429 void
1430 register_odr_type (tree type)
1432 if (!odr_hash)
1433 odr_hash = new odr_hash_type (23);
1434 /* Arrange things to be nicer and insert main variants first. */
1435 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
1436 get_odr_type (TYPE_MAIN_VARIANT (type), true);
1437 if (TYPE_MAIN_VARIANT (type) != type)
1438 get_odr_type (type, true);
1441 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
1442 recusive printing. */
1444 static void
1445 dump_odr_type (FILE *f, odr_type t, int indent=0)
1447 unsigned int i;
1448 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1449 print_generic_expr (f, t->type, TDF_SLIM);
1450 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1451 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1452 if (TYPE_NAME (t->type))
1454 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1455 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1456 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1458 if (t->bases.length ())
1460 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1461 for (i = 0; i < t->bases.length (); i++)
1462 fprintf (f, " %i", t->bases[i]->id);
1463 fprintf (f, "\n");
1465 if (t->derived_types.length ())
1467 fprintf (f, "%*s derived types:\n", indent * 2, "");
1468 for (i = 0; i < t->derived_types.length (); i++)
1469 dump_odr_type (f, t->derived_types[i], indent + 1);
1471 fprintf (f, "\n");
1474 /* Dump the type inheritance graph. */
/* Dumps every recorded ODR type to F: first the inheritance trees rooted
   at types without bases (dump_odr_type recurses into derived types),
   then the duplicate tree variants collected for each type.  */
1476 static void
1477 dump_type_inheritance_graph (FILE *f)
1479 unsigned int i;
1480 if (!odr_types_ptr)
1481 return;
1482 fprintf (f, "\n\nType inheritance graph:\n");
/* Only root types (no bases) are dumped directly; derived types are
   printed via the recursion in dump_odr_type.  */
1483 for (i = 0; i < odr_types.length (); i++)
1485 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1486 dump_odr_type (f, odr_types[i]);
/* Second pass: dump the duplicate tree representations (LTO) together
   with their enclosing type contexts.  */
1488 for (i = 0; i < odr_types.length (); i++)
1490 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1492 unsigned int j;
1493 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1494 print_node (f, "", odr_types[i]->type, 0);
1495 for (j = 0; j < odr_types[i]->types->length (); j++)
1497 tree t;
1498 fprintf (f, "duplicate #%i\n", j);
1499 print_node (f, "", (*odr_types[i]->types)[j], 0);
1500 t = (*odr_types[i]->types)[j];
/* Walk up TYPE_CONTEXT links, printing each enclosing scope.  */
1501 while (TYPE_P (t) && TYPE_CONTEXT (t))
1503 t = TYPE_CONTEXT (t);
1504 print_node (f, "", t, 0);
1506 putc ('\n',f);
1512 /* Given method type T, return type of class it belongs to.
1513 Lookup this pointer and get its type. */
1515 tree
1516 method_class_type (const_tree t)
1518 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1519 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1521 return TREE_TYPE (first_parm_type);
1524 /* Initialize IPA devirt and build inheritance tree graph. */
/* Idempotent: the presence of odr_hash marks the graph as already built.
   Registers the classes of all virtual methods and of all virtual tables
   seen in the symbol table.  */
1526 void
1527 build_type_inheritance_graph (void)
1529 struct symtab_node *n;
1530 FILE *inheritance_dump_file;
1531 int flags;
1533 if (odr_hash)
1534 return;
1535 timevar_push (TV_IPA_INHERITANCE);
/* dump_begin may return NULL when the inheritance dump is not enabled.  */
1536 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
1537 odr_hash = new odr_hash_type (23);
1539 /* We reconstruct the graph starting of types of all methods seen in the
1540 the unit. */
1541 FOR_EACH_SYMBOL (n)
1542 if (is_a <cgraph_node *> (n)
1543 && DECL_VIRTUAL_P (n->decl)
1544 && n->real_symbol_p ())
1545 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1546 true);
1548 /* Look also for virtual tables of types that do not define any methods.
1550 We need it in a case where class B has virtual base of class A
1551 re-defining its virtual method and there is class C with no virtual
1552 methods with B as virtual base.
1554 Here we output B's virtual method in two variant - for non-virtual
1555 and virtual inheritance. B's virtual table has non-virtual version,
1556 while C's has virtual.
1558 For this reason we need to know about C in order to include both
1559 variants of B. More correctly, record_target_from_binfo should
1560 add both variants of the method when walking B, but we have no
1561 link in between them.
1563 We rely on fact that either the method is exported and thus we
1564 assume it is called externally or C is in anonymous namespace and
1565 thus we will see the vtable. */
1567 else if (is_a <varpool_node *> (n)
1568 && DECL_VIRTUAL_P (n->decl)
1569 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1570 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1571 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
1572 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
1573 if (inheritance_dump_file)
1575 dump_type_inheritance_graph (inheritance_dump_file);
1576 dump_end (TDI_inheritance, inheritance_dump_file);
1578 timevar_pop (TV_IPA_INHERITANCE);
1581 /* Return true if N has reference from live virtual table
1582 (and thus can be a destination of polymorphic call).
1583 Be conservatively correct when callgraph is not built or
1584 if the method may be referred externally. */
1586 static bool
1587 referenced_from_vtable_p (struct cgraph_node *node)
1589 int i;
1590 struct ipa_ref *ref;
1591 bool found = false;
1593 if (node->externally_visible
1594 || DECL_EXTERNAL (node->decl)
1595 || node->used_from_other_partition)
1596 return true;
1598 /* Keep this test constant time.
1599 It is unlikely this can happen except for the case where speculative
1600 devirtualization introduced many speculative edges to this node.
1601 In this case the target is very likely alive anyway. */
1602 if (node->ref_list.referring.length () > 100)
1603 return true;
1605 /* We need references built. */
1606 if (symtab->state <= CONSTRUCTION)
1607 return true;
1609 for (i = 0; node->iterate_referring (i, ref); i++)
1611 if ((ref->use == IPA_REF_ALIAS
1612 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1613 || (ref->use == IPA_REF_ADDR
1614 && TREE_CODE (ref->referring->decl) == VAR_DECL
1615 && DECL_VIRTUAL_P (ref->referring->decl)))
1617 found = true;
1618 break;
1620 return found;
1623 /* If TARGET has associated node, record it in the NODES array.
1624 CAN_REFER specify if program can refer to the target directly.
1625 if TARGET is unknown (NULL) or it can not be inserted (for example because
1626 its body was already removed and there is no way to refer to it), clear
1627 COMPLETEP. */
1629 static void
1630 maybe_record_node (vec <cgraph_node *> &nodes,
1631 tree target, hash_set<tree> *inserted,
1632 bool can_refer,
1633 bool *completep)
1635 struct cgraph_node *target_node, *alias_target;
1636 enum availability avail;
1638 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
1639 list of targets; the runtime effect of calling them is undefined.
1640 Only "real" virtual methods should be accounted. */
1641 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1642 return;
1644 if (!can_refer)
1646 /* The only case when method of anonymous namespace becomes unreferable
1647 is when we completely optimized it out. */
1648 if (flag_ltrans
1649 || !target
1650 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
1651 *completep = false;
1652 return;
1655 if (!target)
1656 return;
1658 target_node = cgraph_node::get (target);
1660 /* Preffer alias target over aliases, so we do not get confused by
1661 fake duplicates. */
1662 if (target_node)
1664 alias_target = target_node->ultimate_alias_target (&avail);
1665 if (target_node != alias_target
1666 && avail >= AVAIL_AVAILABLE
1667 && target_node->get_availability ())
1668 target_node = alias_target;
1671 /* Method can only be called by polymorphic call if any
1672 of vtables refering to it are alive.
1674 While this holds for non-anonymous functions, too, there are
1675 cases where we want to keep them in the list; for example
1676 inline functions with -fno-weak are static, but we still
1677 may devirtualize them when instance comes from other unit.
1678 The same holds for LTO.
1680 Currently we ignore these functions in speculative devirtualization.
1681 ??? Maybe it would make sense to be more aggressive for LTO even
1682 eslewhere. */
/* First branch: anonymous-namespace method with no live vtable reference
   is intentionally dropped without touching NODES or COMPLETEP.  */
1683 if (!flag_ltrans
1684 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1685 && (!target_node
1686 || !referenced_from_vtable_p (target_node)))
1688 /* See if TARGET is useful function we can deal with. */
1689 else if (target_node != NULL
1690 && (TREE_PUBLIC (target)
1691 || DECL_EXTERNAL (target)
1692 || target_node->definition)
1693 && target_node->real_symbol_p ())
1695 gcc_assert (!target_node->global.inlined_to);
1696 gcc_assert (target_node->real_symbol_p ());
/* hash_set::add returns false on first insertion; record the node once.  */
1697 if (!inserted->add (target))
1699 cached_polymorphic_call_targets->add (target_node);
1700 nodes.safe_push (target_node);
/* Otherwise TARGET cannot be inserted: the target list is incomplete
   unless the method is anonymous-namespace local (non-LTO).  */
1703 else if (completep
1704 && (!type_in_anonymous_namespace_p
1705 (DECL_CONTEXT (target))
1706 || flag_ltrans))
1707 *completep = false;
1710 /* See if BINFO's type match OUTER_TYPE. If so, lookup
1711 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
1712 method in vtable and insert method to NODES array
1713 or BASES_TO_CONSIDER if this array is non-NULL.
1714 Otherwise recurse to base BINFOs.
1715 This match what get_binfo_at_offset does, but with offset
1716 being unknown.
1718 TYPE_BINFOS is a stack of BINFOS of types with defined
1719 virtual table seen on way from class type to BINFO.
1721 MATCHED_VTABLES tracks virtual tables we already did lookup
1722 for virtual function in. INSERTED tracks nodes we already
1723 inserted.
1725 ANONYMOUS is true if BINFO is part of anonymous namespace.
1727 Clear COMPLETEP when we hit unreferable target.
1730 static void
1731 record_target_from_binfo (vec <cgraph_node *> &nodes,
1732 vec <tree> *bases_to_consider,
1733 tree binfo,
1734 tree otr_type,
1735 vec <tree> &type_binfos,
1736 HOST_WIDE_INT otr_token,
1737 tree outer_type,
1738 HOST_WIDE_INT offset,
1739 hash_set<tree> *inserted,
1740 hash_set<tree> *matched_vtables,
1741 bool anonymous,
1742 bool *completep)
1744 tree type = BINFO_TYPE (binfo);
1745 int i;
1746 tree base_binfo;
/* Push BINFO on the stack while it is visited; every push below is
   paired with a pop before each return path.  */
1749 if (BINFO_VTABLE (binfo))
1750 type_binfos.safe_push (binfo);
1751 if (types_same_for_odr (type, outer_type))
1753 int i;
1754 tree type_binfo = NULL;
1756 /* Lookup BINFO with virtual table. For normal types it is always last
1757 binfo on stack. */
1758 for (i = type_binfos.length () - 1; i >= 0; i--)
1759 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1761 type_binfo = type_binfos[i];
1762 break;
1764 if (BINFO_VTABLE (binfo))
1765 type_binfos.pop ();
1766 /* If this is duplicated BINFO for base shared by virtual inheritance,
1767 we may not have its associated vtable. This is not a problem, since
1768 we will walk it on the other path. */
1769 if (!type_binfo)
1770 return;
1771 tree inner_binfo = get_binfo_at_offset (type_binfo,
1772 offset, otr_type);
1773 if (!inner_binfo)
1775 gcc_assert (odr_violation_reported);
1776 return;
1778 /* For types in anonymous namespace first check if the respective vtable
1779 is alive. If not, we know the type can't be called. */
1780 if (!flag_ltrans && anonymous)
1782 tree vtable = BINFO_VTABLE (inner_binfo);
1783 varpool_node *vnode;
/* Strip the POINTER_PLUS_EXPR wrapper to reach the vtable VAR_DECL.  */
1785 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1786 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1787 vnode = varpool_node::get (vtable);
1788 if (!vnode || !vnode->definition)
1789 return;
1791 gcc_assert (inner_binfo);
/* When collecting into BASES_TO_CONSIDER only test membership (the
   caller will revisit); otherwise mark the vtable as handled now.  */
1792 if (bases_to_consider
1793 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
1794 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
1796 bool can_refer;
1797 tree target = gimple_get_virt_method_for_binfo (otr_token,
1798 inner_binfo,
1799 &can_refer);
1800 if (!bases_to_consider)
1801 maybe_record_node (nodes, target, inserted, can_refer, completep);
1802 /* Destructors are never called via construction vtables. */
1803 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1804 bases_to_consider->safe_push (target);
1806 return;
1809 /* Walk bases. */
1810 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1811 /* Walking bases that have no virtual method is pointless excercise. */
1812 if (polymorphic_type_binfo_p (base_binfo))
1813 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
1814 type_binfos,
1815 otr_token, outer_type, offset, inserted,
1816 matched_vtables, anonymous, completep);
1817 if (BINFO_VTABLE (binfo))
1818 type_binfos.pop ();
1821 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1822 of TYPE, insert them to NODES, recurse into derived nodes.
1823 INSERTED is used to avoid duplicate insertions of methods into NODES.
1824 MATCHED_VTABLES are used to avoid duplicate walking vtables.
1825 Clear COMPLETEP if unreferable target is found.
1827 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
1828 all cases where BASE_SKIPPED is true (because the base is abstract
1829 class). */
1831 static void
1832 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1833 hash_set<tree> *inserted,
1834 hash_set<tree> *matched_vtables,
1835 tree otr_type,
1836 odr_type type,
1837 HOST_WIDE_INT otr_token,
1838 tree outer_type,
1839 HOST_WIDE_INT offset,
1840 bool *completep,
1841 vec <tree> &bases_to_consider,
1842 bool consider_construction)
1844 tree binfo = TYPE_BINFO (type->type);
1845 unsigned int i;
/* Fresh empty binfo stack for this subtree walk; released below.  */
1846 vec <tree> type_binfos = vNULL;
1847 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1849 /* We may need to consider types w/o instances because of possible derived
1850 types using their methods either directly or via construction vtables.
1851 We are safe to skip them when all derivations are known, since we will
1852 handle them later.
1853 This is done by recording them to BASES_TO_CONSIDER array. */
1854 if (possibly_instantiated || consider_construction)
/* Pass BASES_TO_CONSIDER only for the deferred case: a type that is not
   instantiated but whose derivations are all known.  */
1856 record_target_from_binfo (nodes,
1857 (!possibly_instantiated
1858 && type_all_derivations_known_p (type->type))
1859 ? &bases_to_consider : NULL,
1860 binfo, otr_type, type_binfos, otr_token,
1861 outer_type, offset,
1862 inserted, matched_vtables,
1863 type->anonymous_namespace, completep)
1865 type_binfos.release ();
/* Recurse into every recorded derived type.  */
1866 for (i = 0; i < type->derived_types.length (); i++)
1867 possible_polymorphic_call_targets_1 (nodes, inserted,
1868 matched_vtables,
1869 otr_type,
1870 type->derived_types[i],
1871 otr_token, outer_type, offset, completep,
1872 bases_to_consider, consider_construction);
1875 /* Cache of queries for polymorphic call targets.
1877 Enumerating all call targets may get expensive when there are many
1878 polymorphic calls in the program, so we memoize all the previous
1879 queries and avoid duplicated work. */
1881 struct polymorphic_call_target_d
/* Vtable slot of the call; together with CONTEXT and TYPE it keys the
   cache (see polymorphic_call_target_hasher).  */
1883 HOST_WIDE_INT otr_token;
1884 ipa_polymorphic_call_context context;
/* ODR type of the call's static type.  */
1885 odr_type type;
/* Memoized answer: the enumerated possible targets.  */
1886 vec <cgraph_node *> targets;
1887 int speculative_targets;
/* True when TARGETS is known to list every possible target.  */
1888 bool complete;
1889 int type_warning;
1890 tree decl_warning;
1893 /* Polymorphic call target cache helpers. */
/* hash_table traits for the polymorphic call target cache: hashing,
   equality and entry disposal over polymorphic_call_target_d.  */
1895 struct polymorphic_call_target_hasher
1897 typedef polymorphic_call_target_d value_type;
1898 typedef polymorphic_call_target_d compare_type;
1899 static inline hashval_t hash (const value_type *);
1900 static inline bool equal (const value_type *, const compare_type *);
1901 static inline void remove (value_type *);
1904 /* Return the computed hashcode for ODR_QUERY. */
/* Mixes the token, type id, outer-type context (and the speculative
   context when present) plus the context flags.  Must stay consistent
   with the fields compared in ::equal below.  */
1906 inline hashval_t
1907 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1909 inchash::hash hstate (odr_query->otr_token);
1911 hstate.add_wide_int (odr_query->type->id);
1912 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1913 hstate.add_wide_int (odr_query->context.offset);
/* Speculative part is only mixed in when it exists.  */
1915 if (odr_query->context.speculative_outer_type)
1917 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1918 hstate.add_wide_int (odr_query->context.speculative_offset);
1920 hstate.add_flag (odr_query->context.maybe_in_construction);
1921 hstate.add_flag (odr_query->context.maybe_derived_type);
1922 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1923 hstate.commit_flag ();
1924 return hstate.end ();
1927 /* Compare cache entries T1 and T2. */
/* Field-by-field comparison of the key part of a cache entry; mirrors
   the fields that ::hash mixes in.  */
1929 inline bool
1930 polymorphic_call_target_hasher::equal (const value_type *t1,
1931 const compare_type *t2)
1933 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1934 && t1->context.offset == t2->context.offset
1935 && t1->context.speculative_offset == t2->context.speculative_offset
1936 && t1->context.outer_type == t2->context.outer_type
1937 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1938 && t1->context.maybe_in_construction
1939 == t2->context.maybe_in_construction
1940 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1941 && (t1->context.speculative_maybe_derived_type
1942 == t2->context.speculative_maybe_derived_type));
1945 /* Remove entry in polymorphic call target cache hash. */
/* Entries own their target vector and are malloc-allocated, so both are
   released here when the hash table drops an entry.  */
1947 inline void
1948 polymorphic_call_target_hasher::remove (value_type *v)
1950 v->targets.release ();
1951 free (v);
1954 /* Polymorphic call target query cache. */
1956 typedef hash_table<polymorphic_call_target_hasher>
1957 polymorphic_call_target_hash_type;
/* Lazily created; torn down by free_polymorphic_call_targets_hash.  */
1958 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1960 /* Destroy polymorphic call target query cache. */
1962 static void
1963 free_polymorphic_call_targets_hash ()
1965 if (cached_polymorphic_call_targets)
1967 delete polymorphic_call_target_hash;
1968 polymorphic_call_target_hash = NULL;
1969 delete cached_polymorphic_call_targets;
1970 cached_polymorphic_call_targets = NULL;
1974 /* When virtual function is removed, we may need to flush the cache. */
1976 static void
1977 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1979 if (cached_polymorphic_call_targets
1980 && cached_polymorphic_call_targets->contains (n))
1981 free_polymorphic_call_targets_hash ();
1984 /* Return true when TYPE contains an polymorphic type and thus is interesting
1985 for devirtualization machinery. */
1987 bool
1988 contains_polymorphic_type_p (const_tree type)
1990 type = TYPE_MAIN_VARIANT (type);
1992 if (RECORD_OR_UNION_TYPE_P (type))
1994 if (TYPE_BINFO (type)
1995 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1996 return true;
1997 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1998 if (TREE_CODE (fld) == FIELD_DECL
1999 && !DECL_ARTIFICIAL (fld)
2000 && contains_polymorphic_type_p (TREE_TYPE (fld)))
2001 return true;
2002 return false;
2004 if (TREE_CODE (type) == ARRAY_TYPE)
2005 return contains_polymorphic_type_p (TREE_TYPE (type));
2006 return false;
2009 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
2010 at possition CUR_OFFSET within TYPE.
2012 POD can be changed to an instance of a polymorphic type by
2013 placement new. Here we play safe and assume that any
2014 non-polymorphic type is POD. */
2015 bool
2016 possible_placement_new (tree type, tree expected_type,
2017 HOST_WIDE_INT cur_offset)
/* First conjunct: reject the case of a polymorphic RECORD_TYPE when the
   offset is small (below BITS_PER_WORD -- presumably covering the vtable
   pointer region; TODO confirm the heuristic).  Second conjunct: the new
   object must fit inside TYPE when TYPE's size is known; EXPECTED_TYPE
   being NULL is treated as needing a single bit.
   NOTE(review): the size is guarded with tree_fits_shwi_p (signed) but
   read with tree_to_uhwi (unsigned) -- confirm this mix is intended.  */
2019 return ((TREE_CODE (type) != RECORD_TYPE
2020 || !TYPE_BINFO (type)
2021 || cur_offset >= BITS_PER_WORD
2022 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
2023 && (!TYPE_SIZE (type)
2024 || !tree_fits_shwi_p (TYPE_SIZE (type))
2025 || (cur_offset
2026 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
2027 : 1)
2028 <= tree_to_uhwi (TYPE_SIZE (type)))));
/* NOTE(review): this blob extract dropped brace-only lines and fused the
   original file's line numbers into the text; the code lines below are
   preserved byte-for-byte from the blob, only comment lines are added.  */
2031 /* THIS->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
2032 is contained at THIS->OFFSET. Walk the memory representation of
2033 THIS->OUTER_TYPE and find the outermost class type that match
2034 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update THIS
2035 to represent it.
2037 If EXPECTED_TYPE is NULL, just find outermost polymorphic type with
2038 virtual table present at possition OFFSET.
2040 For example when THIS represents type
2041 class A
2043 int a;
2044 class B b;
2046 and we look for type at offset sizeof(int), we end up with B and offset 0.
2047 If the same is produced by multiple inheritance, we end up with A and offset
2048 sizeof(int).
2050 If we can not find corresponding class, give up by setting
2051 THIS->OUTER_TYPE to EXPECTED_TYPE and THIS->OFFSET to NULL.
2052 Return true when lookup was sucesful. */
2054 bool
2055 ipa_polymorphic_call_context::restrict_to_inner_class (tree expected_type)
/* Working copies: TYPE/CUR_OFFSET track the walk; the member fields are
   updated only when a definitive (or speculative) answer is found.  */
2057 tree type = outer_type;
2058 HOST_WIDE_INT cur_offset = offset;
2059 bool speculative = false;
2060 bool size_unknown = false;
2062 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
2063 if (!outer_type)
2065 clear_outer_type (expected_type);
2066 type = expected_type;
2067 cur_offset = 0;
2069 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
2070 that the context is either invalid, or the instance type must be
2071 derived from OUTER_TYPE.
2073 Because the instance type may contain field whose type is of OUTER_TYPE,
2074 we can not derive any effective information about it.
2076 TODO: In the case we know all derrived types, we can definitely do better
2077 here. */
2078 else if (TYPE_SIZE (outer_type)
2079 && tree_fits_shwi_p (TYPE_SIZE (outer_type))
2080 && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
2081 && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
2083 clear_outer_type (expected_type);
2084 type = expected_type;
2085 cur_offset = 0;
2087 /* If derived type is not allowed, we know that the context is invalid. */
2088 if (!maybe_derived_type)
2090 clear_speculation ();
2091 invalid = true;
2092 return false;
/* Reconcile the speculative info with the non-speculative one before
   walking: drop speculation that is redundant or inconsistent.  */
2096 if (speculative_outer_type)
2098 /* Short cirucit the busy work bellow and give up on case when speculation
2099 is obviously the same as outer_type. */
2100 if ((!maybe_derived_type
2101 || speculative_maybe_derived_type)
2102 && types_must_be_same_for_odr (speculative_outer_type, outer_type))
2103 clear_speculation ();
2105 /* See if SPECULATIVE_OUTER_TYPE is contained in or derived from OUTER_TYPE.
2106 In this case speculation is valid only if derived types are allowed.
2108 The test does not really look for derivate, but also accepts the case where
2109 outer_type is a field of speculative_outer_type. In this case eiter
2110 MAYBE_DERIVED_TYPE is false and we have full non-speculative information or
2111 the loop bellow will correctly update SPECULATIVE_OUTER_TYPE
2112 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
2113 else if (speculative_offset < offset
2114 || !contains_type_p (speculative_outer_type,
2115 speculative_offset - offset,
2116 outer_type)
2117 || !maybe_derived_type)
2118 clear_speculation ();
2120 else
2121 /* Regularize things little bit and clear all the fields when no useful
2122 speculatin is known. */
2123 clear_speculation ();
2125 if (!type)
2126 goto no_useful_type_info;
2128 /* Find the sub-object the constant actually refers to and mark whether it is
2129 an artificial one (as opposed to a user-defined one).
2131 This loop is performed twice; first time for outer_type and second time
2132 for speculative_outer_type. The second run has SPECULATIVE set. */
2133 while (true)
2135 HOST_WIDE_INT pos, size;
2136 tree fld;
2138 /* If we do not know size of TYPE, we need to be more conservative
2139 about accepting cases where we can not find EXPECTED_TYPE.
2140 Generally the types that do matter here are of constant size.
2141 Size_unknown case should be very rare. */
2142 if (TYPE_SIZE (type)
2143 && tree_fits_shwi_p (TYPE_SIZE (type))
2144 && tree_to_shwi (TYPE_SIZE (type)) >= 0)
2145 size_unknown = false;
2146 else
2147 size_unknown = true;
2149 /* On a match, just return what we found. */
2150 if ((types_odr_comparable (type, expected_type)
2151 && types_same_for_odr (type, expected_type))
2152 || (!expected_type
2153 && TREE_CODE (type) == RECORD_TYPE
2154 && TYPE_BINFO (type)
2155 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
2157 if (speculative)
2159 /* If we did not match the offset, just give up on speculation. */
2160 if (cur_offset != 0
2161 /* Also check if speculation did not end up being same as
2162 non-speculation. */
2163 || (types_must_be_same_for_odr (speculative_outer_type,
2164 outer_type)
2165 && (maybe_derived_type
2166 == speculative_maybe_derived_type)))
2167 clear_speculation ();
2168 return true;
2170 else
2172 /* If type is known to be final, do not worry about derived
2173 types. Testing it here may help us to avoid speculation. */
2174 if (type_all_derivations_known_p (outer_type)
2175 && (TYPE_FINAL_P (outer_type)
2176 || (odr_hash
2177 && !get_odr_type (outer_type, true)->derived_types.length())))
2178 maybe_derived_type = false;
2180 /* Type can not contain itself on an non-zero offset. In that case
2181 just give up. Still accept the case where size is now known.
2182 Either the second copy may appear past the end of type or within
2183 the non-POD buffer located inside the variably sized type
2184 itself. */
2185 if (cur_offset != 0)
2186 goto no_useful_type_info;
2187 /* If we determined type precisely or we have no clue on
2188 speuclation, we are done. */
2189 if (!maybe_derived_type || !speculative_outer_type)
2191 clear_speculation ();
2192 return true;
2194 /* Otherwise look into speculation now. */
2195 else
2197 speculative = true;
2198 type = speculative_outer_type;
2199 cur_offset = speculative_offset;
2200 continue;
2205 /* Walk fields and find corresponding on at OFFSET. */
2206 if (TREE_CODE (type) == RECORD_TYPE)
2208 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
2210 if (TREE_CODE (fld) != FIELD_DECL)
2211 continue;
2213 pos = int_bit_position (fld);
2214 size = tree_to_uhwi (DECL_SIZE (fld));
2215 if (pos <= cur_offset && (pos + size) > cur_offset)
2216 break;
2219 if (!fld)
2220 goto no_useful_type_info;
/* Descend into the field covering CUR_OFFSET and rebase the offset.  */
2222 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
2223 cur_offset -= pos;
2224 /* DECL_ARTIFICIAL represents a basetype. */
2225 if (!DECL_ARTIFICIAL (fld))
2227 if (!speculative)
2229 outer_type = type;
2230 offset = cur_offset;
2231 /* As soon as we se an field containing the type,
2232 we know we are not looking for derivations. */
2233 maybe_derived_type = false;
2235 else
2237 speculative_outer_type = type;
2238 speculative_offset = cur_offset;
2239 speculative_maybe_derived_type = false;
2243 else if (TREE_CODE (type) == ARRAY_TYPE)
2245 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
2247 /* Give up if we don't know array size. */
2248 if (!TYPE_SIZE (subtype)
2249 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
2250 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
2251 || !contains_polymorphic_type_p (subtype))
2252 goto no_useful_type_info;
2254 HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
2256 /* We may see buffer for placement new. In this case the expected type
2257 can be bigger than the subtype. */
2258 if (TYPE_SIZE (subtype)
2259 && (cur_offset
2260 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
2261 : 0)
2262 > tree_to_uhwi (TYPE_SIZE (type))))
2263 goto no_useful_type_info;
2265 cur_offset = new_offset;
2266 type = subtype;
2267 if (!speculative)
2269 outer_type = type;
2270 offset = cur_offset;
2271 maybe_derived_type = false;
2273 else
2275 speculative_outer_type = type;
2276 speculative_offset = cur_offset;
2277 speculative_maybe_derived_type = false;
2280 /* Give up on anything else. */
2281 else
2283 no_useful_type_info:
2284 /* We found no way to embedd EXPECTED_TYPE in TYPE.
2285 We still permit two special cases - placement new and
2286 the case of variadic types containing themselves. */
2287 if (!speculative
2288 && (size_unknown || !type
2289 || possible_placement_new (type, expected_type, cur_offset)))
2291 /* In these weird cases we want to accept the context.
2292 In non-speculative run we have no useful outer_type info
2293 (TODO: we may eventually want to record upper bound on the
2294 type size that can be used to prune the walk),
2295 but we still want to consider speculation that may
2296 give useful info. */
/* NOTE(review): per the blob's own line numbering, the brace structure here
   makes the "return true" at blob line 2311 unconditional within this branch,
   so the speculative-walk setup just below (blob lines 2302-2304) assigns
   SPECULATIVE/TYPE/CUR_OFFSET and then returns without re-running the loop —
   dead stores.  Given this commit is "Fix previous commit", confirm the
   intended control flow (likely a missing loop continuation) against the
   committed tree.  */
2297 if (!speculative)
2299 clear_outer_type (expected_type);
2300 if (speculative_outer_type)
2302 speculative = true;
2303 type = speculative_outer_type;
2304 cur_offset = speculative_offset;
2306 else
2307 return true;
2309 else
2310 clear_speculation ();
2311 return true;
2313 else
/* Hard failure: no embedding, no placement-new escape.  Accept only if we
   were in the speculative pass (the base info still stands); otherwise the
   context is invalid.  */
2315 clear_speculation ();
2316 if (speculative)
2317 return true;
2318 clear_outer_type (expected_type);
2319 invalid = true;
2320 return false;
2326 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
2328 static bool
2329 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
2330 tree otr_type)
2332 ipa_polymorphic_call_context context;
2333 context.offset = offset;
2334 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
2335 context.maybe_derived_type = false;
2336 return context.restrict_to_inner_class (otr_type);
2339 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
2341 static tree
2342 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2343 tree vtable)
2345 tree v = BINFO_VTABLE (binfo);
2346 int i;
2347 tree base_binfo;
2348 unsigned HOST_WIDE_INT this_offset;
2350 if (v)
2352 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2353 gcc_unreachable ();
2355 if (offset == this_offset
2356 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2357 return binfo;
2360 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2361 if (polymorphic_type_binfo_p (base_binfo))
2363 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2364 if (base_binfo)
2365 return base_binfo;
2367 return NULL;
2370 /* T is known constant value of virtual table pointer.
2371 Store virtual table to V and its offset to OFFSET.
2372 Return false if T does not look like virtual table reference. */
2374 bool
2375 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2376 unsigned HOST_WIDE_INT *offset)
2378 /* We expect &MEM[(void *)&virtual_table + 16B].
2379 We obtain object's BINFO from the context of the virtual table.
2380 This one contains pointer to virtual table represented via
2381 POINTER_PLUS_EXPR. Verify that this pointer match to what
2382 we propagated through.
2384 In the case of virtual inheritance, the virtual tables may
2385 be nested, i.e. the offset may be different from 16 and we may
2386 need to dive into the type representation. */
2387 if (TREE_CODE (t) == ADDR_EXPR
2388 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2389 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2390 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2391 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2392 == VAR_DECL)
2393 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2394 (TREE_OPERAND (t, 0), 0), 0)))
2396 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2397 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2398 return true;
2401 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2402 We need to handle it when T comes from static variable initializer or
2403 BINFO. */
2404 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2406 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2407 t = TREE_OPERAND (t, 0);
2409 else
2410 *offset = 0;
2412 if (TREE_CODE (t) != ADDR_EXPR)
2413 return false;
2414 *v = TREE_OPERAND (t, 0);
2415 return true;
2418 /* T is known constant value of virtual table pointer. Return BINFO of the
2419 instance type. */
2421 tree
2422 vtable_pointer_value_to_binfo (const_tree t)
2424 tree vtable;
2425 unsigned HOST_WIDE_INT offset;
2427 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2428 return NULL_TREE;
2430 /* FIXME: for stores of construction vtables we return NULL,
2431 because we do not have BINFO for those. Eventually we should fix
2432 our representation to allow this case to be handled, too.
2433 In the case we see store of BINFO we however may assume
2434 that standard folding will be ale to cope with it. */
2435 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2436 offset, vtable);
2439 /* We know that the instance is stored in variable or parameter
2440 (not dynamically allocated) and we want to disprove the fact
2441 that it may be in construction at invocation of CALL.
2443 For the variable to be in construction we actually need to
2444 be in constructor of corresponding global variable or
2445 the inline stack of CALL must contain the constructor.
2446 Check this condition. This check works safely only before
2447 IPA passes, because inline stacks may become out of date
2448 later. */
2450 bool
2451 decl_maybe_in_construction_p (tree base, tree outer_type,
2452 gimple call, tree function)
2454 outer_type = TYPE_MAIN_VARIANT (outer_type);
2455 gcc_assert (DECL_P (base));
2457 /* After inlining the code unification optimizations may invalidate
2458 inline stacks. Also we need to give up on global variables after
2459 IPA, because addresses of these may have been propagated to their
2460 constructors. */
2461 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2462 return true;
2464 /* Pure functions can not do any changes on the dynamic type;
2465 that require writting to memory. */
2466 if (!auto_var_in_fn_p (base, function)
2467 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2468 return false;
2470 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2471 block = BLOCK_SUPERCONTEXT (block))
2472 if (BLOCK_ABSTRACT_ORIGIN (block)
2473 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2475 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2477 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2478 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2479 && !DECL_CXX_DESTRUCTOR_P (fn)))
2481 /* Watch for clones where we constant propagated the first
2482 argument (pointer to the instance). */
2483 fn = DECL_ABSTRACT_ORIGIN (fn);
2484 if (!fn
2485 || !is_global_var (base)
2486 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2487 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2488 && !DECL_CXX_DESTRUCTOR_P (fn)))
2489 continue;
2491 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2492 continue;
2494 /* FIXME: this can go away once we have ODR types equivalency on
2495 LTO level. */
2496 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2497 return true;
2498 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2499 if (types_same_for_odr (type, outer_type))
2500 return true;
2503 if (TREE_CODE (base) == VAR_DECL
2504 && is_global_var (base))
2506 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2507 || (!DECL_CXX_CONSTRUCTOR_P (function)
2508 && !DECL_CXX_DESTRUCTOR_P (function)))
2510 if (!DECL_ABSTRACT_ORIGIN (function))
2511 return false;
2512 /* Watch for clones where we constant propagated the first
2513 argument (pointer to the instance). */
2514 function = DECL_ABSTRACT_ORIGIN (function);
2515 if (!function
2516 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2517 || (!DECL_CXX_CONSTRUCTOR_P (function)
2518 && !DECL_CXX_DESTRUCTOR_P (function)))
2519 return false;
2521 /* FIXME: this can go away once we have ODR types equivalency on
2522 LTO level. */
2523 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2524 return true;
2525 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2526 if (types_same_for_odr (type, outer_type))
2527 return true;
2529 return false;
2532 /* Dump human readable context to F. */
2534 void
2535 ipa_polymorphic_call_context::dump (FILE *f) const
2537 fprintf (f, " ");
2538 if (invalid)
2539 fprintf (f, "Call is known to be undefined\n");
2540 else
2542 if (!outer_type && !offset && !speculative_outer_type)
2543 fprintf (f, "Empty context\n");
2544 if (outer_type || offset)
2546 fprintf (f, "Outer type:");
2547 print_generic_expr (f, outer_type, TDF_SLIM);
2548 if (maybe_derived_type)
2549 fprintf (f, " (or a derived type)");
2550 if (maybe_in_construction)
2551 fprintf (f, " (maybe in construction)");
2552 fprintf (f, " offset "HOST_WIDE_INT_PRINT_DEC,
2553 offset);
2555 if (speculative_outer_type)
2557 fprintf (f, " speculative outer type:");
2558 print_generic_expr (f, speculative_outer_type, TDF_SLIM);
2559 if (speculative_maybe_derived_type)
2560 fprintf (f, " (or a derived type)");
2561 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC,
2562 speculative_offset);
2565 fprintf(f, "\n");
2568 /* Print context to stderr. */
2570 void
2571 ipa_polymorphic_call_context::debug () const
2573 dump (stderr);
2576 /* Stream out the context to OB. */
2578 void
2579 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
2581 struct bitpack_d bp = bitpack_create (ob->main_stream);
2583 bp_pack_value (&bp, invalid, 1);
2584 bp_pack_value (&bp, maybe_in_construction, 1);
2585 bp_pack_value (&bp, maybe_derived_type, 1);
2586 bp_pack_value (&bp, speculative_maybe_derived_type, 1);
2587 bp_pack_value (&bp, outer_type != NULL, 1);
2588 bp_pack_value (&bp, offset != 0, 1);
2589 bp_pack_value (&bp, speculative_outer_type != NULL, 1);
2590 streamer_write_bitpack (&bp);
2592 if (outer_type != NULL)
2593 stream_write_tree (ob, outer_type, true);
2594 if (offset)
2595 streamer_write_hwi (ob, offset);
2596 if (speculative_outer_type != NULL)
2598 stream_write_tree (ob, speculative_outer_type, true);
2599 streamer_write_hwi (ob, speculative_offset);
2601 else
2602 gcc_assert (!speculative_offset);
2605 /* Stream in the context from IB and DATA_IN. */
2607 void
2608 ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
2609 struct data_in *data_in)
2611 struct bitpack_d bp = streamer_read_bitpack (ib);
2613 invalid = bp_unpack_value (&bp, 1);
2614 maybe_in_construction = bp_unpack_value (&bp, 1);
2615 maybe_derived_type = bp_unpack_value (&bp, 1);
2616 speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
2617 bool outer_type_p = bp_unpack_value (&bp, 1);
2618 bool offset_p = bp_unpack_value (&bp, 1);
2619 bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
2621 if (outer_type_p)
2622 outer_type = stream_read_tree (ib, data_in);
2623 else
2624 outer_type = NULL;
2625 if (offset_p)
2626 offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2627 else
2628 offset = 0;
2629 if (speculative_outer_type_p)
2631 speculative_outer_type = stream_read_tree (ib, data_in);
2632 speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2634 else
2636 speculative_outer_type = NULL;
2637 speculative_offset = 0;
2641 /* Proudce polymorphic call context for call method of instance
2642 that is located within BASE (that is assumed to be a decl) at offset OFF. */
2644 void
2645 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
2647 gcc_assert (DECL_P (base));
2649 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2650 offset = off;
2651 clear_speculation ();
2652 /* Make very conservative assumption that all objects
2653 may be in construction.
2655 It is up to caller to revisit this via
2656 get_dynamic_type or decl_maybe_in_construction_p. */
2657 maybe_in_construction = true;
2658 maybe_derived_type = false;
2661 /* CST is an invariant (address of decl), try to get meaningful
2662 polymorphic call context for polymorphic call of method
2663 if instance of OTR_TYPE that is located at offset OFF of this invariant.
2664 Return FALSE if nothing meaningful can be found. */
2666 bool
2667 ipa_polymorphic_call_context::set_by_invariant (tree cst,
2668 tree otr_type,
2669 HOST_WIDE_INT off)
2671 HOST_WIDE_INT offset2, size, max_size;
2672 tree base;
2674 invalid = false;
2675 off = 0;
2676 clear_outer_type (otr_type);
2678 if (TREE_CODE (cst) != ADDR_EXPR)
2679 return false;
2681 cst = TREE_OPERAND (cst, 0);
2682 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2683 if (!DECL_P (base) || max_size == -1 || max_size != size)
2684 return false;
2686 /* Only type inconsistent programs can have otr_type that is
2687 not part of outer type. */
2688 if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
2689 return false;
2691 set_by_decl (base, off);
2692 return true;
2695 /* See if OP is SSA name initialized as a copy or by single assignment.
2696 If so, walk the SSA graph up. */
2698 static tree
2699 walk_ssa_copies (tree op)
2701 STRIP_NOPS (op);
2702 while (TREE_CODE (op) == SSA_NAME
2703 && !SSA_NAME_IS_DEFAULT_DEF (op)
2704 && SSA_NAME_DEF_STMT (op)
2705 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2707 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2708 return op;
2709 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2710 STRIP_NOPS (op);
2712 return op;
2715 /* Create polymorphic call context from IP invariant CST.
2716 This is typically &global_var.
2717 OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
2718 is offset of call. */
2720 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
2721 tree otr_type,
2722 HOST_WIDE_INT off)
2724 clear_speculation ();
2725 set_by_invariant (cst, otr_type, off);
/* NOTE(review): this blob extract dropped brace-only lines and fused the
   original file's line numbers into the text; the code lines below are
   preserved byte-for-byte from the blob, only comment lines are added.  */
2728 /* Build context for pointer REF contained in FNDECL at statement STMT.
2729 if INSTANCE is non-NULL, return pointer to the object described by
2730 the context or DECL where context is contained in. */
2732 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
2733 tree ref,
2734 gimple stmt,
2735 tree *instance)
2737 tree otr_type = NULL;
2738 tree base_pointer;
2740 if (TREE_CODE (ref) == OBJ_TYPE_REF)
2742 otr_type = obj_type_ref_class (ref);
2743 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2745 else
2746 base_pointer = ref;
2748 /* Set up basic info in case we find nothing interesting in the analysis. */
2749 clear_speculation ();
2750 clear_outer_type (otr_type);
2751 invalid = false;
2753 /* Walk SSA for outer object. */
/* do/while loop: repeatedly peel SSA copies, &obj addresses and constant
   POINTER_PLUS offsets off BASE_POINTER, accumulating OFFSET, until the
   innermost base pointer is reached.  */
2756 base_pointer = walk_ssa_copies (base_pointer);
2757 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2759 HOST_WIDE_INT size, max_size;
2760 HOST_WIDE_INT offset2;
2761 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2762 &offset2, &size, &max_size);
2764 /* If this is a varying address, punt. */
2765 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2766 && max_size != -1
2767 && max_size == size)
2769 /* We found dereference of a pointer. Type of the pointer
2770 and MEM_REF is meaningless, but we can look futher. */
2771 if (TREE_CODE (base) == MEM_REF)
2773 base_pointer = TREE_OPERAND (base, 0);
2774 offset
2775 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2776 outer_type = NULL;
2778 /* We found base object. In this case the outer_type
2779 is known. */
2780 else if (DECL_P (base))
2782 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2784 /* Only type inconsistent programs can have otr_type that is
2785 not part of outer type. */
2786 if (otr_type
2787 && !contains_type_p (TREE_TYPE (base),
2788 offset + offset2, otr_type))
2790 invalid = true;
2791 if (instance)
2792 *instance = base_pointer;
2793 return;
2795 set_by_decl (base, offset + offset2);
2796 if (maybe_in_construction && stmt)
2797 maybe_in_construction
2798 = decl_maybe_in_construction_p (base,
2799 outer_type,
2800 stmt,
2801 fndecl);
2802 if (instance)
2803 *instance = base;
2804 return;
2806 else
2807 break;
2809 else
2810 break;
2812 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2813 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2815 offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2816 * BITS_PER_UNIT;
2817 base_pointer = TREE_OPERAND (base_pointer, 0);
2819 else
2820 break;
2822 while (true);
2824 /* Try to determine type of the outer object. */
2825 if (TREE_CODE (base_pointer) == SSA_NAME
2826 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2827 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2829 /* See if parameter is THIS pointer of a method. */
2830 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2831 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2833 outer_type
2834 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2835 gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE);
2837 /* Dynamic casting has possibly upcasted the type
2838 in the hiearchy. In this case outer type is less
2839 informative than inner type and we should forget
2840 about it. */
2841 if (otr_type
2842 && !contains_type_p (outer_type, offset,
2843 otr_type))
2845 outer_type = NULL;
2846 if (instance)
2847 *instance = base_pointer;
2848 return;
2851 /* If the function is constructor or destructor, then
2852 the type is possibly in construction, but we know
2853 it is not derived type. */
2854 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2855 || DECL_CXX_DESTRUCTOR_P (fndecl))
2857 maybe_in_construction = true;
2858 maybe_derived_type = false;
2860 else
2862 maybe_derived_type = true;
2863 maybe_in_construction = false;
2865 if (instance)
2866 *instance = base_pointer;
2867 return;
2869 /* Non-PODs passed by value are really passed by invisible
2870 reference. In this case we also know the type of the
2871 object. */
2872 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2874 outer_type
2875 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2876 gcc_assert (!POINTER_TYPE_P (outer_type));
2877 /* Only type inconsistent programs can have otr_type that is
2878 not part of outer type. */
2879 if (!contains_type_p (outer_type, offset,
2880 otr_type))
2882 invalid = true;
2883 if (instance)
2884 *instance = base_pointer;
2885 return;
2887 maybe_derived_type = false;
2888 maybe_in_construction = false;
2889 if (instance)
2890 *instance = base_pointer;
2891 return;
/* Fallback: no decl and no THIS/by-reference parameter was identified.
   Try to derive at least a speculative type from the pointer's static
   type (or the type of the value it was loaded from).  */
2895 tree base_type = TREE_TYPE (base_pointer);
2897 if (TREE_CODE (base_pointer) == SSA_NAME
2898 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2899 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2901 invalid = true;
2902 if (instance)
2903 *instance = base_pointer;
2904 return;
2906 if (TREE_CODE (base_pointer) == SSA_NAME
2907 && SSA_NAME_DEF_STMT (base_pointer)
2908 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2909 base_type = TREE_TYPE (gimple_assign_rhs1
2910 (SSA_NAME_DEF_STMT (base_pointer)));
/* NOTE(review): the guard "(otr_type || !contains_type_p (...))" keeps the
   speculation whenever OTR_TYPE is known, regardless of whether the
   pointed-to type actually contains it; given the commit message "Fix
   previous commit", confirm the intended polarity of this condition against
   the committed tree.  */
2912 if (POINTER_TYPE_P (base_type)
2913 && (otr_type
2914 || !contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2915 offset,
2916 otr_type)))
2918 speculative_outer_type = TYPE_MAIN_VARIANT
2919 (TREE_TYPE (base_type));
2920 speculative_offset = offset;
2921 speculative_maybe_derived_type = true;
2923 /* TODO: There are multiple ways to derive a type. For instance
2924 if BASE_POINTER is passed to an constructor call prior our refernece.
2925 We do not make this type of flow sensitive analysis yet. */
2926 if (instance)
2927 *instance = base_pointer;
2928 return;
2931 /* Structure to be passed in between detect_type_change and
2932 check_stmt_for_type_change. */
2934 struct type_change_info
2936 /* Offset into the object where there is the virtual method pointer we are
2937 looking for. */
2938 HOST_WIDE_INT offset;
2939 /* The declaration or SSA_NAME pointer of the base that we are checking for
2940 type change. */
2941 tree instance;
2942 /* The reference to virtual table pointer used. */
2943 tree vtbl_ptr_ref;
2944 tree otr_type;
2945 /* If we actually can tell the type that the object has changed to, it is
2946 stored in this field. Otherwise it remains NULL_TREE. */
2947 tree known_current_type;
2948 HOST_WIDE_INT known_current_offset;
2950 /* Set to true if dynamic type change has been detected. */
2951 bool type_maybe_changed;
2952 /* Set to true if multiple types have been encountered. known_current_type
2953 must be disregarded in that case. */
2954 bool multiple_types_encountered;
2955 /* Set to true if we possibly missed some dynamic type changes and we should
2956 consider the set to be speculative. */
2957 bool speculative;
2958 bool seen_unanalyzed_store;
2961 /* Return true if STMT is not call and can modify a virtual method table pointer.
2962 We take advantage of fact that vtable stores must appear within constructor
2963 and destructor functions. */
2965 static bool
2966 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
2968 if (is_gimple_assign (stmt))
2970 tree lhs = gimple_assign_lhs (stmt);
2972 if (gimple_clobber_p (stmt))
2973 return false;
2974 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
2976 if (flag_strict_aliasing
2977 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
2978 return false;
2980 if (TREE_CODE (lhs) == COMPONENT_REF
2981 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2982 return false;
2983 /* In the future we might want to use get_base_ref_and_offset to find
2984 if there is a field corresponding to the offset and if so, proceed
2985 almost like if it was a component ref. */
2989 /* Code unification may mess with inline stacks. */
2990 if (cfun->after_inlining)
2991 return true;
2993 /* Walk the inline stack and watch out for ctors/dtors.
2994 TODO: Maybe we can require the store to appear in toplevel
2995 block of CTOR/DTOR. */
2996 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
2997 block = BLOCK_SUPERCONTEXT (block))
2998 if (BLOCK_ABSTRACT_ORIGIN (block)
2999 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
3001 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
3003 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
3004 return false;
3005 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
3006 && (DECL_CXX_CONSTRUCTOR_P (fn)
3007 || DECL_CXX_DESTRUCTOR_P (fn)));
3009 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
3010 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
3011 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
3014 /* If STMT can be proved to be an assignment to the virtual method table
3015 pointer of ANALYZED_OBJ and the type associated with the new table
3016 identified, return the type. Otherwise return NULL_TREE. */
3018 static tree
3019 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
3020 HOST_WIDE_INT *type_offset)
3022 HOST_WIDE_INT offset, size, max_size;
3023 tree lhs, rhs, base;
3025 if (!gimple_assign_single_p (stmt))
3026 return NULL_TREE;
3028 lhs = gimple_assign_lhs (stmt);
3029 rhs = gimple_assign_rhs1 (stmt);
3030 if (TREE_CODE (lhs) != COMPONENT_REF
3031 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
3033 if (dump_file)
3034 fprintf (dump_file, " LHS is not virtual table.\n");
3035 return NULL_TREE;
3038 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
3040 else
3042 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
3043 if (offset != tci->offset
3044 || size != POINTER_SIZE
3045 || max_size != POINTER_SIZE)
3047 if (dump_file)
3048 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
3049 (int)offset, (int)tci->offset, (int)size);
3050 return NULL_TREE;
3052 if (DECL_P (tci->instance))
3054 if (base != tci->instance)
3056 if (dump_file)
3058 fprintf (dump_file, " base:");
3059 print_generic_expr (dump_file, base, TDF_SLIM);
3060 fprintf (dump_file, " does not match instance:");
3061 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
3062 fprintf (dump_file, "\n");
3064 return NULL_TREE;
3067 else if (TREE_CODE (base) == MEM_REF)
3069 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
3070 || !integer_zerop (TREE_OPERAND (base, 1)))
3072 if (dump_file)
3074 fprintf (dump_file, " base mem ref:");
3075 print_generic_expr (dump_file, base, TDF_SLIM);
3076 fprintf (dump_file, " has nonzero offset or does not match instance:");
3077 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
3078 fprintf (dump_file, "\n");
3080 return NULL_TREE;
3083 else if (!operand_equal_p (tci->instance, base, 0)
3084 || tci->offset)
3086 if (dump_file)
3088 fprintf (dump_file, " base:");
3089 print_generic_expr (dump_file, base, TDF_SLIM);
3090 fprintf (dump_file, " does not match instance:");
3091 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
3092 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
3094 return NULL_TREE;
3098 tree vtable;
3099 unsigned HOST_WIDE_INT offset2;
3101 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
3103 if (dump_file)
3104 fprintf (dump_file, " Failed to lookup binfo\n");
3105 return NULL;
3108 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
3109 offset2, vtable);
3110 if (!binfo)
3112 if (dump_file)
3113 fprintf (dump_file, " Construction vtable used\n");
3114 /* FIXME: We should suport construction contextes. */
3115 return NULL;
3118 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
3119 return DECL_CONTEXT (vtable);
3122 /* Record dynamic type change of TCI to TYPE at OFFSET (in bits) within
     the instance being tracked.  TYPE == NULL means the type became unknown.
     Updates TCI's known_current_type/offset and sets multiple_types_encountered
     when two walk branches disagree.  */
3124 void
3125 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
3127 if (dump_file)
3129 if (type)
3131 fprintf (dump_file, " Recording type: ");
3132 print_generic_expr (dump_file, type, TDF_SLIM);
3133 fprintf (dump_file, " at offset %i\n", (int)offset);
3135 else
3136 fprintf (dump_file, " Recording unknown type\n");
3139 /* If we found a constructor of type that is not polymorphic or
3140 that may contain the type in question as a field (not as base),
3141 restrict to the inner class first to make type matching below
3142 happier. */
3143 if (type
3144 && (offset
3145 || (TREE_CODE (type) != RECORD_TYPE
3146 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
3148 ipa_polymorphic_call_context context;
3150 context.offset = offset;
3151 context.outer_type = type;
3152 context.maybe_in_construction = false;
3153 context.maybe_derived_type = false;
3154 /* If we failed to find the inner type, we know that the call
3155 would be undefined for type produced here. */
3156 if (!context.restrict_to_inner_class (tci->otr_type))
3158 if (dump_file)
3159 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
3160 return;
3162 /* Watch for case we reached a POD type and anticipate placement
3163 new. */
3164 if (!context.maybe_derived_type)
3166 type = context.outer_type;
3167 offset = context.offset;
     /* A second, different (type, offset) observation means the walk saw
	conflicting stores; flag it so callers give up.  */
3170 if (tci->type_maybe_changed
3171 && (!types_same_for_odr (type, tci->known_current_type)
3172 || offset != tci->known_current_offset))
3173 tci->multiple_types_encountered = true;
3174 tci->known_current_type = TYPE_MAIN_VARIANT (type);
3175 tci->known_current_offset = offset;
3176 tci->type_maybe_changed = true;
3179 /* Callback of walk_aliased_vdefs and a helper function for
3180 detect_type_change to check whether a particular statement may modify
3181 the virtual table pointer, and if possible also determine the new type of
3182 the (sub-)object. It stores its result into DATA, which points to a
3183 type_change_info structure.
     Returning true terminates the walk (either the type was determined or we
     gave up); returning false continues walking earlier vdefs.  */
3185 static bool
3186 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
3188 gimple stmt = SSA_NAME_DEF_STMT (vdef);
3189 struct type_change_info *tci = (struct type_change_info *) data;
3190 tree fn;
3192 /* If we already gave up, just terminate the rest of walk. */
3193 if (tci->multiple_types_encountered)
3194 return true;
3196 if (is_gimple_call (stmt))
     /* Const/pure calls cannot store to memory, so cannot touch the vptr.  */
3198 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
3199 return false;
3201 /* Check for a constructor call. */
3202 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
3203 && DECL_CXX_CONSTRUCTOR_P (fn)
3204 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
3205 && gimple_call_num_args (stmt))
3207 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
3208 tree type = method_class_type (TREE_TYPE (fn));
3209 HOST_WIDE_INT offset = 0, size, max_size;
3211 if (dump_file)
3213 fprintf (dump_file, " Checking constructor call: ");
3214 print_gimple_stmt (dump_file, stmt, 0, 0);
3217 /* See if THIS parameter seems like instance pointer. */
3218 if (TREE_CODE (op) == ADDR_EXPR)
3220 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
3221 &offset, &size, &max_size);
     /* Variable-sized or unknown-extent reference: cannot prove what is
	being constructed; fall back to speculative result.  */
3222 if (size != max_size || max_size == -1)
3224 tci->speculative = true;
3225 return false;
3227 if (op && TREE_CODE (op) == MEM_REF)
3229 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
3231 tci->speculative = true;
3232 return false;
     /* Fold the MEM_REF byte offset into the running bit offset.  */
3234 offset += tree_to_shwi (TREE_OPERAND (op, 1))
3235 * BITS_PER_UNIT;
3236 op = TREE_OPERAND (op, 0);
3238 else if (DECL_P (op))
3240 else
3242 tci->speculative = true;
3243 return false;
3245 op = walk_ssa_copies (op);
     /* Constructor of TYPE over our instance covering tci->offset: the
	dynamic type at that offset is now known.  */
3247 if (operand_equal_p (op, tci->instance, 0)
3248 && TYPE_SIZE (type)
3249 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3250 && tree_fits_shwi_p (TYPE_SIZE (type))
3251 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
3253 record_known_type (tci, type, tci->offset - offset);
3254 return true;
3257 /* Calls may possibly change dynamic type by placement new. Assume
3258 it will not happen, but make result speculative only. */
3259 if (dump_file)
3261 fprintf (dump_file, " Function call may change dynamic type:");
3262 print_gimple_stmt (dump_file, stmt, 0, 0);
3264 tci->speculative = true;
3265 return false;
3267 /* Check for inlined virtual table store. */
3268 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
3270 tree type;
3271 HOST_WIDE_INT offset = 0;
3272 if (dump_file)
3274 fprintf (dump_file, " Checking vtbl store: ");
3275 print_gimple_stmt (dump_file, stmt, 0, 0);
3278 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
3279 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
3280 if (!type)
3282 if (dump_file)
3283 fprintf (dump_file, " Unanalyzed store may change type.\n");
3284 tci->seen_unanalyzed_store = true;
3285 tci->speculative = true;
3287 else
3288 record_known_type (tci, type, offset);
3289 return true;
3291 else
3292 return false;
3295 /* THIS is polymorphic call context obtained from get_polymorphic_context.
3296 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
3297 INSTANCE is pointer to the outer instance as returned by
3298 get_polymorphic_context. To avoid creation of temporary expressions,
3299 INSTANCE may also be an declaration of get_polymorphic_context found the
3300 value to be in static storage.
3302 If the type of instance is not fully determined
3303 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
3304 is set), try to walk memory writes and find the actual construction of the
3305 instance.
3307 We do not include this analysis in the context analysis itself, because
3308 it needs memory SSA to be fully built and the walk may be expensive.
3309 So it is not suitable for use withing fold_stmt and similar uses. */
3311 bool
3312 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
3313 tree otr_object,
3314 tree otr_type,
3315 gimple call)
3317 struct type_change_info tci;
3318 ao_ref ao;
3319 bool function_entry_reached = false;
3320 tree instance_ref = NULL;
3321 gimple stmt = call;
3322 /* Remember OFFSET before it is modified by restrict_to_inner_class.
3323 This is because we do not update INSTANCE when walking inwards. */
3324 HOST_WIDE_INT instance_offset = offset;
3326 otr_type = TYPE_MAIN_VARIANT (otr_type);
3328 /* Walk into inner type. This may clear maybe_derived_type and save us
3329 from useless work. It also makes later comparsions with static type
3330 easier. */
3331 if (outer_type)
3333 if (!restrict_to_inner_class (otr_type))
3334 return false;
3337 if (!maybe_in_construction && !maybe_derived_type)
3338 return false;
3340 /* We need to obtain refernce to virtual table pointer. It is better
3341 to look it up in the code rather than build our own. This require bit
3342 of pattern matching, but we end up verifying that what we found is
3343 correct.
3345 What we pattern match is:
3347 tmp = instance->_vptr.A; // vtbl ptr load
3348 tmp2 = tmp[otr_token]; // vtable lookup
3349 OBJ_TYPE_REF(tmp2;instance->0) (instance);
3351 We want to start alias oracle walk from vtbl pointer load,
3352 but we may not be able to identify it, for example, when PRE moved the
3353 load around. */
3355 if (gimple_code (call) == GIMPLE_CALL)
3357 tree ref = gimple_call_fn (call);
3358 HOST_WIDE_INT offset2, size, max_size;
3360 if (TREE_CODE (ref) == OBJ_TYPE_REF)
3362 ref = OBJ_TYPE_REF_EXPR (ref);
3363 ref = walk_ssa_copies (ref);
3365 /* Check if definition looks like vtable lookup. */
3366 if (TREE_CODE (ref) == SSA_NAME
3367 && !SSA_NAME_IS_DEFAULT_DEF (ref)
3368 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
3369 && TREE_CODE (gimple_assign_rhs1
3370 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
3372 ref = get_base_address
3373 (TREE_OPERAND (gimple_assign_rhs1
3374 (SSA_NAME_DEF_STMT (ref)), 0));
3375 ref = walk_ssa_copies (ref);
3376 /* Find base address of the lookup and see if it looks like
3377 vptr load. */
3378 if (TREE_CODE (ref) == SSA_NAME
3379 && !SSA_NAME_IS_DEFAULT_DEF (ref)
3380 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
3382 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
3383 tree base_ref = get_ref_base_and_extent
3384 (ref_exp, &offset2, &size, &max_size);
3386 /* Finally verify that what we found looks like read from OTR_OBJECT
3387 or from INSTANCE with offset OFFSET. */
3388 if (base_ref
3389 && ((TREE_CODE (base_ref) == MEM_REF
3390 && ((offset2 == instance_offset
3391 && TREE_OPERAND (base_ref, 0) == instance)
3392 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
3393 || (DECL_P (instance) && base_ref == instance
3394 && offset2 == instance_offset)))
3396 stmt = SSA_NAME_DEF_STMT (ref);
3397 instance_ref = ref_exp;
3404 /* If we failed to look up the refernece in code, build our own. */
3405 if (!instance_ref)
3407 /* If the statement in question does not use memory, we can't tell
3408 anything. */
3409 if (!gimple_vuse (stmt))
3410 return false;
3411 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
3413 else
3414 /* Otherwise use the real reference. */
3415 ao_ref_init (&ao, instance_ref);
3417 /* We look for vtbl pointer read. */
3418 ao.size = POINTER_SIZE;
3419 ao.max_size = ao.size;
3420 ao.ref_alias_set
3421 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
3423 if (dump_file)
3425 fprintf (dump_file, "Determining dynamic type for call: ");
3426 print_gimple_stmt (dump_file, call, 0, 0);
3427 fprintf (dump_file, " Starting walk at: ");
3428 print_gimple_stmt (dump_file, stmt, 0, 0);
3429 fprintf (dump_file, " instance pointer: ");
3430 print_generic_expr (dump_file, otr_object, TDF_SLIM);
3431 fprintf (dump_file, " Outer instance pointer: ");
3432 print_generic_expr (dump_file, instance, TDF_SLIM);
3433 fprintf (dump_file, " offset: %i (bits)", (int)offset);
3434 fprintf (dump_file, " vtbl reference: ");
3435 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
3436 fprintf (dump_file, "\n");
3439 tci.offset = offset;
3440 tci.instance = instance;
3441 tci.vtbl_ptr_ref = instance_ref;
3442 gcc_assert (TREE_CODE (instance) != MEM_REF);
3443 tci.known_current_type = NULL_TREE;
3444 tci.known_current_offset = 0;
3445 tci.otr_type = otr_type;
3446 tci.type_maybe_changed = false;
3447 tci.multiple_types_encountered = false;
3448 tci.speculative = false;
3449 tci.seen_unanalyzed_store = false;
3451 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
3452 &tci, NULL, &function_entry_reached);
3454 /* If we did not find any type changing statements, we may still drop
3455 maybe_in_construction flag if the context already have outer type.
3457 Here we make special assumptions about both constructors and
3458 destructors which are all the functions that are allowed to alter the
3459 VMT pointers. It assumes that destructors begin with assignment into
3460 all VMT pointers and that constructors essentially look in the
3461 following way:
3463 1) The very first thing they do is that they call constructors of
3464 ancestor sub-objects that have them.
3466 2) Then VMT pointers of this and all its ancestors is set to new
3467 values corresponding to the type corresponding to the constructor.
3469 3) Only afterwards, other stuff such as constructor of member
3470 sub-objects and the code written by the user is run. Only this may
3471 include calling virtual functions, directly or indirectly.
3473 4) placement new can not be used to change type of non-POD statically
3474 allocated variables.
3476 There is no way to call a constructor of an ancestor sub-object in any
3477 other way.
3479 This means that we do not have to care whether constructors get the
3480 correct type information because they will always change it (in fact,
3481 if we define the type to be given by the VMT pointer, it is undefined).
3483 The most important fact to derive from the above is that if, for some
3484 statement in the section 3, we try to detect whether the dynamic type
3485 has changed, we can safely ignore all calls as we examine the function
3486 body backwards until we reach statements in section 2 because these
3487 calls cannot be ancestor constructors or destructors (if the input is
3488 not bogus) and so do not change the dynamic type (this holds true only
3489 for automatically allocated objects but at the moment we devirtualize
3490 only these). We then must detect that statements in section 2 change
3491 the dynamic type and can try to derive the new type. That is enough
3492 and we can stop, we will never see the calls into constructors of
3493 sub-objects in this code.
3495 Therefore if the static outer type was found (outer_type)
3496 we can safely ignore tci.speculative that is set on calls and give up
3497 only if there was dyanmic type store that may affect given variable
3498 (seen_unanalyzed_store) */
3500 if (!tci.type_maybe_changed
3501 || (outer_type
3502 && !tci.seen_unanalyzed_store
3503 && !tci.multiple_types_encountered
3504 && offset == tci.offset
3505 && types_same_for_odr (tci.known_current_type,
3506 outer_type)))
3508 if (!outer_type || tci.seen_unanalyzed_store)
3509 return false;
3510 if (maybe_in_construction)
3511 maybe_in_construction = false;
3512 if (dump_file)
3513 fprintf (dump_file, " No dynamic type change found.\n");
3514 return true;
3517 if (tci.known_current_type
3518 && !function_entry_reached
3519 && !tci.multiple_types_encountered)
3521 if (!tci.speculative)
3523 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
3524 offset = tci.known_current_offset;
3525 maybe_in_construction = false;
3526 maybe_derived_type = false;
3527 if (dump_file)
3528 fprintf (dump_file, " Determined dynamic type.\n");
3530 else if (!speculative_outer_type
3531 || speculative_maybe_derived_type)
3533 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
3534 speculative_offset = tci.known_current_offset;
3535 speculative_maybe_derived_type = false;
3536 if (dump_file)
3537 fprintf (dump_file, " Determined speculative dynamic type.\n");
3540 else if (dump_file)
3542 fprintf (dump_file, " Found multiple types%s%s\n",
3543 function_entry_reached ? " (function entry reached)" : "",
3544 function_entry_reached ? " (multiple types encountered)" : "");
3547 return true;
3550 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
3551 Lookup their respective virtual methods for OTR_TOKEN and OTR_TYPE
3552 and insert them to NODES.
3554 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
3556 static void
3557 record_targets_from_bases (tree otr_type,
3558 HOST_WIDE_INT otr_token,
3559 tree outer_type,
3560 HOST_WIDE_INT offset,
3561 vec <cgraph_node *> &nodes,
3562 hash_set<tree> *inserted,
3563 hash_set<tree> *matched_vtables,
3564 bool *completep)
3566 while (true)
3568 HOST_WIDE_INT pos, size;
3569 tree base_binfo;
3570 tree fld;
3572 if (types_same_for_odr (outer_type, otr_type))
3573 return;
     /* Find the (artificial) base field covering OFFSET.  */
3575 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
3577 if (TREE_CODE (fld) != FIELD_DECL)
3578 continue;
3580 pos = int_bit_position (fld);
3581 size = tree_to_shwi (DECL_SIZE (fld));
3582 if (pos <= offset && (pos + size) > offset
3583 /* Do not get confused by zero sized bases. */
3584 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
3585 break;
3587 /* Within a class type we should always find corresponding fields. */
3588 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3590 /* Nonbasetypes should have been stripped by outer_class_type. */
3591 gcc_assert (DECL_ARTIFICIAL (fld));
3593 outer_type = TREE_TYPE (fld);
3594 offset -= pos;
3596 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
3597 offset, otr_type);
3598 if (!base_binfo)
3600 gcc_assert (odr_violation_reported);
3601 return;
     /* hash_set::add returns true when the entry already existed, so the
	condition below both tests and records the vtable in one step;
	a second add call afterwards would be redundant.  */
3604 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
3606 bool can_refer;
3607 tree target = gimple_get_virt_method_for_binfo (otr_token,
3608 base_binfo,
3609 &can_refer);
3610 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
3611 maybe_record_node (nodes, target, inserted, can_refer, completep);
3617 /* When virtual table is removed, we may need to flush the cache.
     Registered as a varpool removal hook; D is unused.  Only vtables of
     anonymous-namespace types matter, since their removal can invalidate
     cached target lists.  */
3619 static void
3620 devirt_variable_node_removal_hook (varpool_node *n,
3621 void *d ATTRIBUTE_UNUSED)
3623 if (cached_polymorphic_call_targets
3624 && DECL_VIRTUAL_P (n->decl)
3625 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3626 free_polymorphic_call_targets_hash ();
3629 /* Record about how many calls would benefit from given type to be final. */
3631 struct odr_type_warn_count
     /* The candidate type, static call count and profile-weighted count.  */
3633 tree type;
3634 int count;
3635 gcov_type dyn_count;
3638 /* Record about how many calls would benefit from given method to be final. */
3640 struct decl_warn_count
     /* The candidate method, static call count and profile-weighted count.  */
3642 tree decl;
3643 int count;
3644 gcov_type dyn_count;
3647 /* Information about type and decl warnings. */
3649 struct final_warning_record
     /* dyn_count of the call currently being analyzed; per-type warnings
	indexed by odr_type id; per-decl warnings keyed by method decl.  */
3651 gcov_type dyn_count;
3652 vec<odr_type_warn_count> type_warnings;
3653 hash_map<tree, decl_warn_count> decl_warnings;
     /* Global instance; non-NULL only while the warning pass collects data.  */
3655 struct final_warning_record *final_warning_records;
3657 /* Return vector containing possible targets of polymorphic call of type
3658 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3659 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
3660 OTR_TYPE and include their virtual method. This is useful for types
3661 possibly in construction or destruction where the virtual table may
3662 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
3663 us to walk the inheritance graph for all derivations.
3665 If COMPLETEP is non-NULL, store true if the list is complete.
3666 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
3667 in the target cache. If user needs to visit every target list
3668 just once, it can memoize them.
3670 SPECULATION_TARGETS specify number of targets that are speculatively
3671 likely. These include targets specified by the speculative part
3672 of polymoprhic call context and also exclude all targets for classes
3673 in construction.
3675 Returned vector is placed into cache. It is NOT caller's responsibility
3676 to free it. The vector can be freed on cgraph_remove_node call if
3677 the particular node is a virtual function present in the cache. */
3679 vec <cgraph_node *>
3680 possible_polymorphic_call_targets (tree otr_type,
3681 HOST_WIDE_INT otr_token,
3682 ipa_polymorphic_call_context context,
3683 bool *completep,
3684 void **cache_token,
3685 int *speculative_targetsp)
3687 static struct cgraph_node_hook_list *node_removal_hook_holder;
3688 vec <cgraph_node *> nodes = vNULL;
3689 vec <tree> bases_to_consider = vNULL;
3690 odr_type type, outer_type;
3691 polymorphic_call_target_d key;
3692 polymorphic_call_target_d **slot;
3693 unsigned int i;
3694 tree binfo, target;
3695 bool complete;
3696 bool can_refer;
3697 bool skipped = false;
3699 otr_type = TYPE_MAIN_VARIANT (otr_type);
3701 /* If ODR is not initialized or the constext is invalid, return empty
3702 incomplete list. */
3703 if (!odr_hash || context.invalid)
3705 if (completep)
3706 *completep = context.invalid;
3707 if (cache_token)
3708 *cache_token = NULL;
3709 if (speculative_targetsp)
3710 *speculative_targetsp = 0;
3711 return nodes;
3714 /* Do not bother to compute speculative info when user do not asks for it. */
3715 if (!speculative_targetsp || !context.speculative_outer_type)
3716 context.clear_speculation ();
3718 type = get_odr_type (otr_type, true);
3720 /* Recording type variants would wast results cache. */
3721 gcc_assert (!context.outer_type
3722 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3724 /* Lookup the outer class type we want to walk.
3725 If we fail to do so, the context is invalid. */
3726 if ((context.outer_type || context.speculative_outer_type)
3727 && !context.restrict_to_inner_class (otr_type))
3729 fprintf (stderr, "Invalid\n");
3730 if (completep)
3731 *completep = true;
3732 if (cache_token)
3733 *cache_token = NULL;
3734 if (speculative_targetsp)
3735 *speculative_targetsp = 0;
3736 return nodes;
3738 gcc_assert (!context.invalid);
3740 /* Check that restrict_to_inner_class kept the main variant. */
3741 gcc_assert (!context.outer_type
3742 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3744 /* We canonicalize our query, so we do not need extra hashtable entries. */
3746 /* Without outer type, we have no use for offset. Just do the
3747 basic search from innter type */
3748 if (!context.outer_type)
3750 context.outer_type = otr_type;
3751 context.offset = 0;
3753 /* We need to update our hiearchy if the type does not exist. */
3754 outer_type = get_odr_type (context.outer_type, true);
3755 /* If the type is complete, there are no derivations. */
3756 if (TYPE_FINAL_P (outer_type->type))
3757 context.maybe_derived_type = false;
3759 /* Initialize query cache. */
3760 if (!cached_polymorphic_call_targets)
3762 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3763 polymorphic_call_target_hash
3764 = new polymorphic_call_target_hash_type (23);
3765 if (!node_removal_hook_holder)
3767 node_removal_hook_holder =
3768 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3769 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3770 NULL);
3774 /* Lookup cached answer. */
3775 key.type = type;
3776 key.otr_token = otr_token;
3777 key.context = context;
3778 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3779 if (cache_token)
3780 *cache_token = (void *)*slot;
3781 if (*slot)
3783 if (completep)
3784 *completep = (*slot)->complete;
3785 if (speculative_targetsp)
3786 *speculative_targetsp = (*slot)->speculative_targets;
3787 if ((*slot)->type_warning && final_warning_records)
3789 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3790 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3791 += final_warning_records->dyn_count;
3793 if ((*slot)->decl_warning && final_warning_records)
3795 struct decl_warn_count *c =
3796 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3797 c->count++;
3798 c->dyn_count += final_warning_records->dyn_count;
3800 return (*slot)->targets;
3803 complete = true;
3805 /* Do actual search. */
3806 timevar_push (TV_IPA_VIRTUAL_CALL);
3807 *slot = XCNEW (polymorphic_call_target_d);
3808 if (cache_token)
3809 *cache_token = (void *)*slot;
3810 (*slot)->type = type;
3811 (*slot)->otr_token = otr_token;
3812 (*slot)->context = context;
3813 (*slot)->speculative_targets = 0;
3815 hash_set<tree> inserted;
3816 hash_set<tree> matched_vtables;
3818 /* First insert targets we speculatively identified as likely. */
3819 if (context.speculative_outer_type)
3821 odr_type speculative_outer_type;
3822 bool speculation_complete = true;
3824 /* First insert target from type itself and check if it may have derived types. */
3825 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3826 if (TYPE_FINAL_P (speculative_outer_type->type))
3827 context.speculative_maybe_derived_type = false;
3828 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3829 context.speculative_offset, otr_type);
3830 if (binfo)
3831 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3832 &can_refer);
3833 else
3834 target = NULL;
3836 /* In the case we get complete method, we don't need
3837 to walk derivations. */
3838 if (target && DECL_FINAL_P (target))
3839 context.speculative_maybe_derived_type = false;
3840 if (type_possibly_instantiated_p (speculative_outer_type->type))
3841 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3842 if (binfo)
3843 matched_vtables.add (BINFO_VTABLE (binfo));
3846 /* Next walk recursively all derived types. */
3847 if (context.speculative_maybe_derived_type)
3848 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3849 possible_polymorphic_call_targets_1 (nodes, &inserted,
3850 &matched_vtables,
3851 otr_type,
3852 speculative_outer_type->derived_types[i],
3853 otr_token, speculative_outer_type->type,
3854 context.speculative_offset,
3855 &speculation_complete,
3856 bases_to_consider,
3857 false);
3858 (*slot)->speculative_targets = nodes.length();
3861 /* First see virtual method of type itself. */
3862 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3863 context.offset, otr_type);
3864 if (binfo)
3865 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3866 &can_refer);
3867 else
3869 gcc_assert (odr_violation_reported);
3870 target = NULL;
3873 /* Destructors are never called through construction virtual tables,
3874 because the type is always known. */
3875 if (target && DECL_CXX_DESTRUCTOR_P (target))
3876 context.maybe_in_construction = false;
3878 if (target)
3880 /* In the case we get complete method, we don't need
3881 to walk derivations. */
3882 if (DECL_FINAL_P (target))
3883 context.maybe_derived_type = false;
3886 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3887 if (type_possibly_instantiated_p (outer_type->type))
3888 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3889 else
3890 skipped = true;
3892 if (binfo)
3893 matched_vtables.add (BINFO_VTABLE (binfo));
3895 /* Next walk recursively all derived types. */
3896 if (context.maybe_derived_type)
3898 for (i = 0; i < outer_type->derived_types.length(); i++)
3899 possible_polymorphic_call_targets_1 (nodes, &inserted,
3900 &matched_vtables,
3901 otr_type,
3902 outer_type->derived_types[i],
3903 otr_token, outer_type->type,
3904 context.offset, &complete,
3905 bases_to_consider,
3906 context.maybe_in_construction);
3908 if (!outer_type->all_derivations_known)
3910 if (final_warning_records)
3912 if (complete
3913 && nodes.length () == 1
3914 && warn_suggest_final_types
3915 && !outer_type->derived_types.length ())
3917 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3918 final_warning_records->type_warnings.safe_grow_cleared
3919 (odr_types.length ());
3920 final_warning_records->type_warnings[outer_type->id].count++;
3921 final_warning_records->type_warnings[outer_type->id].dyn_count
3922 += final_warning_records->dyn_count;
3923 final_warning_records->type_warnings[outer_type->id].type
3924 = outer_type->type;
3925 (*slot)->type_warning = outer_type->id + 1;
3927 if (complete
3928 && warn_suggest_final_methods
3929 && nodes.length () == 1
3930 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3931 outer_type->type))
3933 bool existed;
3934 struct decl_warn_count &c =
3935 final_warning_records->decl_warnings.get_or_insert
3936 (nodes[0]->decl, &existed);
3938 if (existed)
3940 c.count++;
3941 c.dyn_count += final_warning_records->dyn_count;
3943 else
3945 c.count = 1;
3946 c.dyn_count = final_warning_records->dyn_count;
3947 c.decl = nodes[0]->decl;
3949 (*slot)->decl_warning = nodes[0]->decl;
3952 complete = false;
3956 /* Finally walk bases, if asked to. */
3957 if (!(*slot)->speculative_targets)
3958 (*slot)->speculative_targets = nodes.length();
3960 /* Destructors are never called through construction virtual tables,
3961 because the type is always known. One of entries may be cxa_pure_virtual
3962 so look to at least two of them. */
3963 if (context.maybe_in_construction)
3964 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3965 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3966 context.maybe_in_construction = false;
3967 if (context.maybe_in_construction)
3969 if (type != outer_type
3970 && (!skipped
3971 || (context.maybe_derived_type
3972 && !type_all_derivations_known_p (outer_type->type))))
3973 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3974 context.offset, nodes, &inserted,
3975 &matched_vtables, &complete);
3976 if (skipped)
3977 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3978 for (i = 0; i < bases_to_consider.length(); i++)
3979 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3981 bases_to_consider.release();
3983 (*slot)->targets = nodes;
3984 (*slot)->complete = complete;
3985 if (completep)
3986 *completep = complete;
3987 if (speculative_targetsp)
3988 *speculative_targetsp = (*slot)->speculative_targets;
3990 timevar_pop (TV_IPA_VIRTUAL_CALL);
3991 return nodes;
     /* hash_map traversal callback: collect pointers to decl warning records
	into VEC.  Always returns true so the traversal visits every entry.  */
3994 bool
3995 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3996 vec<const decl_warn_count*> *vec)
3998 vec->safe_push (&value);
3999 return true;
4002 /* Dump all possible targets of a polymorphic call described by OTR_TYPE,
     OTR_TOKEN and context CTX to stream F.  */
4004 void
4005 dump_possible_polymorphic_call_targets (FILE *f,
4006 tree otr_type,
4007 HOST_WIDE_INT otr_token,
4008 const ipa_polymorphic_call_context &ctx)
4010 vec <cgraph_node *> targets;
4011 bool final;
4012 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
4013 unsigned int i;
4014 int speculative;
4016 if (!type)
4017 return;
4018 targets = possible_polymorphic_call_targets (otr_type, otr_token,
4019 ctx,
4020 &final, NULL, &speculative);
4021 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
4022 print_generic_expr (f, type->type, TDF_SLIM);
4023 fprintf (f, " token %i\n", (int)otr_token);
4025 ctx.dump (f);
4027 fprintf (f, " %s%s%s%s\n ",
4028 final ? "This is a complete list." :
4029 "This is partial list; extra targets may be defined in other units.",
4030 ctx.maybe_in_construction ? " (base types included)" : "",
4031 ctx.maybe_derived_type ? " (derived types included)" : "",
4032 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
4033 for (i = 0; i < targets.length (); i++)
4035 char *name = NULL;
     /* Entries past SPECULATIVE are possible but not likely targets.  */
4036 if (i == (unsigned)speculative)
4037 fprintf (f, "\n Targets that are not likely:\n"
4038 " ");
     /* In LTO, symbols are mangled; demangle for readable dumps.  */
4039 if (in_lto_p)
4040 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
4041 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
4042 if (in_lto_p)
4043 free (name);
4044 if (!targets[i]->definition)
4045 fprintf (f, " (no definition%s)",
4046 DECL_DECLARED_INLINE_P (targets[i]->decl)
4047 ? " inline" : "");
4049 fprintf (f, "\n\n");
4053 /* Return true if N can be possibly target of a polymorphic call of
4054 OTR_TYPE/OTR_TOKEN. */
4056 bool
4057 possible_polymorphic_call_target_p (tree otr_type,
4058 HOST_WIDE_INT otr_token,
4059 const ipa_polymorphic_call_context &ctx,
4060 struct cgraph_node *n)
4062 vec <cgraph_node *> targets;
4063 unsigned int i;
4064 enum built_in_function fcode;
4065 bool final;
     /* __builtin_unreachable and __builtin_trap stand in for impossible
	targets, so they are always "possible".
	NOTE(review): DECL_FUNCTION_CODE is read without first checking that
	the decl is a built-in -- confirm this is safe for plain
	FUNCTION_TYPE decls here.  */
4067 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
4068 && ((fcode = DECL_FUNCTION_CODE (n->decl))
4069 == BUILT_IN_UNREACHABLE
4070 || fcode == BUILT_IN_TRAP))
4071 return true;
     /* Without ODR info we cannot rule anything out.  */
4073 if (!odr_hash)
4074 return true;
4075 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
4076 for (i = 0; i < targets.length (); i++)
4077 if (n->semantically_equivalent_p (targets[i]))
4078 return true;
4080 /* At a moment we allow middle end to dig out new external declarations
4081 as a targets of polymorphic calls. */
4082 if (!final && !n->definition)
4083 return true;
4084 return false;
4089 /* Return true if N can be possibly target of a polymorphic call of
4090 OBJ_TYPE_REF expression REF in STMT.
     Convenience overload: builds the call context from STMT and dispatches
     to the OTR_TYPE/OTR_TOKEN overload above.  */
4092 bool
4093 possible_polymorphic_call_target_p (tree ref,
4094 gimple stmt,
4095 struct cgraph_node *n)
4097 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
4098 tree call_fn = gimple_call_fn (stmt);
4100 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
4101 tree_to_uhwi
4102 (OBJ_TYPE_REF_TOKEN (call_fn)),
4103 context,
4108 /* After callgraph construction new external nodes may appear.
4109 Add them into the graph. */
4111 void
4112 update_type_inheritance_graph (void)
4114 struct cgraph_node *n;
4116 if (!odr_hash)
4117 return;
 /* Cached polymorphic-call target lists may be invalidated by the newly
    added nodes; drop them so they get recomputed. */
4118 free_polymorphic_call_targets_hash ();
4119 timevar_push (TV_IPA_INHERITANCE);
4120 /* We reconstruct the graph starting from types of all methods seen in
4121 the unit. */
4122 FOR_EACH_FUNCTION (n)
4123 if (DECL_VIRTUAL_P (n->decl)
4124 && !n->definition
4125 && n->real_symbol_p ())
4126 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
4127 true);
4128 timevar_pop (TV_IPA_INHERITANCE);
4132 /* Return true if N looks like a likely target of a polymorphic call.
4133 Rule out cxa_pure_virtual, noreturns, functions declared cold and
4134 other obvious cases. */
4136 bool
4137 likely_target_p (struct cgraph_node *n)
4139 int flags;
4140 /* cxa_pure_virtual and similar things are not likely. */
4141 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
4142 return false;
4143 flags = flags_from_decl_or_type (n->decl);
4144 if (flags & ECF_NORETURN)
4145 return false;
 /* Functions explicitly marked cold are unlikely call targets. */
4146 if (lookup_attribute ("cold",
4147 DECL_ATTRIBUTES (n->decl)))
4148 return false;
4149 if (n->frequency < NODE_FREQUENCY_NORMAL)
4150 return false;
4151 /* If there are no virtual tables referring to the target alive,
4152 the only way the target can be called is an instance coming from another
4153 compilation unit; speculative devirtualization is built around the
4154 assumption that this won't happen. */
4155 if (!referenced_from_vtable_p (n))
4156 return false;
4157 return true;
4160 /* Compare type warning records P1 and P2 and choose the one with larger
4161 dyn_count (profile count), breaking ties by static count; helper for
 qsort, so larger records sort first. */
4164 type_warning_cmp (const void *p1, const void *p2)
4166 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
4167 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
4169 if (t1->dyn_count < t2->dyn_count)
4170 return 1;
4171 if (t1->dyn_count > t2->dyn_count)
4172 return -1;
4173 return t2->count - t1->count;
4176 /* Compare decl warning records P1 and P2 and choose the one with larger
4177 dyn_count (profile count), breaking ties by static count; helper for
 qsort over an array of pointers, so larger records sort first. */
4180 decl_warning_cmp (const void *p1, const void *p2)
4182 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
4183 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
4185 if (t1->dyn_count < t2->dyn_count)
4186 return 1;
4187 if (t1->dyn_count > t2->dyn_count)
4188 return -1;
4189 return t2->count - t1->count;
4192 /* The ipa-devirt pass.
4193 When a polymorphic call has only one likely target in the unit,
4194 turn it into a speculative call. */
4196 static unsigned int
4197 ipa_devirt (void)
4199 struct cgraph_node *n;
4200 hash_set<void *> bad_call_targets;
4201 struct cgraph_edge *e;
 /* Statistics counters reported in the dump file at the end of the pass. */
4203 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
4204 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
4205 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
 /* NOTE(review): ndevirtualized is never incremented in this function, so
    the return value below is always 0; presumably full devirtualization is
    counted elsewhere -- confirm. */
4207 if (!odr_types_ptr)
4208 return 0;
4210 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
4211 This is implemented by setting up final_warning_records that are updated
4212 by get_polymorphic_call_targets.
4213 We need to clear cache in this case to trigger recomputation of all
4214 entries. */
4215 if (warn_suggest_final_methods || warn_suggest_final_types)
4217 final_warning_records = new (final_warning_record);
4218 final_warning_records->type_warnings = vNULL;
4219 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
4220 free_polymorphic_call_targets_hash ();
4223 FOR_EACH_DEFINED_FUNCTION (n)
4225 bool update = false;
4226 if (dump_file && n->indirect_calls)
4227 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
4228 n->name (), n->order);
4229 for (e = n->indirect_calls; e; e = e->next_callee)
4230 if (e->indirect_info->polymorphic)
4232 struct cgraph_node *likely_target = NULL;
4233 void *cache_token;
4234 bool final;
4235 int speculative_targets;
4237 if (final_warning_records)
4238 final_warning_records->dyn_count = e->count;
4240 vec <cgraph_node *>targets
4241 = possible_polymorphic_call_targets
4242 (e, &final, &cache_token, &speculative_targets);
4243 unsigned int i;
4245 if (dump_file)
4246 dump_possible_polymorphic_call_targets
4247 (dump_file, e);
4249 npolymorphic++;
4251 if (!flag_devirtualize_speculatively)
4252 continue;
4254 if (!e->maybe_hot_p ())
4256 if (dump_file)
4257 fprintf (dump_file, "Call is cold\n\n");
4258 ncold++;
4259 continue;
4261 if (e->speculative)
4263 if (dump_file)
4264 fprintf (dump_file, "Call is already speculated\n\n");
4265 nspeculated++;
4267 /* When dumping see if we agree with speculation. */
4268 if (!dump_file)
4269 continue;
4271 if (bad_call_targets.contains (cache_token))
4273 if (dump_file)
4274 fprintf (dump_file, "Target list is known to be useless\n\n");
4275 nmultiple++;
4276 continue;
 /* Look for the unique likely target; bail out when there is more than
    one among the non-speculative candidates. */
4278 for (i = 0; i < targets.length (); i++)
4279 if (likely_target_p (targets[i]))
4281 if (likely_target)
4283 if (i < (unsigned) speculative_targets)
4285 likely_target = NULL;
4286 if (dump_file)
4287 fprintf (dump_file, "More than one likely target\n\n");
4288 nmultiple++;
4290 break;
4292 likely_target = targets[i];
4294 if (!likely_target)
4296 bad_call_targets.add (cache_token);
4297 continue;
4299 /* This is reached only when dumping; check if we agree or disagree
4300 with the speculation. */
4301 if (e->speculative)
4303 struct cgraph_edge *e2;
4304 struct ipa_ref *ref;
4305 e->speculative_call_info (e2, e, ref);
4306 if (e2->callee->ultimate_alias_target ()
4307 == likely_target->ultimate_alias_target ())
4309 fprintf (dump_file, "We agree with speculation\n\n");
4310 nok++;
4312 else
4314 fprintf (dump_file, "We disagree with speculation\n\n");
4315 nwrong++;
4317 continue;
4319 if (!likely_target->definition)
4321 if (dump_file)
4322 fprintf (dump_file, "Target is not a definition\n\n");
4323 nnotdefined++;
4324 continue;
4326 /* Do not introduce new references to external symbols. While we
4327 can handle these just well, it is common for programs to be
4328 incorrectly linked with headers declaring methods that the objects
4329 they are linked with do not define. */
4330 if (DECL_EXTERNAL (likely_target->decl))
4332 if (dump_file)
4333 fprintf (dump_file, "Target is external\n\n");
4334 nexternal++;
4335 continue;
4337 /* Don't use an implicitly-declared destructor (c++/58678). */
4338 struct cgraph_node *non_thunk_target
4339 = likely_target->function_symbol ();
4340 if (DECL_ARTIFICIAL (non_thunk_target->decl))
4342 if (dump_file)
4343 fprintf (dump_file, "Target is artificial\n\n");
4344 nartificial++;
4345 continue;
4347 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
4348 && likely_target->can_be_discarded_p ())
4350 if (dump_file)
4351 fprintf (dump_file, "Target is overwritable\n\n");
4352 noverwritable++;
4353 continue;
4355 else if (dbg_cnt (devirt))
4357 if (dump_enabled_p ())
4359 location_t locus = gimple_location_safe (e->call_stmt);
4360 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
4361 "speculatively devirtualizing call in %s/%i to %s/%i\n",
4362 n->name (), n->order,
4363 likely_target->name (),
4364 likely_target->order);
4366 if (!likely_target->can_be_discarded_p ())
4368 cgraph_node *alias;
4369 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
4370 if (alias)
4371 likely_target = alias;
4373 nconverted++;
4374 update = true;
4375 e->make_speculative
4376 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
4379 if (update)
4380 inline_update_overall_summary (n);
 /* Emit the -Wsuggest-final-types/-Wsuggest-final-methods warnings
    gathered above, sorted so the most profitable suggestions come
    first. */
4382 if (warn_suggest_final_methods || warn_suggest_final_types)
4384 if (warn_suggest_final_types)
4386 final_warning_records->type_warnings.qsort (type_warning_cmp);
4387 for (unsigned int i = 0;
4388 i < final_warning_records->type_warnings.length (); i++)
4389 if (final_warning_records->type_warnings[i].count)
4391 tree type = final_warning_records->type_warnings[i].type;
4392 int count = final_warning_records->type_warnings[i].count;
4393 long long dyn_count
4394 = final_warning_records->type_warnings[i].dyn_count;
4396 if (!dyn_count)
4397 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
4398 OPT_Wsuggest_final_types, count,
4399 "Declaring type %qD final "
4400 "would enable devirtualization of %i call",
4401 "Declaring type %qD final "
4402 "would enable devirtualization of %i calls",
4403 type,
4404 count);
4405 else
4406 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
4407 OPT_Wsuggest_final_types, count,
4408 "Declaring type %qD final "
4409 "would enable devirtualization of %i call "
4410 "executed %lli times",
4411 "Declaring type %qD final "
4412 "would enable devirtualization of %i calls "
4413 "executed %lli times",
4414 type,
4415 count,
4416 dyn_count);
4420 if (warn_suggest_final_methods)
4422 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
4424 final_warning_records->decl_warnings.traverse
4425 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
4426 decl_warnings_vec.qsort (decl_warning_cmp);
4427 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
4429 tree decl = decl_warnings_vec[i]->decl;
4430 int count = decl_warnings_vec[i]->count;
4431 long long dyn_count = decl_warnings_vec[i]->dyn_count;
4433 if (!dyn_count)
4434 if (DECL_CXX_DESTRUCTOR_P (decl))
4435 warning_n (DECL_SOURCE_LOCATION (decl),
4436 OPT_Wsuggest_final_methods, count,
4437 "Declaring virtual destructor of %qD final "
4438 "would enable devirtualization of %i call",
4439 "Declaring virtual destructor of %qD final "
4440 "would enable devirtualization of %i calls",
4441 DECL_CONTEXT (decl), count);
4442 else
4443 warning_n (DECL_SOURCE_LOCATION (decl),
4444 OPT_Wsuggest_final_methods, count,
4445 "Declaring method %qD final "
4446 "would enable devirtualization of %i call",
4447 "Declaring method %qD final "
4448 "would enable devirtualization of %i calls",
4449 decl, count);
4450 else if (DECL_CXX_DESTRUCTOR_P (decl))
4451 warning_n (DECL_SOURCE_LOCATION (decl),
4452 OPT_Wsuggest_final_methods, count,
4453 "Declaring virtual destructor of %qD final "
4454 "would enable devirtualization of %i call "
4455 "executed %lli times",
4456 "Declaring virtual destructor of %qD final "
4457 "would enable devirtualization of %i calls "
4458 "executed %lli times",
4459 DECL_CONTEXT (decl), count, dyn_count);
4460 else
4461 warning_n (DECL_SOURCE_LOCATION (decl),
4462 OPT_Wsuggest_final_methods, count,
4463 "Declaring method %qD final "
4464 "would enable devirtualization of %i call "
4465 "executed %lli times",
4466 "Declaring method %qD final "
4467 "would enable devirtualization of %i calls "
4468 "executed %lli times",
4469 decl, count, dyn_count);
4473 delete (final_warning_records);
4474 final_warning_records = 0;
4477 if (dump_file)
4478 fprintf (dump_file,
4479 "%i polymorphic calls, %i devirtualized,"
4480 " %i speculatively devirtualized, %i cold\n"
4481 "%i have multiple targets, %i overwritable,"
4482 " %i already speculated (%i agree, %i disagree),"
4483 " %i external, %i not defined, %i artificial\n",
4484 npolymorphic, ndevirtualized, nconverted, ncold,
4485 nmultiple, noverwritable, nspeculated, nok, nwrong,
4486 nexternal, nnotdefined, nartificial);
4487 return ndevirtualized ? TODO_remove_functions : 0;
4490 namespace {
 /* Pass metadata for the IPA devirtualization pass. */
4492 const pass_data pass_data_ipa_devirt =
4494 IPA_PASS, /* type */
4495 "devirt", /* name */
4496 OPTGROUP_NONE, /* optinfo_flags */
4497 TV_IPA_DEVIRT, /* tv_id */
4498 0, /* properties_required */
4499 0, /* properties_provided */
4500 0, /* properties_destroyed */
4501 0, /* todo_flags_start */
4502 ( TODO_dump_symtab ), /* todo_flags_finish */
 /* IPA pass wrapper that dispatches to ipa_devirt; it has no summaries or
    per-function transform hooks. */
4505 class pass_ipa_devirt : public ipa_opt_pass_d
4507 public:
4508 pass_ipa_devirt (gcc::context *ctxt)
4509 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
4510 NULL, /* generate_summary */
4511 NULL, /* write_summary */
4512 NULL, /* read_summary */
4513 NULL, /* write_optimization_summary */
4514 NULL, /* read_optimization_summary */
4515 NULL, /* stmt_fixup */
4516 0, /* function_transform_todo_flags_start */
4517 NULL, /* function_transform */
4518 NULL) /* variable_transform */
4521 /* opt_pass methods: */
 /* Run when devirtualization is enabled and either speculative
    devirtualization or one of the final-suggestion warnings is active. */
4522 virtual bool gate (function *)
4524 return (flag_devirtualize
4525 && (flag_devirtualize_speculatively
4526 || (warn_suggest_final_methods
4527 || warn_suggest_final_types))
4528 && optimize);
4531 virtual unsigned int execute (function *) { return ipa_devirt (); }
4533 }; // class pass_ipa_devirt
4535 } // anon namespace
 /* Factory function returning a freshly allocated instance of the
    devirtualization pass. */
4537 ipa_opt_pass_d *
4538 make_pass_ipa_devirt (gcc::context *ctxt)
4540 return new pass_ipa_devirt (ctxt);
4543 #include "gt-ipa-devirt.h"