2014-09-18 Vladimir Makarov <vmakarov@redhat.com>
[official-gcc.git] / gcc / ipa-devirt.c
blob1480b29a89f74e1da677df54d37b6f75a02480d0
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "hash-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "inchash.h"
122 #include "tree-pretty-print.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-alias.h"
125 #include "internal-fn.h"
126 #include "gimple-fold.h"
127 #include "gimple-expr.h"
128 #include "gimple.h"
129 #include "ipa-inline.h"
130 #include "diagnostic.h"
131 #include "tree-dfa.h"
132 #include "demangle.h"
133 #include "dbgcnt.h"
134 #include "gimple-pretty-print.h"
135 #include "stor-layout.h"
136 #include "intl.h"
137 #include "hash-map.h"
/* Hash based set of pairs of types.  Used as the "visited" set when
   structurally comparing possibly-recursive types.  */
typedef struct
{
  /* The two types being compared.  Callers canonicalize the pair so
     that the type with the smaller TYPE_UID is stored in FIRST.  */
  tree first;
  tree second;
} type_pair;
/* Hash-set traits for type_pair: hashing, equality and the empty/deleted
   marker protocol required by hash_set.  */
struct pair_traits : default_hashset_traits
{
  /* Combine the two type UIDs into a hash value.  */
  static hashval_t
  hash (type_pair p)
  {
    return TYPE_UID (p.first) ^ TYPE_UID (p.second);
  }
  /* An empty slot is marked by a NULL first member.  */
  static bool
  is_empty (type_pair p)
  {
    return p.first == NULL;
  }
  /* Deletion is never used for this set.  */
  static bool
  is_deleted (type_pair p ATTRIBUTE_UNUSED)
  {
    return false;
  }
  /* Pairs are equal when both components match exactly.  */
  static bool
  equal (const type_pair &a, const type_pair &b)
  {
    return a.first==b.first && a.second == b.second;
  }
  /* Mark slot E as empty (see is_empty above).  */
  static void
  mark_empty (type_pair &e)
  {
    e.first = NULL;
  }
};
/* Forward declaration of the structural type comparator defined later in
   this file; VISITED guards against infinite recursion on cyclic types.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair,pair_traits> *);

/* Set to true once any ODR violation has been diagnosed.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};
/* Forward declaration; the definition is later in this file.  */
static bool contains_type_p (tree, HOST_WIDE_INT, tree);
216 /* Return true if BINFO corresponds to a type with virtual methods.
218 Every type has several BINFOs. One is the BINFO associated by the type
219 while other represents bases of derived types. The BINFOs representing
220 bases do not have BINFO_VTABLE pointer set when this is the single
221 inheritance (because vtables are shared). Look up the BINFO of type
222 and check presence of its vtable. */
224 static inline bool
225 polymorphic_type_binfo_p (tree binfo)
227 /* See if BINFO's type has an virtual table associtated with it. */
228 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
231 /* Return TRUE if all derived types of T are known and thus
232 we may consider the walk of derived type complete.
234 This is typically true only for final anonymous namespace types and types
235 defined within functions (that may be COMDAT and thus shared across units,
236 but with the same set of derived types). */
238 static bool
239 type_all_derivations_known_p (tree t)
241 if (TYPE_FINAL_P (t))
242 return true;
243 if (flag_ltrans)
244 return false;
245 if (type_in_anonymous_namespace_p (t))
246 return true;
247 return (decl_function_context (TYPE_NAME (t)) != NULL);
250 /* Return TURE if type's constructors are all visible. */
252 static bool
253 type_all_ctors_visible_p (tree t)
255 return !flag_ltrans
256 && symtab->state >= CONSTRUCTION
257 /* We can not always use type_all_derivations_known_p.
258 For function local types we must assume case where
259 the function is COMDAT and shared in between units.
261 TODO: These cases are quite easy to get, but we need
262 to keep track of C++ privatizing via -Wno-weak
263 as well as the IPA privatizing. */
264 && type_in_anonymous_namespace_p (t);
267 /* Return TRUE if type may have instance. */
269 static bool
270 type_possibly_instantiated_p (tree t)
272 tree vtable;
273 varpool_node *vnode;
275 /* TODO: Add abstract types here. */
276 if (!type_all_ctors_visible_p (t))
277 return true;
279 vtable = BINFO_VTABLE (TYPE_BINFO (t));
280 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
281 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
282 vnode = varpool_node::get (vtable);
283 return vnode && vnode->definition;
/* One Definition Rule hashtable helpers.  Hashes odr_type_d nodes and
   compares them against raw tree type nodes.  */

struct odr_hasher
{
  /* Stored elements are ODR type nodes.  */
  typedef odr_type_d value_type;
  /* Lookups are done with plain tree type nodes.  */
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
297 /* Return type that was declared with T's name so that T is an
298 qualified variant of it. */
300 static inline tree
301 main_odr_variant (const_tree t)
303 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
304 return TREE_TYPE (TYPE_NAME (t));
305 /* Unnamed types and non-C++ produced types can be compared by variants. */
306 else
307 return TYPE_MAIN_VARIANT (t);
310 /* Produce hash based on type name. */
312 static hashval_t
313 hash_type_name (tree t)
315 gcc_checking_assert (main_odr_variant (t) == t);
317 /* If not in LTO, all main variants are unique, so we can do
318 pointer hash. */
319 if (!in_lto_p)
320 return htab_hash_pointer (t);
322 /* Anonymous types are unique. */
323 if (type_in_anonymous_namespace_p (t))
324 return htab_hash_pointer (t);
326 /* ODR types have name specified. */
327 if (TYPE_NAME (t)
328 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
329 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
331 /* For polymorphic types that was compiled with -fno-lto-odr-type-merging
332 we can simply hash the virtual table. */
333 if (TREE_CODE (t) == RECORD_TYPE
334 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
336 tree v = BINFO_VTABLE (TYPE_BINFO (t));
337 hashval_t hash = 0;
339 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
341 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
342 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
345 v = DECL_ASSEMBLER_NAME (v);
346 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
347 return hash;
350 /* Builtin types may appear as main variants of ODR types and are unique.
351 Sanity check we do not get anything that looks non-builtin. */
352 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
353 || TREE_CODE (t) == VOID_TYPE
354 || TREE_CODE (t) == COMPLEX_TYPE
355 || TREE_CODE (t) == REAL_TYPE
356 || TREE_CODE (t) == POINTER_TYPE);
357 return htab_hash_pointer (t);
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  /* Delegate to the name-based type hash above.  */
  return hash_type_name (odr_type->type);
}
368 /* For languages with One Definition Rule, work out if
369 types are the same based on their name.
371 This is non-trivial for LTO where minnor differences in
372 the type representation may have prevented type merging
373 to merge two copies of otherwise equivalent type.
375 Until we start streaming mangled type names, this function works
376 only for polymorphic types. */
378 bool
379 types_same_for_odr (const_tree type1, const_tree type2)
381 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
383 type1 = main_odr_variant (type1);
384 type2 = main_odr_variant (type2);
386 if (type1 == type2)
387 return true;
389 if (!in_lto_p)
390 return false;
392 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
393 on the corresponding TYPE_STUB_DECL. */
394 if (type_in_anonymous_namespace_p (type1)
395 || type_in_anonymous_namespace_p (type2))
396 return false;
399 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
401 Ideally we should never meed types without ODR names here. It can however
402 happen in two cases:
404 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
405 Here testing for equivalence is safe, since their MAIN_VARIANTs are
406 unique.
407 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
408 establish precise ODR equivalency, but for correctness we care only
409 about equivalency on complete polymorphic types. For these we can
410 compare assembler names of their virtual tables. */
411 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
412 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
414 /* See if types are obvoiusly different (i.e. different codes
415 or polymorphis wrt non-polymorphic). This is not strictly correct
416 for ODR violating programs, but we can't do better without streaming
417 ODR names. */
418 if (TREE_CODE (type1) != TREE_CODE (type2))
419 return false;
420 if (TREE_CODE (type1) == RECORD_TYPE
421 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
422 return false;
423 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
424 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
425 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
426 return false;
428 /* At the moment we have no way to establish ODR equivlaence at LTO
429 other than comparing virtual table pointrs of polymorphic types.
430 Eventually we should start saving mangled names in TYPE_NAME.
431 Then this condition will become non-trivial. */
433 if (TREE_CODE (type1) == RECORD_TYPE
434 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
435 && BINFO_VTABLE (TYPE_BINFO (type1))
436 && BINFO_VTABLE (TYPE_BINFO (type2)))
438 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
439 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
440 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
441 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
442 return (operand_equal_p (TREE_OPERAND (v1, 1),
443 TREE_OPERAND (v2, 1), 0)
444 && DECL_ASSEMBLER_NAME
445 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
446 == DECL_ASSEMBLER_NAME
447 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
449 gcc_unreachable ();
451 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
452 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
456 /* Compare types T1 and T2 and return true if they are
457 equivalent. */
459 inline bool
460 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
462 tree t2 = const_cast <tree> (ct2);
464 gcc_checking_assert (main_odr_variant (t2) == t2);
465 if (t1->type == t2)
466 return true;
467 if (!in_lto_p)
468 return false;
469 return types_same_for_odr (t1->type, t2);
472 /* Free ODR type V. */
474 inline void
475 odr_hasher::remove (value_type *v)
477 v->bases.release ();
478 v->derived_types.release ();
479 if (v->types_set)
480 delete v->types_set;
481 ggc_free (v);
/* ODR type hash used to lookup ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
496 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
497 void
498 set_type_binfo (tree type, tree binfo)
500 for (; type; type = TYPE_NEXT_VARIANT (type))
501 if (COMPLETE_TYPE_P (type))
502 TYPE_BINFO (type) = binfo;
503 else
504 gcc_assert (!TYPE_BINFO (type));
507 /* Compare T2 and T2 based on name or structure. */
509 static bool
510 odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<type_pair,pair_traits> *visited)
512 bool an1, an2;
514 /* This can happen in incomplete types that should be handled earlier. */
515 gcc_assert (t1 && t2);
517 t1 = main_odr_variant (t1);
518 t2 = main_odr_variant (t2);
519 if (t1 == t2)
520 return true;
522 /* Anonymous namespace types must match exactly. */
523 an1 = type_in_anonymous_namespace_p (t1);
524 an2 = type_in_anonymous_namespace_p (t2);
525 if (an1 != an2 || an1)
526 return false;
528 /* For ODR types be sure to compare their names. */
529 if ((odr_type_p (t1) && !odr_type_p (t2))
530 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
531 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
532 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
533 && polymorphic_type_binfo_p (TYPE_BINFO (t2))))
535 if (!types_same_for_odr (t1, t2))
536 return false;
537 /* Limit recursion: If subtypes are ODR types and we know
538 that they are same, be happy. */
539 if (!get_odr_type (t1, true)->odr_violated)
540 return true;
543 /* Component types, builtins and possibly vioalting ODR types
544 have to be compared structurally. */
545 if (TREE_CODE (t1) != TREE_CODE (t2))
546 return false;
547 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
548 return false;
549 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
550 return false;
552 type_pair pair={t1,t2};
553 if (TYPE_UID (t1) > TYPE_UID (t2))
555 pair.first = t2;
556 pair.second = t1;
558 if (visited->add (pair))
559 return true;
560 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
563 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
564 violation warings. */
566 void
567 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
569 int n1, n2;
570 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
572 odr_violation_reported = true;
573 if (DECL_VIRTUAL_P (prevailing->decl))
575 varpool_node *tmp = prevailing;
576 prevailing = vtable;
577 vtable = tmp;
579 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
580 OPT_Wodr,
581 "virtual table of type %qD violates one definition rule",
582 DECL_CONTEXT (vtable->decl)))
583 inform (DECL_SOURCE_LOCATION (prevailing->decl),
584 "variable of same assembler name as the virtual table is "
585 "defined in another translation unit");
586 return;
588 if (!prevailing->definition || !vtable->definition)
589 return;
590 for (n1 = 0, n2 = 0; true; n1++, n2++)
592 struct ipa_ref *ref1, *ref2;
593 bool end1, end2;
594 end1 = !prevailing->iterate_reference (n1, ref1);
595 end2 = !vtable->iterate_reference (n2, ref2);
596 if (end1 && end2)
597 return;
598 if (!end1 && !end2
599 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
600 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
601 && !n2
602 && !DECL_VIRTUAL_P (ref2->referred->decl)
603 && DECL_VIRTUAL_P (ref1->referred->decl))
605 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
606 "virtual table of type %qD contains RTTI information",
607 DECL_CONTEXT (vtable->decl)))
609 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
610 "but is prevailed by one without from other translation unit");
611 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
612 "RTTI will not work on this type");
614 n2++;
615 end2 = !vtable->iterate_reference (n2, ref2);
617 if (!end1 && !end2
618 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
619 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
620 && !n1
621 && !DECL_VIRTUAL_P (ref1->referred->decl)
622 && DECL_VIRTUAL_P (ref2->referred->decl))
624 n1++;
625 end1 = !vtable->iterate_reference (n1, ref1);
627 if (end1 || end2)
629 if (end1)
631 varpool_node *tmp = prevailing;
632 prevailing = vtable;
633 vtable = tmp;
634 ref1 = ref2;
636 if (warning_at (DECL_SOURCE_LOCATION
637 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
638 "virtual table of type %qD violates "
639 "one definition rule",
640 DECL_CONTEXT (vtable->decl)))
642 inform (DECL_SOURCE_LOCATION
643 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
644 "the conflicting type defined in another translation "
645 "unit");
646 inform (DECL_SOURCE_LOCATION
647 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
648 "contains additional virtual method %qD",
649 ref1->referred->decl);
651 return;
653 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
654 != DECL_ASSEMBLER_NAME (ref2->referred->decl))
656 if (warning_at (DECL_SOURCE_LOCATION
657 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
658 "virtual table of type %qD violates "
659 "one definition rule ",
660 DECL_CONTEXT (vtable->decl)))
662 inform (DECL_SOURCE_LOCATION
663 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
664 "the conflicting type defined in another translation "
665 "unit");
666 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
667 "virtual method %qD", ref1->referred->decl);
668 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
669 "ought to match virtual method %qD but does not",
670 ref2->referred->decl);
671 return;
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set *WARNED if a warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  /* DECL2 is the location the trailing REASON note points at; it may be
     retargeted to the mismatching sub-decl below.  */
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  /* The main warning; if it is suppressed, emit no notes either.  */
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  /* No sub-decls to talk about: fall through to the REASON note.  */
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Normalize so ST1 is the non-NULL field.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    /* Other sub-decl kinds get no detailed note.  */
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used on same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  /* Without named types there is no useful location to report.  */
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in different namespace  ",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}
762 /* Compare T1 and T2, report ODR violations if WARN is true and set
763 WARNED to true if anything is reported. Return true if types match.
764 If true is returned, the types are also compatible in the sense of
765 gimple_canonical_types_compatible_p. */
767 static bool
768 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<type_pair,pair_traits> *visited)
770 /* Check first for the obvious case of pointer identity. */
771 if (t1 == t2)
772 return true;
773 gcc_assert (!type_in_anonymous_namespace_p (t1));
774 gcc_assert (!type_in_anonymous_namespace_p (t2));
776 /* Can't be the same type if the types don't have the same code. */
777 if (TREE_CODE (t1) != TREE_CODE (t2))
779 warn_odr (t1, t2, NULL, NULL, warn, warned,
780 G_("a different type is defined in another translation unit"));
781 return false;
784 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
786 warn_odr (t1, t2, NULL, NULL, warn, warned,
787 G_("a type with different qualifiers is defined in another "
788 "translation unit"));
789 return false;
792 if (comp_type_attributes (t1, t2) != 1)
794 warn_odr (t1, t2, NULL, NULL, warn, warned,
795 G_("a type with attributes "
796 "is defined in another translation unit"));
797 return false;
800 if (TREE_CODE (t1) == ENUMERAL_TYPE)
802 tree v1, v2;
803 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
804 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
806 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
808 warn_odr (t1, t2, NULL, NULL, warn, warned,
809 G_("an enum with different value name"
810 " is defined in another translation unit"));
811 return false;
813 if (TREE_VALUE (v1) != TREE_VALUE (v2)
814 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
815 DECL_INITIAL (TREE_VALUE (v2)), 0))
817 warn_odr (t1, t2, NULL, NULL, warn, warned,
818 G_("an enum with different values is defined"
819 " in another translation unit"));
820 return false;
823 if (v1 || v2)
825 warn_odr (t1, t2, NULL, NULL, warn, warned,
826 G_("an enum with mismatching number of values "
827 "is defined in another translation unit"));
828 return false;
832 /* Non-aggregate types can be handled cheaply. */
833 if (INTEGRAL_TYPE_P (t1)
834 || SCALAR_FLOAT_TYPE_P (t1)
835 || FIXED_POINT_TYPE_P (t1)
836 || TREE_CODE (t1) == VECTOR_TYPE
837 || TREE_CODE (t1) == COMPLEX_TYPE
838 || TREE_CODE (t1) == OFFSET_TYPE
839 || POINTER_TYPE_P (t1))
841 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
843 warn_odr (t1, t2, NULL, NULL, warn, warned,
844 G_("a type with different precision is defined "
845 "in another translation unit"));
846 return false;
848 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
850 warn_odr (t1, t2, NULL, NULL, warn, warned,
851 G_("a type with different signedness is defined "
852 "in another translation unit"));
853 return false;
856 if (TREE_CODE (t1) == INTEGER_TYPE
857 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
859 /* char WRT uint_8? */
860 warn_odr (t1, t2, NULL, NULL, warn, warned,
861 G_("a different type is defined in another "
862 "translation unit"));
863 return false;
866 /* For canonical type comparisons we do not want to build SCCs
867 so we cannot compare pointed-to types. But we can, for now,
868 require the same pointed-to type kind and match what
869 useless_type_conversion_p would do. */
870 if (POINTER_TYPE_P (t1))
872 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
873 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
875 warn_odr (t1, t2, NULL, NULL, warn, warned,
876 G_("it is defined as a pointer in different address "
877 "space in another translation unit"));
878 return false;
881 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
883 warn_odr (t1, t2, NULL, NULL, warn, warned,
884 G_("it is defined as a pointer to different type "
885 "in another translation unit"));
886 if (warn && warned)
887 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
888 return false;
892 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
893 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
895 /* Probably specific enough. */
896 warn_odr (t1, t2, NULL, NULL, warn, warned,
897 G_("a different type is defined "
898 "in another translation unit"));
899 if (warn && warned)
900 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
901 return false;
904 /* Do type-specific comparisons. */
905 else switch (TREE_CODE (t1))
907 case ARRAY_TYPE:
909 /* Array types are the same if the element types are the same and
910 the number of elements are the same. */
911 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
913 warn_odr (t1, t2, NULL, NULL, warn, warned,
914 G_("a different type is defined in another "
915 "translation unit"));
916 if (warn && warned)
917 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
919 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
920 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
921 == TYPE_NONALIASED_COMPONENT (t2));
923 tree i1 = TYPE_DOMAIN (t1);
924 tree i2 = TYPE_DOMAIN (t2);
926 /* For an incomplete external array, the type domain can be
927 NULL_TREE. Check this condition also. */
928 if (i1 == NULL_TREE || i2 == NULL_TREE)
929 return true;
931 tree min1 = TYPE_MIN_VALUE (i1);
932 tree min2 = TYPE_MIN_VALUE (i2);
933 tree max1 = TYPE_MAX_VALUE (i1);
934 tree max2 = TYPE_MAX_VALUE (i2);
936 /* In C++, minimums should be always 0. */
937 gcc_assert (min1 == min2);
938 if (!operand_equal_p (max1, max2, 0))
940 warn_odr (t1, t2, NULL, NULL, warn, warned,
941 G_("an array of different size is defined "
942 "in another translation unit"));
943 return false;
946 break;
948 case METHOD_TYPE:
949 case FUNCTION_TYPE:
950 /* Function types are the same if the return type and arguments types
951 are the same. */
952 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
954 warn_odr (t1, t2, NULL, NULL, warn, warned,
955 G_("has different return value "
956 "in another translation unit"));
957 if (warn && warned)
958 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
959 return false;
962 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
963 return true;
964 else
966 tree parms1, parms2;
968 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
969 parms1 && parms2;
970 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
972 if (!odr_subtypes_equivalent_p
973 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
975 warn_odr (t1, t2, NULL, NULL, warn, warned,
976 G_("has different parameters in another "
977 "translation unit"));
978 if (warn && warned)
979 warn_types_mismatch (TREE_VALUE (parms1),
980 TREE_VALUE (parms2));
981 return false;
985 if (parms1 || parms2)
987 warn_odr (t1, t2, NULL, NULL, warn, warned,
988 G_("has different parameters "
989 "in another translation unit"));
990 return false;
993 return true;
996 case RECORD_TYPE:
997 case UNION_TYPE:
998 case QUAL_UNION_TYPE:
1000 tree f1, f2;
1002 /* For aggregate types, all the fields must be the same. */
1003 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1005 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1006 f1 || f2;
1007 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1009 /* Skip non-fields. */
1010 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1011 f1 = TREE_CHAIN (f1);
1012 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1013 f2 = TREE_CHAIN (f2);
1014 if (!f1 || !f2)
1015 break;
1016 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1017 break;
1018 if (DECL_NAME (f1) != DECL_NAME (f2)
1019 && !DECL_ARTIFICIAL (f1))
1021 warn_odr (t1, t2, f1, f2, warn, warned,
1022 G_("a field with different name is defined "
1023 "in another translation unit"));
1024 return false;
1026 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1028 /* Do not warn about artificial fields and just go into generic
1029 field mismatch warning. */
1030 if (DECL_ARTIFICIAL (f1))
1031 break;
1033 warn_odr (t1, t2, f1, f2, warn, warned,
1034 G_("a field of same name but different type "
1035 "is defined in another translation unit"));
1036 if (warn && warned)
1037 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1038 return false;
1040 if (!gimple_compare_field_offset (f1, f2))
1042 /* Do not warn about artificial fields and just go into generic
1043 field mismatch warning. */
1044 if (DECL_ARTIFICIAL (f1))
1045 break;
1046 warn_odr (t1, t2, t1, t2, warn, warned,
1047 G_("fields has different layout "
1048 "in another translation unit"));
1049 return false;
1051 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1052 == DECL_NONADDRESSABLE_P (f2));
1055 /* If one aggregate has more fields than the other, they
1056 are not the same. */
1057 if (f1 || f2)
1059 if (f1 && DECL_ARTIFICIAL (f1))
1060 f1 = NULL;
1061 if (f2 && DECL_ARTIFICIAL (f2))
1062 f2 = NULL;
1063 if (f1 || f2)
1064 warn_odr (t1, t2, f1, f2, warn, warned,
1065 G_("a type with different number of fields "
1066 "is defined in another translation unit"));
1067 /* Ideally we should never get this generic message. */
1068 else
1069 warn_odr (t1, t2, f1, f2, warn, warned,
1070 G_("a type with different memory representation "
1071 "is defined in another translation unit"));
1073 return false;
1075 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1076 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1077 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1079 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1080 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1081 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1083 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1085 warn_odr (t1, t2, f1, f2, warn, warned,
1086 G_("a different method of same type "
1087 "is defined in another translation unit"));
1088 return false;
1090 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1092 warn_odr (t1, t2, f1, f2, warn, warned,
1093 G_("s definition that differs by virtual "
1094 "keyword in another translation unit"));
1095 return false;
1097 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1099 warn_odr (t1, t2, f1, f2, warn, warned,
1100 G_("virtual table layout differs in another "
1101 "translation unit"));
1102 return false;
1104 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1106 warn_odr (t1, t2, f1, f2, warn, warned,
1107 G_("method with incompatible type is defined "
1108 "in another translation unit"));
1109 return false;
1112 if (f1 || f2)
1114 warn_odr (t1, t2, NULL, NULL, warn, warned,
1115 G_("a type with different number of methods "
1116 "is defined in another translation unit"));
1117 return false;
1121 break;
1123 case VOID_TYPE:
1124 break;
1126 default:
1127 debug_tree (t1);
1128 gcc_unreachable ();
1131 /* Those are better to come last as they are utterly uninformative. */
1132 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1133 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1135 warn_odr (t1, t2, NULL, NULL, warn, warned,
1136 G_("a type with different size "
1137 "is defined in another translation unit"));
1138 return false;
1140 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1141 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1143 warn_odr (t1, t2, NULL, NULL, warn, warned,
1144 G_("a type with different alignment "
1145 "is defined in another translation unit"));
1146 return false;
1148 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1149 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1150 TYPE_SIZE_UNIT (t2), 0));
1151 return true;
1154 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1155 from VAL->type. This may happen in LTO where tree merging did not merge
1156 all variants of the same type. It may or may not mean the ODR violation.
1157 Add it to the list of duplicates and warn on some violations. */
1159 static bool
1160 add_type_duplicate (odr_type val, tree type)
1162 bool build_bases = false;
1163 if (!val->types_set)
1164 val->types_set = new hash_set<tree>;
1166 /* Always prefer complete type to be the leader. */
1167 if (!COMPLETE_TYPE_P (val->type)
1168 && COMPLETE_TYPE_P (type))
1170 tree tmp = type;
1172 build_bases = true;
1173 type = val->type;
1174 val->type = tmp;
1177 /* See if this duplicate is new. */
1178 if (!val->types_set->add (type))
1180 bool merge = true;
1181 bool base_mismatch = false;
1182 unsigned int i,j;
1183 bool warned = false;
1184 hash_set<type_pair,pair_traits> visited;
1186 gcc_assert (in_lto_p);
1187 vec_safe_push (val->types, type);
1189 /* First we compare memory layout. */
1190 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
1191 &warned, &visited))
1193 merge = false;
1194 odr_violation_reported = true;
1195 val->odr_violated = true;
1196 if (symtab->dump_file)
1198 fprintf (symtab->dump_file, "ODR violation\n");
1200 print_node (symtab->dump_file, "", val->type, 0);
1201 putc ('\n',symtab->dump_file);
1202 print_node (symtab->dump_file, "", type, 0);
1203 putc ('\n',symtab->dump_file);
1207 /* Next sanity check that bases are the same. If not, we will end
1208 up producing wrong answers. */
1209 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1210 && TREE_CODE (val->type) == RECORD_TYPE
1211 && TREE_CODE (type) == RECORD_TYPE
1212 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1214 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1215 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
1217 odr_type base = get_odr_type
1218 (BINFO_TYPE
1219 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1220 i)),
1221 true);
1222 if (val->bases.length () <= j || val->bases[j] != base)
1223 base_mismatch = true;
1224 j++;
1226 if (base_mismatch)
1228 merge = false;
1229 odr_violation_reported = true;
1231 if (!warned && !val->odr_violated)
1232 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1233 "a type with the same name but different bases is "
1234 "defined in another translation unit");
1235 val->odr_violated = true;
1236 if (symtab->dump_file)
1238 fprintf (symtab->dump_file, "ODR bse violation or merging bug?\n");
1240 print_node (symtab->dump_file, "", val->type, 0);
1241 putc ('\n',symtab->dump_file);
1242 print_node (symtab->dump_file, "", type, 0);
1243 putc ('\n',symtab->dump_file);
1248 /* Regularize things a little. During LTO same types may come with
1249 different BINFOs. Either because their virtual table was
1250 not merged by tree merging and only later at decl merging or
1251 because one type comes with external vtable, while other
1252 with internal. We want to merge equivalent binfos to conserve
1253 memory and streaming overhead.
1255 The external vtables are more harmful: they contain references
1256 to external declarations of methods that may be defined in the
1257 merged LTO unit. For this reason we absolutely need to remove
1258 them and replace by internal variants. Not doing so will lead
1259 to incomplete answers from possible_polymorphic_call_targets.
1261 FIXME: disable for now; because ODR types are now build during
1262 streaming in, the variants do not need to be linked to the type,
1263 yet. We need to do the merging in cleanup pass to be implemented
1264 soon. */
1265 if (!flag_ltrans && merge
1266 && 0
1267 && TREE_CODE (val->type) == RECORD_TYPE
1268 && TREE_CODE (type) == RECORD_TYPE
1269 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1270 && TYPE_MAIN_VARIANT (type) == type
1271 && TYPE_MAIN_VARIANT (val->type) == val->type
1272 && BINFO_VTABLE (TYPE_BINFO (val->type))
1273 && BINFO_VTABLE (TYPE_BINFO (type)))
1275 tree master_binfo = TYPE_BINFO (val->type);
1276 tree v1 = BINFO_VTABLE (master_binfo);
1277 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1279 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1281 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1282 && operand_equal_p (TREE_OPERAND (v1, 1),
1283 TREE_OPERAND (v2, 1), 0));
1284 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1285 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1287 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1288 == DECL_ASSEMBLER_NAME (v2));
1290 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1292 unsigned int i;
1294 set_type_binfo (val->type, TYPE_BINFO (type));
1295 for (i = 0; i < val->types->length (); i++)
1297 if (TYPE_BINFO ((*val->types)[i])
1298 == master_binfo)
1299 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1301 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1303 else
1304 set_type_binfo (type, master_binfo);
1307 return build_bases;
1310 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1311 possibly new entry. */
1313 odr_type
1314 get_odr_type (tree type, bool insert)
1316 odr_type_d **slot;
1317 odr_type val;
1318 hashval_t hash;
1319 bool build_bases = false;
1320 bool insert_to_odr_array = false;
1321 int base_id = -1;
1323 type = main_odr_variant (type);
1325 hash = hash_type_name (type);
1326 slot
1327 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
1328 if (!slot)
1329 return NULL;
1331 /* See if we already have entry for type. */
1332 if (*slot)
1334 val = *slot;
1336 /* With LTO we need to support multiple tree representation of
1337 the same ODR type. */
1338 if (val->type != type)
1339 build_bases = add_type_duplicate (val, type);
1341 else
1343 val = ggc_cleared_alloc<odr_type_d> ();
1344 val->type = type;
1345 val->bases = vNULL;
1346 val->derived_types = vNULL;
1347 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1348 build_bases = COMPLETE_TYPE_P (val->type);
1349 insert_to_odr_array = true;
1352 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1353 && type == TYPE_MAIN_VARIANT (type))
1355 tree binfo = TYPE_BINFO (type);
1356 unsigned int i;
1358 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1360 val->all_derivations_known = type_all_derivations_known_p (type);
1361 *slot = val;
1362 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1363 /* For now record only polymorphic types. other are
1364 pointless for devirtualization and we can not precisely
1365 determine ODR equivalency of these during LTO. */
1366 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1368 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1369 i)),
1370 true);
1371 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1372 base->derived_types.safe_push (val);
1373 val->bases.safe_push (base);
1374 if (base->id > base_id)
1375 base_id = base->id;
1378 /* Ensure that type always appears after bases. */
1379 if (insert_to_odr_array)
1381 if (odr_types_ptr)
1382 val->id = odr_types.length ();
1383 vec_safe_push (odr_types_ptr, val);
1385 else if (base_id > val->id)
1387 odr_types[val->id] = 0;
1388 /* Be sure we did not recorded any derived types; these may need
1389 renumbering too. */
1390 gcc_assert (val->derived_types.length() == 0);
1391 if (odr_types_ptr)
1392 val->id = odr_types.length ();
1393 vec_safe_push (odr_types_ptr, val);
1395 return val;
1398 /* Add TYPE od ODR type hash. */
1400 void
1401 register_odr_type (tree type)
1403 if (!odr_hash)
1404 odr_hash = new odr_hash_type (23);
1405 /* Arrange things to be nicer and insert main variants first. */
1406 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
1407 get_odr_type (TYPE_MAIN_VARIANT (type), true);
1408 if (TYPE_MAIN_VARIANT (type) != type)
1409 get_odr_type (type, true);
1412 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
1413 recusive printing. */
1415 static void
1416 dump_odr_type (FILE *f, odr_type t, int indent=0)
1418 unsigned int i;
1419 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1420 print_generic_expr (f, t->type, TDF_SLIM);
1421 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1422 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1423 if (TYPE_NAME (t->type))
1425 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1426 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1427 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1429 if (t->bases.length ())
1431 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1432 for (i = 0; i < t->bases.length (); i++)
1433 fprintf (f, " %i", t->bases[i]->id);
1434 fprintf (f, "\n");
1436 if (t->derived_types.length ())
1438 fprintf (f, "%*s derived types:\n", indent * 2, "");
1439 for (i = 0; i < t->derived_types.length (); i++)
1440 dump_odr_type (f, t->derived_types[i], indent + 1);
1442 fprintf (f, "\n");
1445 /* Dump the type inheritance graph. */
1447 static void
1448 dump_type_inheritance_graph (FILE *f)
1450 unsigned int i;
1451 if (!odr_types_ptr)
1452 return;
1453 fprintf (f, "\n\nType inheritance graph:\n");
1454 for (i = 0; i < odr_types.length (); i++)
1456 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1457 dump_odr_type (f, odr_types[i]);
1459 for (i = 0; i < odr_types.length (); i++)
1461 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1463 unsigned int j;
1464 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1465 print_node (f, "", odr_types[i]->type, 0);
1466 for (j = 0; j < odr_types[i]->types->length (); j++)
1468 tree t;
1469 fprintf (f, "duplicate #%i\n", j);
1470 print_node (f, "", (*odr_types[i]->types)[j], 0);
1471 t = (*odr_types[i]->types)[j];
1472 while (TYPE_P (t) && TYPE_CONTEXT (t))
1474 t = TYPE_CONTEXT (t);
1475 print_node (f, "", t, 0);
1477 putc ('\n',f);
1483 /* Given method type T, return type of class it belongs to.
1484 Lookup this pointer and get its type. */
1486 tree
1487 method_class_type (const_tree t)
1489 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1490 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1492 return TREE_TYPE (first_parm_type);
1495 /* Initialize IPA devirt and build inheritance tree graph. */
1497 void
1498 build_type_inheritance_graph (void)
1500 struct symtab_node *n;
1501 FILE *inheritance_dump_file;
1502 int flags;
1504 if (odr_hash)
1505 return;
1506 timevar_push (TV_IPA_INHERITANCE);
1507 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
1508 odr_hash = new odr_hash_type (23);
1510 /* We reconstruct the graph starting of types of all methods seen in the
1511 the unit. */
1512 FOR_EACH_SYMBOL (n)
1513 if (is_a <cgraph_node *> (n)
1514 && DECL_VIRTUAL_P (n->decl)
1515 && n->real_symbol_p ())
1516 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1517 true);
1519 /* Look also for virtual tables of types that do not define any methods.
1521 We need it in a case where class B has virtual base of class A
1522 re-defining its virtual method and there is class C with no virtual
1523 methods with B as virtual base.
1525 Here we output B's virtual method in two variant - for non-virtual
1526 and virtual inheritance. B's virtual table has non-virtual version,
1527 while C's has virtual.
1529 For this reason we need to know about C in order to include both
1530 variants of B. More correctly, record_target_from_binfo should
1531 add both variants of the method when walking B, but we have no
1532 link in between them.
1534 We rely on fact that either the method is exported and thus we
1535 assume it is called externally or C is in anonymous namespace and
1536 thus we will see the vtable. */
1538 else if (is_a <varpool_node *> (n)
1539 && DECL_VIRTUAL_P (n->decl)
1540 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1541 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1542 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
1543 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
1544 if (inheritance_dump_file)
1546 dump_type_inheritance_graph (inheritance_dump_file);
1547 dump_end (TDI_inheritance, inheritance_dump_file);
1549 timevar_pop (TV_IPA_INHERITANCE);
1552 /* Return true if N has reference from live virtual table
1553 (and thus can be a destination of polymorphic call).
1554 Be conservatively correct when callgraph is not built or
1555 if the method may be referred externally. */
1557 static bool
1558 referenced_from_vtable_p (struct cgraph_node *node)
1560 int i;
1561 struct ipa_ref *ref;
1562 bool found = false;
1564 if (node->externally_visible
1565 || DECL_EXTERNAL (node->decl)
1566 || node->used_from_other_partition)
1567 return true;
1569 /* Keep this test constant time.
1570 It is unlikely this can happen except for the case where speculative
1571 devirtualization introduced many speculative edges to this node.
1572 In this case the target is very likely alive anyway. */
1573 if (node->ref_list.referring.length () > 100)
1574 return true;
1576 /* We need references built. */
1577 if (symtab->state <= CONSTRUCTION)
1578 return true;
1580 for (i = 0; node->iterate_referring (i, ref); i++)
1582 if ((ref->use == IPA_REF_ALIAS
1583 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1584 || (ref->use == IPA_REF_ADDR
1585 && TREE_CODE (ref->referring->decl) == VAR_DECL
1586 && DECL_VIRTUAL_P (ref->referring->decl)))
1588 found = true;
1589 break;
1591 return found;
1594 /* If TARGET has associated node, record it in the NODES array.
1595 CAN_REFER specify if program can refer to the target directly.
1596 if TARGET is unknown (NULL) or it can not be inserted (for example because
1597 its body was already removed and there is no way to refer to it), clear
1598 COMPLETEP. */
1600 static void
1601 maybe_record_node (vec <cgraph_node *> &nodes,
1602 tree target, hash_set<tree> *inserted,
1603 bool can_refer,
1604 bool *completep)
1606 struct cgraph_node *target_node, *alias_target;
1607 enum availability avail;
1609 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
1610 list of targets; the runtime effect of calling them is undefined.
1611 Only "real" virtual methods should be accounted. */
1612 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1613 return;
1615 if (!can_refer)
1617 /* The only case when method of anonymous namespace becomes unreferable
1618 is when we completely optimized it out. */
1619 if (flag_ltrans
1620 || !target
1621 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
1622 *completep = false;
1623 return;
1626 if (!target)
1627 return;
1629 target_node = cgraph_node::get (target);
1631 /* Preffer alias target over aliases, so we do not get confused by
1632 fake duplicates. */
1633 if (target_node)
1635 alias_target = target_node->ultimate_alias_target (&avail);
1636 if (target_node != alias_target
1637 && avail >= AVAIL_AVAILABLE
1638 && target_node->get_availability ())
1639 target_node = alias_target;
1642 /* Method can only be called by polymorphic call if any
1643 of vtables refering to it are alive.
1645 While this holds for non-anonymous functions, too, there are
1646 cases where we want to keep them in the list; for example
1647 inline functions with -fno-weak are static, but we still
1648 may devirtualize them when instance comes from other unit.
1649 The same holds for LTO.
1651 Currently we ignore these functions in speculative devirtualization.
1652 ??? Maybe it would make sense to be more aggressive for LTO even
1653 eslewhere. */
1654 if (!flag_ltrans
1655 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1656 && (!target_node
1657 || !referenced_from_vtable_p (target_node)))
1659 /* See if TARGET is useful function we can deal with. */
1660 else if (target_node != NULL
1661 && (TREE_PUBLIC (target)
1662 || DECL_EXTERNAL (target)
1663 || target_node->definition)
1664 && target_node->real_symbol_p ())
1666 gcc_assert (!target_node->global.inlined_to);
1667 gcc_assert (target_node->real_symbol_p ());
1668 if (!inserted->add (target))
1670 cached_polymorphic_call_targets->add (target_node);
1671 nodes.safe_push (target_node);
1674 else if (completep
1675 && (!type_in_anonymous_namespace_p
1676 (DECL_CONTEXT (target))
1677 || flag_ltrans))
1678 *completep = false;
1681 /* See if BINFO's type match OUTER_TYPE. If so, lookup
1682 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
1683 method in vtable and insert method to NODES array
1684 or BASES_TO_CONSIDER if this array is non-NULL.
1685 Otherwise recurse to base BINFOs.
1686 This match what get_binfo_at_offset does, but with offset
1687 being unknown.
1689 TYPE_BINFOS is a stack of BINFOS of types with defined
1690 virtual table seen on way from class type to BINFO.
1692 MATCHED_VTABLES tracks virtual tables we already did lookup
1693 for virtual function in. INSERTED tracks nodes we already
1694 inserted.
1696 ANONYMOUS is true if BINFO is part of anonymous namespace.
1698 Clear COMPLETEP when we hit unreferable target.
1701 static void
1702 record_target_from_binfo (vec <cgraph_node *> &nodes,
1703 vec <tree> *bases_to_consider,
1704 tree binfo,
1705 tree otr_type,
1706 vec <tree> &type_binfos,
1707 HOST_WIDE_INT otr_token,
1708 tree outer_type,
1709 HOST_WIDE_INT offset,
1710 hash_set<tree> *inserted,
1711 hash_set<tree> *matched_vtables,
1712 bool anonymous,
1713 bool *completep)
1715 tree type = BINFO_TYPE (binfo);
1716 int i;
1717 tree base_binfo;
1720 if (BINFO_VTABLE (binfo))
1721 type_binfos.safe_push (binfo);
1722 if (types_same_for_odr (type, outer_type))
1724 int i;
1725 tree type_binfo = NULL;
1727 /* Lookup BINFO with virtual table. For normal types it is always last
1728 binfo on stack. */
1729 for (i = type_binfos.length () - 1; i >= 0; i--)
1730 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1732 type_binfo = type_binfos[i];
1733 break;
1735 if (BINFO_VTABLE (binfo))
1736 type_binfos.pop ();
1737 /* If this is duplicated BINFO for base shared by virtual inheritance,
1738 we may not have its associated vtable. This is not a problem, since
1739 we will walk it on the other path. */
1740 if (!type_binfo)
1741 return;
1742 tree inner_binfo = get_binfo_at_offset (type_binfo,
1743 offset, otr_type);
1744 if (!inner_binfo)
1746 gcc_assert (odr_violation_reported);
1747 return;
1749 /* For types in anonymous namespace first check if the respective vtable
1750 is alive. If not, we know the type can't be called. */
1751 if (!flag_ltrans && anonymous)
1753 tree vtable = BINFO_VTABLE (inner_binfo);
1754 varpool_node *vnode;
1756 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1757 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1758 vnode = varpool_node::get (vtable);
1759 if (!vnode || !vnode->definition)
1760 return;
1762 gcc_assert (inner_binfo);
1763 if (bases_to_consider
1764 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
1765 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
1767 bool can_refer;
1768 tree target = gimple_get_virt_method_for_binfo (otr_token,
1769 inner_binfo,
1770 &can_refer);
1771 if (!bases_to_consider)
1772 maybe_record_node (nodes, target, inserted, can_refer, completep);
1773 /* Destructors are never called via construction vtables. */
1774 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1775 bases_to_consider->safe_push (target);
1777 return;
1780 /* Walk bases. */
1781 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1782 /* Walking bases that have no virtual method is pointless excercise. */
1783 if (polymorphic_type_binfo_p (base_binfo))
1784 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
1785 type_binfos,
1786 otr_token, outer_type, offset, inserted,
1787 matched_vtables, anonymous, completep);
1788 if (BINFO_VTABLE (binfo))
1789 type_binfos.pop ();
1792 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1793 of TYPE, insert them to NODES, recurse into derived nodes.
1794 INSERTED is used to avoid duplicate insertions of methods into NODES.
1795 MATCHED_VTABLES are used to avoid duplicate walking vtables.
1796 Clear COMPLETEP if unreferable target is found.
1798 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
1799 all cases where BASE_SKIPPED is true (because the base is abstract
1800 class). */
1802 static void
1803 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1804 hash_set<tree> *inserted,
1805 hash_set<tree> *matched_vtables,
1806 tree otr_type,
1807 odr_type type,
1808 HOST_WIDE_INT otr_token,
1809 tree outer_type,
1810 HOST_WIDE_INT offset,
1811 bool *completep,
1812 vec <tree> &bases_to_consider,
1813 bool consider_construction)
1815 tree binfo = TYPE_BINFO (type->type);
1816 unsigned int i;
1817 vec <tree> type_binfos = vNULL;
1818 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1820 /* We may need to consider types w/o instances because of possible derived
1821 types using their methods either directly or via construction vtables.
1822 We are safe to skip them when all derivations are known, since we will
1823 handle them later.
1824 This is done by recording them to BASES_TO_CONSIDER array. */
1825 if (possibly_instantiated || consider_construction)
1827 record_target_from_binfo (nodes,
1828 (!possibly_instantiated
1829 && type_all_derivations_known_p (type->type))
1830 ? &bases_to_consider : NULL,
1831 binfo, otr_type, type_binfos, otr_token,
1832 outer_type, offset,
1833 inserted, matched_vtables,
1834 type->anonymous_namespace, completep);
1836 type_binfos.release ();
1837 for (i = 0; i < type->derived_types.length (); i++)
1838 possible_polymorphic_call_targets_1 (nodes, inserted,
1839 matched_vtables,
1840 otr_type,
1841 type->derived_types[i],
1842 otr_token, outer_type, offset, completep,
1843 bases_to_consider, consider_construction);
1846 /* Cache of queries for polymorphic call targets.
1848 Enumerating all call targets may get expensive when there are many
1849 polymorphic calls in the program, so we memoize all the previous
1850 queries and avoid duplicated work. */
1852 struct polymorphic_call_target_d
1854 HOST_WIDE_INT otr_token;
1855 ipa_polymorphic_call_context context;
1856 odr_type type;
1857 vec <cgraph_node *> targets;
1858 int speculative_targets;
1859 bool complete;
1860 int type_warning;
1861 tree decl_warning;
1864 /* Polymorphic call target cache helpers. */
1866 struct polymorphic_call_target_hasher
1868 typedef polymorphic_call_target_d value_type;
1869 typedef polymorphic_call_target_d compare_type;
1870 static inline hashval_t hash (const value_type *);
1871 static inline bool equal (const value_type *, const compare_type *);
1872 static inline void remove (value_type *);
1875 /* Return the computed hashcode for ODR_QUERY. */
1877 inline hashval_t
1878 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1880 inchash::hash hstate (odr_query->otr_token);
1882 hstate.add_wide_int (odr_query->type->id);
1883 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1884 hstate.add_wide_int (odr_query->context.offset);
1886 if (odr_query->context.speculative_outer_type)
1888 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1889 hstate.add_wide_int (odr_query->context.speculative_offset);
1891 hstate.add_flag (odr_query->context.maybe_in_construction);
1892 hstate.add_flag (odr_query->context.maybe_derived_type);
1893 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1894 hstate.commit_flag ();
1895 return hstate.end ();
1898 /* Compare cache entries T1 and T2. */
1900 inline bool
1901 polymorphic_call_target_hasher::equal (const value_type *t1,
1902 const compare_type *t2)
1904 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1905 && t1->context.offset == t2->context.offset
1906 && t1->context.speculative_offset == t2->context.speculative_offset
1907 && t1->context.outer_type == t2->context.outer_type
1908 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1909 && t1->context.maybe_in_construction
1910 == t2->context.maybe_in_construction
1911 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1912 && (t1->context.speculative_maybe_derived_type
1913 == t2->context.speculative_maybe_derived_type));
1916 /* Remove entry in polymorphic call target cache hash. */
1918 inline void
1919 polymorphic_call_target_hasher::remove (value_type *v)
1921 v->targets.release ();
1922 free (v);
1925 /* Polymorphic call target query cache. */
1927 typedef hash_table<polymorphic_call_target_hasher>
1928 polymorphic_call_target_hash_type;
1929 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1931 /* Destroy polymorphic call target query cache. */
1933 static void
1934 free_polymorphic_call_targets_hash ()
1936 if (cached_polymorphic_call_targets)
1938 delete polymorphic_call_target_hash;
1939 polymorphic_call_target_hash = NULL;
1940 delete cached_polymorphic_call_targets;
1941 cached_polymorphic_call_targets = NULL;
1945 /* When virtual function is removed, we may need to flush the cache. */
1947 static void
1948 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1950 if (cached_polymorphic_call_targets
1951 && cached_polymorphic_call_targets->contains (n))
1952 free_polymorphic_call_targets_hash ();
1955 /* Return true when TYPE contains an polymorphic type and thus is interesting
1956 for devirtualization machinery. */
1958 bool
1959 contains_polymorphic_type_p (const_tree type)
1961 type = TYPE_MAIN_VARIANT (type);
1963 if (RECORD_OR_UNION_TYPE_P (type))
1965 if (TYPE_BINFO (type)
1966 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1967 return true;
1968 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1969 if (TREE_CODE (fld) == FIELD_DECL
1970 && !DECL_ARTIFICIAL (fld)
1971 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1972 return true;
1973 return false;
1975 if (TREE_CODE (type) == ARRAY_TYPE)
1976 return contains_polymorphic_type_p (TREE_TYPE (type));
1977 return false;
1980 /* THIS->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1981 is contained at THIS->OFFSET. Walk the memory representation of
1982 THIS->OUTER_TYPE and find the outermost class type that match
1983 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update THIS
1984 to represent it.
1986 For example when THIS represents type
1987 class A
1989 int a;
1990 class B b;
1992 and we look for type at offset sizeof(int), we end up with B and offset 0.
1993 If the same is produced by multiple inheritance, we end up with A and offset
1994 sizeof(int).
1996 If we can not find corresponding class, give up by setting
1997 THIS->OUTER_TYPE to EXPECTED_TYPE and THIS->OFFSET to NULL.
1998 Return true when lookup was sucesful. */
2000 bool
2001 ipa_polymorphic_call_context::restrict_to_inner_class (tree expected_type)
2003 tree type = outer_type;
2004 HOST_WIDE_INT cur_offset = offset;
2005 bool speculative = false;
2006 bool speculation_valid = false;
2007 bool valid = false;
2009 if (!outer_type)
2011 type = outer_type = expected_type;
2012 offset = cur_offset = 0;
2015 if (speculative_outer_type == outer_type
2016 && (!maybe_derived_type
2017 || speculative_maybe_derived_type))
2019 speculative_outer_type = NULL;
2020 speculative_offset = 0;
2021 speculative_maybe_derived_type = false;
2024 /* See if speculative type seem to be derrived from outer_type.
2025 Then speculation is valid only if it really is a derivate and derived types
2026 are allowed.
2028 The test does not really look for derivate, but also accepts the case where
2029 outer_type is a field of speculative_outer_type. In this case eiter
2030 MAYBE_DERIVED_TYPE is false and we have full non-speculative information or
2031 the loop bellow will correctly update SPECULATIVE_OUTER_TYPE
2032 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
2033 if (speculative_outer_type
2034 && speculative_offset >= offset
2035 && contains_type_p (speculative_outer_type,
2036 offset - speculative_offset,
2037 outer_type))
2038 speculation_valid = maybe_derived_type;
2039 else
2040 clear_speculation ();
2042 /* Find the sub-object the constant actually refers to and mark whether it is
2043 an artificial one (as opposed to a user-defined one).
2045 This loop is performed twice; first time for outer_type and second time
2046 for speculative_outer_type. The second iteration has SPECULATIVE set. */
2047 while (true)
2049 HOST_WIDE_INT pos, size;
2050 tree fld;
2052 /* On a match, just return what we found. */
2053 if (TREE_CODE (type) == TREE_CODE (expected_type)
2054 && (!in_lto_p
2055 || (TREE_CODE (type) == RECORD_TYPE
2056 && TYPE_BINFO (type)
2057 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
2058 && types_same_for_odr (type, expected_type))
2060 if (speculative)
2062 gcc_assert (speculation_valid);
2063 gcc_assert (valid);
2065 /* If we did not match the offset, just give up on speculation. */
2066 if (cur_offset != 0
2067 || (types_same_for_odr (speculative_outer_type,
2068 outer_type)
2069 && (maybe_derived_type
2070 == speculative_maybe_derived_type)))
2071 clear_speculation ();
2072 return true;
2074 else
2076 /* Type can not contain itself on an non-zero offset. In that case
2077 just give up. */
2078 if (cur_offset != 0)
2080 valid = false;
2081 goto give_up;
2083 valid = true;
2084 /* If speculation is not valid or we determined type precisely,
2085 we are done. */
2086 if (!speculation_valid
2087 || !maybe_derived_type)
2089 clear_speculation ();
2090 return true;
2092 /* Otherwise look into speculation now. */
2093 else
2095 speculative = true;
2096 type = speculative_outer_type;
2097 cur_offset = speculative_offset;
2098 continue;
2103 /* Walk fields and find corresponding on at OFFSET. */
2104 if (TREE_CODE (type) == RECORD_TYPE)
2106 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
2108 if (TREE_CODE (fld) != FIELD_DECL)
2109 continue;
2111 pos = int_bit_position (fld);
2112 size = tree_to_uhwi (DECL_SIZE (fld));
2113 if (pos <= cur_offset && (pos + size) > cur_offset)
2114 break;
2117 if (!fld)
2118 goto give_up;
2120 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
2121 cur_offset -= pos;
2122 /* DECL_ARTIFICIAL represents a basetype. */
2123 if (!DECL_ARTIFICIAL (fld))
2125 if (!speculative)
2127 outer_type = type;
2128 offset = cur_offset;
2129 /* As soon as we se an field containing the type,
2130 we know we are not looking for derivations. */
2131 maybe_derived_type = false;
2133 else
2135 speculative_outer_type = type;
2136 speculative_offset = cur_offset;
2137 speculative_maybe_derived_type = false;
2141 else if (TREE_CODE (type) == ARRAY_TYPE)
2143 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
2145 /* Give up if we don't know array size. */
2146 if (!TYPE_SIZE (subtype)
2147 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
2148 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
2149 || !contains_polymorphic_type_p (subtype))
2150 goto give_up;
2151 cur_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
2152 type = subtype;
2153 if (!speculative)
2155 outer_type = type;
2156 offset = cur_offset;
2157 maybe_derived_type = false;
2159 else
2161 speculative_outer_type = type;
2162 speculative_offset = cur_offset;
2163 speculative_maybe_derived_type = false;
2166 /* Give up on anything else. */
2167 else
2168 goto give_up;
2171 /* If we failed to find subtype we look for, give up and fall back to the
2172 most generic query. */
2173 give_up:
2174 clear_speculation ();
2175 if (valid)
2176 return true;
2177 outer_type = expected_type;
2178 offset = 0;
2179 maybe_derived_type = true;
2180 maybe_in_construction = true;
2181 /* POD can be changed to an instance of a polymorphic type by
2182 placement new. Here we play safe and assume that any
2183 non-polymorphic type is POD. */
2184 if ((TREE_CODE (type) != RECORD_TYPE
2185 || !TYPE_BINFO (type)
2186 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
2187 && (!TYPE_SIZE (type)
2188 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2189 || (cur_offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
2190 tree_to_uhwi (TYPE_SIZE (type)))))
2191 return true;
2192 return false;
2195 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
2197 static bool
2198 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
2199 tree otr_type)
2201 ipa_polymorphic_call_context context;
2202 context.offset = offset;
2203 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
2204 return context.restrict_to_inner_class (otr_type);
2207 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
2209 static tree
2210 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2211 tree vtable)
2213 tree v = BINFO_VTABLE (binfo);
2214 int i;
2215 tree base_binfo;
2216 unsigned HOST_WIDE_INT this_offset;
2218 if (v)
2220 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2221 gcc_unreachable ();
2223 if (offset == this_offset
2224 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2225 return binfo;
2228 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2229 if (polymorphic_type_binfo_p (base_binfo))
2231 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2232 if (base_binfo)
2233 return base_binfo;
2235 return NULL;
2238 /* T is known constant value of virtual table pointer.
2239 Store virtual table to V and its offset to OFFSET.
2240 Return false if T does not look like virtual table reference. */
2242 bool
2243 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2244 unsigned HOST_WIDE_INT *offset)
2246 /* We expect &MEM[(void *)&virtual_table + 16B].
2247 We obtain object's BINFO from the context of the virtual table.
2248 This one contains pointer to virtual table represented via
2249 POINTER_PLUS_EXPR. Verify that this pointer match to what
2250 we propagated through.
2252 In the case of virtual inheritance, the virtual tables may
2253 be nested, i.e. the offset may be different from 16 and we may
2254 need to dive into the type representation. */
2255 if (TREE_CODE (t) == ADDR_EXPR
2256 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2257 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2258 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2259 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2260 == VAR_DECL)
2261 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2262 (TREE_OPERAND (t, 0), 0), 0)))
2264 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2265 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2266 return true;
2269 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2270 We need to handle it when T comes from static variable initializer or
2271 BINFO. */
2272 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2274 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2275 t = TREE_OPERAND (t, 0);
2277 else
2278 *offset = 0;
2280 if (TREE_CODE (t) != ADDR_EXPR)
2281 return false;
2282 *v = TREE_OPERAND (t, 0);
2283 return true;
2286 /* T is known constant value of virtual table pointer. Return BINFO of the
2287 instance type. */
2289 tree
2290 vtable_pointer_value_to_binfo (const_tree t)
2292 tree vtable;
2293 unsigned HOST_WIDE_INT offset;
2295 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2296 return NULL_TREE;
2298 /* FIXME: for stores of construction vtables we return NULL,
2299 because we do not have BINFO for those. Eventually we should fix
2300 our representation to allow this case to be handled, too.
2301 In the case we see store of BINFO we however may assume
2302 that standard folding will be ale to cope with it. */
2303 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2304 offset, vtable);
2307 /* We know that the instance is stored in variable or parameter
2308 (not dynamically allocated) and we want to disprove the fact
2309 that it may be in construction at invocation of CALL.
2311 For the variable to be in construction we actually need to
2312 be in constructor of corresponding global variable or
2313 the inline stack of CALL must contain the constructor.
2314 Check this condition. This check works safely only before
2315 IPA passes, because inline stacks may become out of date
2316 later. */
2318 bool
2319 decl_maybe_in_construction_p (tree base, tree outer_type,
2320 gimple call, tree function)
2322 outer_type = TYPE_MAIN_VARIANT (outer_type);
2323 gcc_assert (DECL_P (base));
2325 /* After inlining the code unification optimizations may invalidate
2326 inline stacks. Also we need to give up on global variables after
2327 IPA, because addresses of these may have been propagated to their
2328 constructors. */
2329 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2330 return true;
2332 /* Pure functions can not do any changes on the dynamic type;
2333 that require writting to memory. */
2334 if (!auto_var_in_fn_p (base, function)
2335 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2336 return false;
2338 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2339 block = BLOCK_SUPERCONTEXT (block))
2340 if (BLOCK_ABSTRACT_ORIGIN (block)
2341 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2343 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2345 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2346 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2347 && !DECL_CXX_DESTRUCTOR_P (fn)))
2349 /* Watch for clones where we constant propagated the first
2350 argument (pointer to the instance). */
2351 fn = DECL_ABSTRACT_ORIGIN (fn);
2352 if (!fn
2353 || !is_global_var (base)
2354 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2355 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2356 && !DECL_CXX_DESTRUCTOR_P (fn)))
2357 continue;
2359 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2360 continue;
2362 /* FIXME: this can go away once we have ODR types equivalency on
2363 LTO level. */
2364 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2365 return true;
2366 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2367 if (types_same_for_odr (type, outer_type))
2368 return true;
2371 if (TREE_CODE (base) == VAR_DECL
2372 && is_global_var (base))
2374 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2375 || (!DECL_CXX_CONSTRUCTOR_P (function)
2376 && !DECL_CXX_DESTRUCTOR_P (function)))
2378 if (!DECL_ABSTRACT_ORIGIN (function))
2379 return false;
2380 /* Watch for clones where we constant propagated the first
2381 argument (pointer to the instance). */
2382 function = DECL_ABSTRACT_ORIGIN (function);
2383 if (!function
2384 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2385 || (!DECL_CXX_CONSTRUCTOR_P (function)
2386 && !DECL_CXX_DESTRUCTOR_P (function)))
2387 return false;
2389 /* FIXME: this can go away once we have ODR types equivalency on
2390 LTO level. */
2391 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2392 return true;
2393 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2394 if (types_same_for_odr (type, outer_type))
2395 return true;
2397 return false;
2400 /* Proudce polymorphic call context for call method of instance
2401 that is located within BASE (that is assumed to be a decl) at OFFSET. */
2403 static void
2404 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2405 tree base, HOST_WIDE_INT offset)
2407 gcc_assert (DECL_P (base));
2409 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2410 context->offset = offset;
2411 context->speculative_outer_type = NULL;
2412 context->speculative_offset = 0;
2413 context->speculative_maybe_derived_type = true;
2414 /* Make very conservative assumption that all objects
2415 may be in construction.
2416 TODO: ipa-prop already contains code to tell better.
2417 merge it later. */
2418 context->maybe_in_construction = true;
2419 context->maybe_derived_type = false;
2422 /* CST is an invariant (address of decl), try to get meaningful
2423 polymorphic call context for polymorphic call of method
2424 if instance of OTR_TYPE that is located at OFFSET of this invariant.
2425 Return FALSE if nothing meaningful can be found. */
2427 bool
2428 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2429 tree cst,
2430 tree otr_type,
2431 HOST_WIDE_INT offset)
2433 HOST_WIDE_INT offset2, size, max_size;
2434 tree base;
2436 if (TREE_CODE (cst) != ADDR_EXPR)
2437 return false;
2439 cst = TREE_OPERAND (cst, 0);
2440 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2441 if (!DECL_P (base) || max_size == -1 || max_size != size)
2442 return false;
2444 /* Only type inconsistent programs can have otr_type that is
2445 not part of outer type. */
2446 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2447 return false;
2449 get_polymorphic_call_info_for_decl (context, base, offset);
2450 return true;
2453 /* See if OP is SSA name initialized as a copy or by single assignment.
2454 If so, walk the SSA graph up. */
2456 static tree
2457 walk_ssa_copies (tree op)
2459 STRIP_NOPS (op);
2460 while (TREE_CODE (op) == SSA_NAME
2461 && !SSA_NAME_IS_DEFAULT_DEF (op)
2462 && SSA_NAME_DEF_STMT (op)
2463 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2465 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2466 return op;
2467 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2468 STRIP_NOPS (op);
2470 return op;
2473 /* Given REF call in FNDECL, determine class of the polymorphic
2474 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
2475 CALL is optional argument giving the actual statement (usually call) where
2476 the context is used.
2477 Return pointer to object described by the context or an declaration if
2478 we found the instance to be stored in the static storage. */
2480 tree
2481 get_polymorphic_call_info (tree fndecl,
2482 tree ref,
2483 tree *otr_type,
2484 HOST_WIDE_INT *otr_token,
2485 ipa_polymorphic_call_context *context,
2486 gimple call)
2488 tree base_pointer;
2489 *otr_type = obj_type_ref_class (ref);
2490 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2492 /* Set up basic info in case we find nothing interesting in the analysis. */
2493 context->speculative_outer_type = NULL;
2494 context->speculative_offset = 0;
2495 context->speculative_maybe_derived_type = true;
2496 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
2497 context->offset = 0;
2498 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2499 context->maybe_derived_type = true;
2500 context->maybe_in_construction = true;
2502 /* Walk SSA for outer object. */
2505 base_pointer = walk_ssa_copies (base_pointer);
2506 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2508 HOST_WIDE_INT size, max_size;
2509 HOST_WIDE_INT offset2;
2510 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2511 &offset2, &size, &max_size);
2513 /* If this is a varying address, punt. */
2514 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2515 && max_size != -1
2516 && max_size == size)
2518 /* We found dereference of a pointer. Type of the pointer
2519 and MEM_REF is meaningless, but we can look futher. */
2520 if (TREE_CODE (base) == MEM_REF)
2522 base_pointer = TREE_OPERAND (base, 0);
2523 context->offset
2524 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2525 context->outer_type = NULL;
2527 /* We found base object. In this case the outer_type
2528 is known. */
2529 else if (DECL_P (base))
2531 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2533 /* Only type inconsistent programs can have otr_type that is
2534 not part of outer type. */
2535 if (!contains_type_p (TREE_TYPE (base),
2536 context->offset + offset2, *otr_type))
2538 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2539 code sequences; we arrange the calls to be builtin_unreachable
2540 later. */
2541 *otr_token = INT_MAX;
2542 return base_pointer;
2544 get_polymorphic_call_info_for_decl (context, base,
2545 context->offset + offset2);
2546 if (context->maybe_in_construction && call)
2547 context->maybe_in_construction
2548 = decl_maybe_in_construction_p (base,
2549 context->outer_type,
2550 call,
2551 fndecl);
2552 return base;
2554 else
2555 break;
2557 else
2558 break;
2560 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2561 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2563 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2564 * BITS_PER_UNIT;
2565 base_pointer = TREE_OPERAND (base_pointer, 0);
2567 else
2568 break;
2570 while (true);
2572 /* Try to determine type of the outer object. */
2573 if (TREE_CODE (base_pointer) == SSA_NAME
2574 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2575 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2577 /* See if parameter is THIS pointer of a method. */
2578 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2579 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2581 context->outer_type
2582 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2583 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2585 /* Dynamic casting has possibly upcasted the type
2586 in the hiearchy. In this case outer type is less
2587 informative than inner type and we should forget
2588 about it. */
2589 if (!contains_type_p (context->outer_type, context->offset,
2590 *otr_type))
2592 context->outer_type = NULL;
2593 return base_pointer;
2596 /* If the function is constructor or destructor, then
2597 the type is possibly in construction, but we know
2598 it is not derived type. */
2599 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2600 || DECL_CXX_DESTRUCTOR_P (fndecl))
2602 context->maybe_in_construction = true;
2603 context->maybe_derived_type = false;
2605 else
2607 context->maybe_derived_type = true;
2608 context->maybe_in_construction = false;
2610 return base_pointer;
2612 /* Non-PODs passed by value are really passed by invisible
2613 reference. In this case we also know the type of the
2614 object. */
2615 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2617 context->outer_type
2618 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2619 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2620 /* Only type inconsistent programs can have otr_type that is
2621 not part of outer type. */
2622 if (!contains_type_p (context->outer_type, context->offset,
2623 *otr_type))
2625 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2626 code sequences; we arrange the calls to be builtin_unreachable
2627 later. */
2628 *otr_token = INT_MAX;
2629 return base_pointer;
2631 context->maybe_derived_type = false;
2632 context->maybe_in_construction = false;
2633 return base_pointer;
2637 tree base_type = TREE_TYPE (base_pointer);
2639 if (TREE_CODE (base_pointer) == SSA_NAME
2640 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2641 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2643 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2644 code sequences; we arrange the calls to be builtin_unreachable
2645 later. */
2646 *otr_token = INT_MAX;
2647 return base_pointer;
2649 if (TREE_CODE (base_pointer) == SSA_NAME
2650 && SSA_NAME_DEF_STMT (base_pointer)
2651 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2652 base_type = TREE_TYPE (gimple_assign_rhs1
2653 (SSA_NAME_DEF_STMT (base_pointer)));
2655 if (POINTER_TYPE_P (base_type)
2656 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2657 context->offset,
2658 *otr_type))
2660 context->speculative_outer_type = TYPE_MAIN_VARIANT
2661 (TREE_TYPE (base_type));
2662 context->speculative_offset = context->offset;
2663 context->speculative_maybe_derived_type = true;
2665 /* TODO: There are multiple ways to derive a type. For instance
2666 if BASE_POINTER is passed to an constructor call prior our refernece.
2667 We do not make this type of flow sensitive analysis yet. */
2668 return base_pointer;
2671 /* Structure to be passed in between detect_type_change and
2672 check_stmt_for_type_change. */
2674 struct type_change_info
2676 /* Offset into the object where there is the virtual method pointer we are
2677 looking for. */
2678 HOST_WIDE_INT offset;
2679 /* The declaration or SSA_NAME pointer of the base that we are checking for
2680 type change. */
2681 tree instance;
2682 /* The reference to virtual table pointer used. */
2683 tree vtbl_ptr_ref;
2684 tree otr_type;
2685 /* If we actually can tell the type that the object has changed to, it is
2686 stored in this field. Otherwise it remains NULL_TREE. */
2687 tree known_current_type;
2688 HOST_WIDE_INT known_current_offset;
2690 /* Set to true if dynamic type change has been detected. */
2691 bool type_maybe_changed;
2692 /* Set to true if multiple types have been encountered. known_current_type
2693 must be disregarded in that case. */
2694 bool multiple_types_encountered;
2695 /* Set to true if we possibly missed some dynamic type changes and we should
2696 consider the set to be speculative. */
2697 bool speculative;
2698 bool seen_unanalyzed_store;
2701 /* Return true if STMT is not call and can modify a virtual method table pointer.
2702 We take advantage of fact that vtable stores must appear within constructor
2703 and destructor functions. */
2705 bool
2706 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
2708 if (is_gimple_assign (stmt))
2710 tree lhs = gimple_assign_lhs (stmt);
2712 if (gimple_clobber_p (stmt))
2713 return false;
2714 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
2716 if (flag_strict_aliasing
2717 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
2718 return false;
2720 if (TREE_CODE (lhs) == COMPONENT_REF
2721 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2722 return false;
2723 /* In the future we might want to use get_base_ref_and_offset to find
2724 if there is a field corresponding to the offset and if so, proceed
2725 almost like if it was a component ref. */
2729 /* Code unification may mess with inline stacks. */
2730 if (cfun->after_inlining)
2731 return true;
2733 /* Walk the inline stack and watch out for ctors/dtors.
2734 TODO: Maybe we can require the store to appear in toplevel
2735 block of CTOR/DTOR. */
2736 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
2737 block = BLOCK_SUPERCONTEXT (block))
2738 if (BLOCK_ABSTRACT_ORIGIN (block)
2739 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2741 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2743 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2744 return false;
2745 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2746 && (DECL_CXX_CONSTRUCTOR_P (fn)
2747 || DECL_CXX_DESTRUCTOR_P (fn)));
2749 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
2750 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
2751 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
2754 /* If STMT can be proved to be an assignment to the virtual method table
2755 pointer of ANALYZED_OBJ and the type associated with the new table
2756 identified, return the type. Otherwise return NULL_TREE. */
2758 static tree
2759 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
2760 HOST_WIDE_INT *type_offset)
2762 HOST_WIDE_INT offset, size, max_size;
2763 tree lhs, rhs, base;
2765 if (!gimple_assign_single_p (stmt))
2766 return NULL_TREE;
2768 lhs = gimple_assign_lhs (stmt);
2769 rhs = gimple_assign_rhs1 (stmt);
2770 if (TREE_CODE (lhs) != COMPONENT_REF
2771 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2773 if (dump_file)
2774 fprintf (dump_file, " LHS is not virtual table.\n");
2775 return NULL_TREE;
2778 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
2780 else
2782 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2783 if (offset != tci->offset
2784 || size != POINTER_SIZE
2785 || max_size != POINTER_SIZE)
2787 if (dump_file)
2788 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
2789 (int)offset, (int)tci->offset, (int)size);
2790 return NULL_TREE;
2792 if (DECL_P (tci->instance))
2794 if (base != tci->instance)
2796 if (dump_file)
2798 fprintf (dump_file, " base:");
2799 print_generic_expr (dump_file, base, TDF_SLIM);
2800 fprintf (dump_file, " does not match instance:");
2801 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2802 fprintf (dump_file, "\n");
2804 return NULL_TREE;
2807 else if (TREE_CODE (base) == MEM_REF)
2809 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
2810 || !integer_zerop (TREE_OPERAND (base, 1)))
2812 if (dump_file)
2814 fprintf (dump_file, " base mem ref:");
2815 print_generic_expr (dump_file, base, TDF_SLIM);
2816 fprintf (dump_file, " has nonzero offset or does not match instance:");
2817 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2818 fprintf (dump_file, "\n");
2820 return NULL_TREE;
2823 else if (!operand_equal_p (tci->instance, base, 0)
2824 || tci->offset)
2826 if (dump_file)
2828 fprintf (dump_file, " base:");
2829 print_generic_expr (dump_file, base, TDF_SLIM);
2830 fprintf (dump_file, " does not match instance:");
2831 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2832 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
2834 return NULL_TREE;
2838 tree vtable;
2839 unsigned HOST_WIDE_INT offset2;
2841 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
2843 if (dump_file)
2844 fprintf (dump_file, " Failed to lookup binfo\n");
2845 return NULL;
2848 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2849 offset2, vtable);
2850 if (!binfo)
2852 if (dump_file)
2853 fprintf (dump_file, " Construction vtable used\n");
2854 /* FIXME: We should suport construction contextes. */
2855 return NULL;
2858 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
2859 return DECL_CONTEXT (vtable);
2862 /* Record dynamic type change of TCI to TYPE. */
2864 void
2865 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
2867 if (dump_file)
2869 if (type)
2871 fprintf (dump_file, " Recording type: ");
2872 print_generic_expr (dump_file, type, TDF_SLIM);
2873 fprintf (dump_file, " at offset %i\n", (int)offset);
2875 else
2876 fprintf (dump_file, " Recording unknown type\n");
2879 /* If we found a constructor of type that is not polymorphic or
2880 that may contain the type in question as a field (not as base),
2881 restrict to the inner class first to make type matching bellow
2882 happier. */
2883 if (type
2884 && (offset
2885 || (TREE_CODE (type) != RECORD_TYPE
2886 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
2888 ipa_polymorphic_call_context context;
2890 context.offset = offset;
2891 context.outer_type = type;
2892 context.maybe_in_construction = false;
2893 context.maybe_derived_type = false;
2894 /* If we failed to find the inner type, we know that the call
2895 would be undefined for type produced here. */
2896 if (!context.restrict_to_inner_class (tci->otr_type))
2898 if (dump_file)
2899 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
2900 return;
2902 /* Watch for case we reached an POD type and anticipate placement
2903 new. */
2904 if (!context.maybe_derived_type)
2906 type = context.outer_type;
2907 offset = context.offset;
2910 if (tci->type_maybe_changed
2911 && (!types_same_for_odr (type, tci->known_current_type)
2912 || offset != tci->known_current_offset))
2913 tci->multiple_types_encountered = true;
2914 tci->known_current_type = TYPE_MAIN_VARIANT (type);
2915 tci->known_current_offset = offset;
2916 tci->type_maybe_changed = true;
2919 /* Callback of walk_aliased_vdefs and a helper function for
2920 detect_type_change to check whether a particular statement may modify
2921 the virtual table pointer, and if possible also determine the new type of
2922 the (sub-)object. It stores its result into DATA, which points to a
2923 type_change_info structure. */
2925 static bool
2926 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
2928 gimple stmt = SSA_NAME_DEF_STMT (vdef);
2929 struct type_change_info *tci = (struct type_change_info *) data;
2930 tree fn;
2932 /* If we already gave up, just terminate the rest of walk. */
2933 if (tci->multiple_types_encountered)
2934 return true;
2936 if (is_gimple_call (stmt))
2938 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
2939 return false;
2941 /* Check for a constructor call. */
2942 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
2943 && DECL_CXX_CONSTRUCTOR_P (fn)
2944 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2945 && gimple_call_num_args (stmt))
2947 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
2948 tree type = method_class_type (TREE_TYPE (fn));
2949 HOST_WIDE_INT offset = 0, size, max_size;
2951 if (dump_file)
2953 fprintf (dump_file, " Checking constructor call: ");
2954 print_gimple_stmt (dump_file, stmt, 0, 0);
2957 /* See if THIS parameter seems like instance pointer. */
2958 if (TREE_CODE (op) == ADDR_EXPR)
2960 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
2961 &offset, &size, &max_size);
2962 if (size != max_size || max_size == -1)
2964 tci->speculative = true;
2965 return false;
2967 if (op && TREE_CODE (op) == MEM_REF)
2969 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
2971 tci->speculative = true;
2972 return false;
2974 offset += tree_to_shwi (TREE_OPERAND (op, 1))
2975 * BITS_PER_UNIT;
2976 op = TREE_OPERAND (op, 0);
2978 else if (DECL_P (op))
2980 else
2982 tci->speculative = true;
2983 return false;
2985 op = walk_ssa_copies (op);
2987 if (operand_equal_p (op, tci->instance, 0)
2988 && TYPE_SIZE (type)
2989 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2990 && tree_fits_shwi_p (TYPE_SIZE (type))
2991 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
2993 record_known_type (tci, type, tci->offset - offset);
2994 return true;
2997 /* Calls may possibly change dynamic type by placement new. Assume
2998 it will not happen, but make result speculative only. */
2999 if (dump_file)
3001 fprintf (dump_file, " Function call may change dynamic type:");
3002 print_gimple_stmt (dump_file, stmt, 0, 0);
3004 tci->speculative = true;
3005 return false;
3007 /* Check for inlined virtual table store. */
3008 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
3010 tree type;
3011 HOST_WIDE_INT offset = 0;
3012 if (dump_file)
3014 fprintf (dump_file, " Checking vtbl store: ");
3015 print_gimple_stmt (dump_file, stmt, 0, 0);
3018 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
3019 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
3020 if (!type)
3022 if (dump_file)
3023 fprintf (dump_file, " Unanalyzed store may change type.\n");
3024 tci->seen_unanalyzed_store = true;
3025 tci->speculative = true;
3027 else
3028 record_known_type (tci, type, offset);
3029 return true;
3031 else
3032 return false;
3035 /* THIS is polymorphic call context obtained from get_polymorphic_context.
3036 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
3037 INSTANCE is pointer to the outer instance as returned by
3038 get_polymorphic_context. To avoid creation of temporary expressions,
3039 INSTANCE may also be an declaration of get_polymorphic_context found the
3040 value to be in static storage.
3042 If the type of instance is not fully determined
3043 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
3044 is set), try to walk memory writes and find the actual construction of the
3045 instance.
3047 We do not include this analysis in the context analysis itself, because
3048 it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
3051 bool
3052 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
3053 tree otr_object,
3054 tree otr_type,
3055 gimple call)
3057 struct type_change_info tci;
3058 ao_ref ao;
3059 bool function_entry_reached = false;
3060 tree instance_ref = NULL;
3061 gimple stmt = call;
3062 /* Remember OFFSET before it is modified by restrict_to_inner_class.
3063 This is because we do not update INSTANCE when walking inwards. */
3064 HOST_WIDE_INT instance_offset = offset;
3066 otr_type = TYPE_MAIN_VARIANT (otr_type);
3068 /* Walk into inner type. This may clear maybe_derived_type and save us
3069 from useless work. It also makes later comparsions with static type
3070 easier. */
3071 if (outer_type)
3073 if (!restrict_to_inner_class (otr_type))
3074 return false;
3077 if (!maybe_in_construction && !maybe_derived_type)
3078 return false;
3080 /* We need to obtain refernce to virtual table pointer. It is better
3081 to look it up in the code rather than build our own. This require bit
3082 of pattern matching, but we end up verifying that what we found is
3083 correct.
3085 What we pattern match is:
3087 tmp = instance->_vptr.A; // vtbl ptr load
3088 tmp2 = tmp[otr_token]; // vtable lookup
3089 OBJ_TYPE_REF(tmp2;instance->0) (instance);
3091 We want to start alias oracle walk from vtbl pointer load,
3092 but we may not be able to identify it, for example, when PRE moved the
3093 load around. */
3095 if (gimple_code (call) == GIMPLE_CALL)
3097 tree ref = gimple_call_fn (call);
3098 HOST_WIDE_INT offset2, size, max_size;
3100 if (TREE_CODE (ref) == OBJ_TYPE_REF)
3102 ref = OBJ_TYPE_REF_EXPR (ref);
3103 ref = walk_ssa_copies (ref);
3105 /* Check if definition looks like vtable lookup. */
3106 if (TREE_CODE (ref) == SSA_NAME
3107 && !SSA_NAME_IS_DEFAULT_DEF (ref)
3108 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
3109 && TREE_CODE (gimple_assign_rhs1
3110 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
3112 ref = get_base_address
3113 (TREE_OPERAND (gimple_assign_rhs1
3114 (SSA_NAME_DEF_STMT (ref)), 0));
3115 ref = walk_ssa_copies (ref);
3116 /* Find base address of the lookup and see if it looks like
3117 vptr load. */
3118 if (TREE_CODE (ref) == SSA_NAME
3119 && !SSA_NAME_IS_DEFAULT_DEF (ref)
3120 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
3122 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
3123 tree base_ref = get_ref_base_and_extent
3124 (ref_exp, &offset2, &size, &max_size);
3126 /* Finally verify that what we found looks like read from OTR_OBJECT
3127 or from INSTANCE with offset OFFSET. */
3128 if (base_ref
3129 && ((TREE_CODE (base_ref) == MEM_REF
3130 && ((offset2 == instance_offset
3131 && TREE_OPERAND (base_ref, 0) == instance)
3132 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
3133 || (DECL_P (instance) && base_ref == instance
3134 && offset2 == instance_offset)))
3136 stmt = SSA_NAME_DEF_STMT (ref);
3137 instance_ref = ref_exp;
3144 /* If we failed to look up the refernece in code, build our own. */
3145 if (!instance_ref)
3147 /* If the statement in question does not use memory, we can't tell
3148 anything. */
3149 if (!gimple_vuse (stmt))
3150 return false;
3151 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
3153 else
3154 /* Otherwise use the real reference. */
3155 ao_ref_init (&ao, instance_ref);
3157 /* We look for vtbl pointer read. */
3158 ao.size = POINTER_SIZE;
3159 ao.max_size = ao.size;
3160 ao.ref_alias_set
3161 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
3163 if (dump_file)
3165 fprintf (dump_file, "Determining dynamic type for call: ");
3166 print_gimple_stmt (dump_file, call, 0, 0);
3167 fprintf (dump_file, " Starting walk at: ");
3168 print_gimple_stmt (dump_file, stmt, 0, 0);
3169 fprintf (dump_file, " instance pointer: ");
3170 print_generic_expr (dump_file, otr_object, TDF_SLIM);
3171 fprintf (dump_file, " Outer instance pointer: ");
3172 print_generic_expr (dump_file, instance, TDF_SLIM);
3173 fprintf (dump_file, " offset: %i (bits)", (int)offset);
3174 fprintf (dump_file, " vtbl reference: ");
3175 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
3176 fprintf (dump_file, "\n");
3179 tci.offset = offset;
3180 tci.instance = instance;
3181 tci.vtbl_ptr_ref = instance_ref;
3182 gcc_assert (TREE_CODE (instance) != MEM_REF);
3183 tci.known_current_type = NULL_TREE;
3184 tci.known_current_offset = 0;
3185 tci.otr_type = otr_type;
3186 tci.type_maybe_changed = false;
3187 tci.multiple_types_encountered = false;
3188 tci.speculative = false;
3189 tci.seen_unanalyzed_store = false;
3191 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
3192 &tci, NULL, &function_entry_reached);
3194 /* If we did not find any type changing statements, we may still drop
3195 maybe_in_construction flag if the context already have outer type.
3197 Here we make special assumptions about both constructors and
3198 destructors which are all the functions that are allowed to alter the
3199 VMT pointers. It assumes that destructors begin with assignment into
3200 all VMT pointers and that constructors essentially look in the
3201 following way:
3203 1) The very first thing they do is that they call constructors of
3204 ancestor sub-objects that have them.
3206 2) Then VMT pointers of this and all its ancestors is set to new
3207 values corresponding to the type corresponding to the constructor.
3209 3) Only afterwards, other stuff such as constructor of member
3210 sub-objects and the code written by the user is run. Only this may
3211 include calling virtual functions, directly or indirectly.
3213 4) placement new can not be used to change type of non-POD statically
3214 allocated variables.
3216 There is no way to call a constructor of an ancestor sub-object in any
3217 other way.
3219 This means that we do not have to care whether constructors get the
3220 correct type information because they will always change it (in fact,
3221 if we define the type to be given by the VMT pointer, it is undefined).
3223 The most important fact to derive from the above is that if, for some
3224 statement in the section 3, we try to detect whether the dynamic type
3225 has changed, we can safely ignore all calls as we examine the function
3226 body backwards until we reach statements in section 2 because these
3227 calls cannot be ancestor constructors or destructors (if the input is
3228 not bogus) and so do not change the dynamic type (this holds true only
3229 for automatically allocated objects but at the moment we devirtualize
3230 only these). We then must detect that statements in section 2 change
3231 the dynamic type and can try to derive the new type. That is enough
3232 and we can stop, we will never see the calls into constructors of
3233 sub-objects in this code.
3235 Therefore if the static outer type was found (outer_type)
3236 we can safely ignore tci.speculative that is set on calls and give up
3237 only if there was dyanmic type store that may affect given variable
3238 (seen_unanalyzed_store) */
3240 if (!tci.type_maybe_changed
3241 || (outer_type
3242 && !tci.seen_unanalyzed_store
3243 && !tci.multiple_types_encountered
3244 && offset == tci.offset
3245 && types_same_for_odr (tci.known_current_type,
3246 outer_type)))
3248 if (!outer_type || tci.seen_unanalyzed_store)
3249 return false;
3250 if (maybe_in_construction)
3251 maybe_in_construction = false;
3252 if (dump_file)
3253 fprintf (dump_file, " No dynamic type change found.\n");
3254 return true;
3257 if (tci.known_current_type
3258 && !function_entry_reached
3259 && !tci.multiple_types_encountered)
3261 if (!tci.speculative)
3263 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
3264 offset = tci.known_current_offset;
3265 maybe_in_construction = false;
3266 maybe_derived_type = false;
3267 if (dump_file)
3268 fprintf (dump_file, " Determined dynamic type.\n");
3270 else if (!speculative_outer_type
3271 || speculative_maybe_derived_type)
3273 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
3274 speculative_offset = tci.known_current_offset;
3275 speculative_maybe_derived_type = false;
3276 if (dump_file)
3277 fprintf (dump_file, " Determined speculative dynamic type.\n");
3280 else if (dump_file)
3282 fprintf (dump_file, " Found multiple types%s%s\n",
3283 function_entry_reached ? " (function entry reached)" : "",
3284 function_entry_reached ? " (multiple types encountered)" : "");
3287 return true;
3290 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
3291 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
3292 and insert them to NODES.
3294 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
3296 static void
3297 record_targets_from_bases (tree otr_type,
3298 HOST_WIDE_INT otr_token,
3299 tree outer_type,
3300 HOST_WIDE_INT offset,
3301 vec <cgraph_node *> &nodes,
3302 hash_set<tree> *inserted,
3303 hash_set<tree> *matched_vtables,
3304 bool *completep)
3306 while (true)
3308 HOST_WIDE_INT pos, size;
3309 tree base_binfo;
3310 tree fld;
3312 if (types_same_for_odr (outer_type, otr_type))
3313 return;
3315 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
3317 if (TREE_CODE (fld) != FIELD_DECL)
3318 continue;
3320 pos = int_bit_position (fld);
3321 size = tree_to_shwi (DECL_SIZE (fld));
3322 if (pos <= offset && (pos + size) > offset
3323 /* Do not get confused by zero sized bases. */
3324 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
3325 break;
3327 /* Within a class type we should always find correcponding fields. */
3328 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
3330 /* Nonbasetypes should have been stripped by outer_class_type. */
3331 gcc_assert (DECL_ARTIFICIAL (fld));
3333 outer_type = TREE_TYPE (fld);
3334 offset -= pos;
3336 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
3337 offset, otr_type);
3338 if (!base_binfo)
3340 gcc_assert (odr_violation_reported);
3341 return;
3343 gcc_assert (base_binfo);
3344 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
3346 bool can_refer;
3347 tree target = gimple_get_virt_method_for_binfo (otr_token,
3348 base_binfo,
3349 &can_refer);
3350 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
3351 maybe_record_node (nodes, target, inserted, can_refer, completep);
3352 matched_vtables->add (BINFO_VTABLE (base_binfo));
3357 /* When virtual table is removed, we may need to flush the cache. */
3359 static void
3360 devirt_variable_node_removal_hook (varpool_node *n,
3361 void *d ATTRIBUTE_UNUSED)
3363 if (cached_polymorphic_call_targets
3364 && DECL_VIRTUAL_P (n->decl)
3365 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3366 free_polymorphic_call_targets_hash ();
3369 /* Record about how many calls would benefit from given type to be final. */
3371 struct odr_type_warn_count
3373 tree type; /* The polymorphic type that could be declared final.  */
3374 int count; /* Number of calls that would become devirtualizable.  */
3375 gcov_type dyn_count; /* Accumulated profile count of those calls.  */
3378 /* Record about how many calls would benefit from given method to be final. */
3380 struct decl_warn_count
3382 tree decl; /* The virtual method that could be declared final.  */
3383 int count; /* Number of calls that would become devirtualizable.  */
3384 gcov_type dyn_count; /* Accumulated profile count of those calls.  */
3387 /* Information about type and decl warnings. */
3389 struct final_warning_record
3391 gcov_type dyn_count; /* Profile count of the call being analyzed.  */
3392 vec<odr_type_warn_count> type_warnings; /* Indexed by odr_type id.  */
3393 hash_map<tree, decl_warn_count> decl_warnings; /* Keyed by method decl.  */
 /* Global instance; non-NULL only while -Wsuggest-final-* is collected.  */
3395 struct final_warning_record *final_warning_records;
3397 /* Return vector containing possible targets of polymorphic call of type
3398 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3399 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
3400 OTR_TYPE and include their virtual method. This is useful for types
3401 possibly in construction or destruction where the virtual table may
3402 temporarily change to one of base types. INCLUDE_DERIVED_TYPES makes
3403 us walk the inheritance graph for all derivations.
3405 OTR_TOKEN == INT_MAX is used to mark calls that are provably
3406 undefined and should be redirected to unreachable.
3408 If COMPLETEP is non-NULL, store true if the list is complete.
3409 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of entry
3410 in the target cache. If user needs to visit every target list
3411 just once, it can memoize them.
3413 SPECULATION_TARGETS specify number of targets that are speculatively
3414 likely. These include targets specified by the speculative part
3415 of polymorphic call context and also exclude all targets for classes
3416 in construction.
3418 Returned vector is placed into cache. It is NOT caller's responsibility
3419 to free it. The vector can be freed on cgraph_remove_node call if
3420 the particular node is a virtual function present in the cache. */
3422 vec <cgraph_node *>
3423 possible_polymorphic_call_targets (tree otr_type,
3424 HOST_WIDE_INT otr_token,
3425 ipa_polymorphic_call_context context,
3426 bool *completep,
3427 void **cache_token,
3428 int *speculative_targetsp)
3430 static struct cgraph_node_hook_list *node_removal_hook_holder;
3431 vec <cgraph_node *> nodes = vNULL;
3432 vec <tree> bases_to_consider = vNULL;
3433 odr_type type, outer_type;
3434 polymorphic_call_target_d key;
3435 polymorphic_call_target_d **slot;
3436 unsigned int i;
3437 tree binfo, target;
3438 bool complete;
3439 bool can_refer;
3440 bool skipped = false;
3442 otr_type = TYPE_MAIN_VARIANT (otr_type);
3444 /* If ODR is not initialized, return empty incomplete list. */
3445 if (!odr_hash)
3447 if (completep)
3448 *completep = false;
3449 if (cache_token)
3450 *cache_token = NULL;
3451 if (speculative_targetsp)
3452 *speculative_targetsp = 0;
3453 return nodes;
3456 /* If we hit type inconsistency, just return empty list of targets. */
3457 if (otr_token == INT_MAX)
3459 if (completep)
3460 *completep = true;
3461 if (cache_token)
3462 *cache_token = NULL;
3463 if (speculative_targetsp)
3464 *speculative_targetsp = 0;
3465 return nodes;
3468 /* Do not bother to compute speculative info when user does not ask for it. */
3469 if (!speculative_targetsp || !context.speculative_outer_type)
3470 context.clear_speculation ();
3472 type = get_odr_type (otr_type, true);
3474 /* Recording type variants would waste the results cache. */
3475 gcc_assert (!context.outer_type
3476 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3478 /* Lookup the outer class type we want to walk. */
3479 if ((context.outer_type || context.speculative_outer_type)
3480 && !context.restrict_to_inner_class (otr_type))
3482 if (completep)
3483 *completep = false;
3484 if (cache_token)
3485 *cache_token = NULL;
3486 if (speculative_targetsp)
3487 *speculative_targetsp = 0;
3488 return nodes;
3491 /* Check that restrict_to_inner_class kept the main variant. */
3492 gcc_assert (!context.outer_type
3493 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3495 /* We canonicalize our query, so we do not need extra hashtable entries. */
3497 /* Without outer type, we have no use for offset. Just do the
3498 basic search from inner type. */
3499 if (!context.outer_type)
3501 context.outer_type = otr_type;
3502 context.offset = 0;
3504 /* We need to update our hierarchy if the type does not exist. */
3505 outer_type = get_odr_type (context.outer_type, true);
3506 /* If the type is marked final, there are no derivations. */
3507 if (TYPE_FINAL_P (outer_type->type))
3508 context.maybe_derived_type = false;
3510 /* Initialize query cache. */
3511 if (!cached_polymorphic_call_targets)
3513 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3514 polymorphic_call_target_hash
3515 = new polymorphic_call_target_hash_type (23);
3516 if (!node_removal_hook_holder)
3518 node_removal_hook_holder =
3519 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3520 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3521 NULL);
3525 /* Lookup cached answer. */
3526 key.type = type;
3527 key.otr_token = otr_token;
3528 key.context = context;
3529 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3530 if (cache_token)
3531 *cache_token = (void *)*slot;
3532 if (*slot)
3534 if (completep)
3535 *completep = (*slot)->complete;
3536 if (speculative_targetsp)
3537 *speculative_targetsp = (*slot)->speculative_targets;
 /* Replay the -Wsuggest-final-* bookkeeping for cached entries. */
3538 if ((*slot)->type_warning && final_warning_records)
3540 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3541 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3542 += final_warning_records->dyn_count;
3544 if ((*slot)->decl_warning && final_warning_records)
3546 struct decl_warn_count *c =
3547 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3548 c->count++;
3549 c->dyn_count += final_warning_records->dyn_count;
3551 return (*slot)->targets;
3554 complete = true;
3556 /* Do actual search. */
3557 timevar_push (TV_IPA_VIRTUAL_CALL);
3558 *slot = XCNEW (polymorphic_call_target_d);
3559 if (cache_token)
3560 *cache_token = (void *)*slot;
3561 (*slot)->type = type;
3562 (*slot)->otr_token = otr_token;
3563 (*slot)->context = context;
3564 (*slot)->speculative_targets = 0;
3566 hash_set<tree> inserted;
3567 hash_set<tree> matched_vtables;
3569 /* First insert targets we speculatively identified as likely. */
3570 if (context.speculative_outer_type)
3572 odr_type speculative_outer_type;
3573 bool speculation_complete = true;
3575 /* First insert target from type itself and check if it may have derived types. */
3576 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3577 if (TYPE_FINAL_P (speculative_outer_type->type))
3578 context.speculative_maybe_derived_type = false;
3579 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3580 context.speculative_offset, otr_type);
3581 if (binfo)
3582 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3583 &can_refer);
3584 else
3585 target = NULL;
3587 /* In the case we get complete method, we don't need
3588 to walk derivations. */
3589 if (target && DECL_FINAL_P (target))
3590 context.speculative_maybe_derived_type = false;
3591 if (type_possibly_instantiated_p (speculative_outer_type->type))
3592 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3593 if (binfo)
3594 matched_vtables.add (BINFO_VTABLE (binfo));
3597 /* Next walk recursively all derived types. */
3598 if (context.speculative_maybe_derived_type)
3599 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3600 possible_polymorphic_call_targets_1 (nodes, &inserted,
3601 &matched_vtables,
3602 otr_type,
3603 speculative_outer_type->derived_types[i],
3604 otr_token, speculative_outer_type->type,
3605 context.speculative_offset,
3606 &speculation_complete,
3607 bases_to_consider,
3608 false);
3609 (*slot)->speculative_targets = nodes.length();
3612 /* First see virtual method of type itself. */
3613 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3614 context.offset, otr_type);
3615 if (binfo)
3616 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3617 &can_refer);
3618 else
3620 gcc_assert (odr_violation_reported);
3621 target = NULL;
3624 /* Destructors are never called through construction virtual tables,
3625 because the type is always known. */
3626 if (target && DECL_CXX_DESTRUCTOR_P (target))
3627 context.maybe_in_construction = false;
3629 if (target)
3631 /* In the case we get complete method, we don't need
3632 to walk derivations. */
3633 if (DECL_FINAL_P (target))
3634 context.maybe_derived_type = false;
3637 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3638 if (type_possibly_instantiated_p (outer_type->type))
3639 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3640 else
3642 skipped = true;
3643 gcc_assert (in_lto_p || context.maybe_derived_type);
3646 if (binfo)
3647 matched_vtables.add (BINFO_VTABLE (binfo));
3649 /* Next walk recursively all derived types. */
3650 if (context.maybe_derived_type)
3652 for (i = 0; i < outer_type->derived_types.length(); i++)
3653 possible_polymorphic_call_targets_1 (nodes, &inserted,
3654 &matched_vtables,
3655 otr_type,
3656 outer_type->derived_types[i],
3657 otr_token, outer_type->type,
3658 context.offset, &complete,
3659 bases_to_consider,
3660 context.maybe_in_construction);
3662 if (!outer_type->all_derivations_known)
3664 if (final_warning_records)
3666 if (complete
3667 && nodes.length () == 1
3668 && warn_suggest_final_types
3669 && !outer_type->derived_types.length ())
3671 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3672 final_warning_records->type_warnings.safe_grow_cleared
3673 (odr_types.length ());
3674 final_warning_records->type_warnings[outer_type->id].count++;
3675 final_warning_records->type_warnings[outer_type->id].dyn_count
3676 += final_warning_records->dyn_count;
3677 final_warning_records->type_warnings[outer_type->id].type
3678 = outer_type->type;
3679 (*slot)->type_warning = outer_type->id + 1;
3681 if (complete
3682 && warn_suggest_final_methods
3683 && nodes.length () == 1
3684 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3685 outer_type->type))
3687 bool existed;
3688 struct decl_warn_count &c =
3689 final_warning_records->decl_warnings.get_or_insert
3690 (nodes[0]->decl, &existed);
3692 if (existed)
3694 c.count++;
3695 c.dyn_count += final_warning_records->dyn_count;
3697 else
3699 c.count = 1;
3700 c.dyn_count = final_warning_records->dyn_count;
3701 c.decl = nodes[0]->decl;
3703 (*slot)->decl_warning = nodes[0]->decl;
3706 complete = false;
3710 /* Finally walk bases, if asked to. */
3711 if (!(*slot)->speculative_targets)
3712 (*slot)->speculative_targets = nodes.length();
3714 /* Destructors are never called through construction virtual tables,
3715 because the type is always known. One of entries may be cxa_pure_virtual
3716 so look to at least two of them. */
3717 if (context.maybe_in_construction)
3718 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3719 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3720 context.maybe_in_construction = false;
3721 if (context.maybe_in_construction)
3723 if (type != outer_type
3724 && (!skipped
3725 || (context.maybe_derived_type
3726 && !type_all_derivations_known_p (outer_type->type))))
3727 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3728 context.offset, nodes, &inserted,
3729 &matched_vtables, &complete);
3730 if (skipped)
3731 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
 /* NOTE(review): CAN_REFER below still holds the value from the last
 gimple_get_virt_method_for_binfo call above rather than one computed
 per BASES_TO_CONSIDER entry -- verify this is intended. */
3732 for (i = 0; i < bases_to_consider.length(); i++)
3733 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3735 bases_to_consider.release();
3737 (*slot)->targets = nodes;
3738 (*slot)->complete = complete;
3739 if (completep)
3740 *completep = complete;
3741 if (speculative_targetsp)
3742 *speculative_targetsp = (*slot)->speculative_targets;
3744 timevar_pop (TV_IPA_VIRTUAL_CALL);
3745 return nodes;
/* Callback for hash_map::traverse over decl_warnings: append VALUE to
 VEC so the records can be sorted for -Wsuggest-final-methods output;
 KEY is unused. Returns true to continue the traversal. */
3748 bool
3749 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3750 vec<const decl_warn_count*> *vec)
3752 vec->safe_push (&value);
3753 return true;
3756 /* Dump all possible targets of a polymorphic call described by OTR_TYPE,
 OTR_TOKEN and context CTX into F. */
3758 void
3759 dump_possible_polymorphic_call_targets (FILE *f,
3760 tree otr_type,
3761 HOST_WIDE_INT otr_token,
3762 const ipa_polymorphic_call_context &ctx)
3764 vec <cgraph_node *> targets;
3765 bool final;
3766 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3767 unsigned int i;
3768 int speculative;
3770 if (!type)
3771 return;
3772 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3773 ctx,
3774 &final, NULL, &speculative);
3775 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3776 print_generic_expr (f, type->type, TDF_SLIM);
3777 fprintf (f, " token %i\n", (int)otr_token);
3778 if (ctx.outer_type || ctx.offset)
3780 fprintf (f, " Contained in type:");
3781 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
3782 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
3783 ctx.offset);
3785 if (ctx.speculative_outer_type)
3787 fprintf (f, " Speculatively contained in type:");
3788 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
3789 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
3790 ctx.speculative_offset);
3793 fprintf (f, " %s%s%s%s\n ",
3794 final ? "This is a complete list." :
3795 "This is partial list; extra targets may be defined in other units.",
3796 ctx.maybe_in_construction ? " (base types included)" : "",
3797 ctx.maybe_derived_type ? " (derived types included)" : "",
3798 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3799 for (i = 0; i < targets.length (); i++)
3801 char *name = NULL;
 /* Entries at index SPECULATIVE and beyond are the non-likely targets. */
3802 if (i == (unsigned)speculative)
3803 fprintf (f, "\n Targets that are not likely:\n"
3804 " ");
 /* In LTO symbol names are mangled; demangle for readable dumps. */
3805 if (in_lto_p)
3806 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3807 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3808 if (in_lto_p)
3809 free (name);
3810 if (!targets[i]->definition)
3811 fprintf (f, " (no definition%s)",
3812 DECL_DECLARED_INLINE_P (targets[i]->decl)
3813 ? " inline" : "");
3815 fprintf (f, "\n\n");
3819 /* Return true if N can be possibly target of a polymorphic call of
3820 OTR_TYPE/OTR_TOKEN. */
3822 bool
3823 possible_polymorphic_call_target_p (tree otr_type,
3824 HOST_WIDE_INT otr_token,
3825 const ipa_polymorphic_call_context &ctx,
3826 struct cgraph_node *n)
3828 vec <cgraph_node *> targets;
3829 unsigned int i;
3830 enum built_in_function fcode;
3831 bool final;
3833 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3834 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3835 == BUILT_IN_UNREACHABLE
3836 || fcode == BUILT_IN_TRAP))
3837 return true;
3839 if (!odr_hash)
3840 return true;
3841 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3842 for (i = 0; i < targets.length (); i++)
3843 if (n->semantically_equivalent_p (targets[i]))
3844 return true;
3846 /* At a moment we allow middle end to dig out new external declarations
3847 as a targets of polymorphic calls. */
3848 if (!final && !n->definition)
3849 return true;
3850 return false;
3854 /* After callgraph construction new external nodes may appear.
3855 Add them into the graph. */
3857 void
3858 update_type_inheritance_graph (void)
3860 struct cgraph_node *n;
3862 if (!odr_hash)
3863 return;
3864 free_polymorphic_call_targets_hash ();
3865 timevar_push (TV_IPA_INHERITANCE);
3866 /* We reconstruct the graph starting from types of all methods seen in the
3867 the unit. */
3868 FOR_EACH_FUNCTION (n)
3869 if (DECL_VIRTUAL_P (n->decl)
3870 && !n->definition
3871 && n->real_symbol_p ())
3872 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
3873 true);
3874 timevar_pop (TV_IPA_INHERITANCE);
3878 /* Return true if N looks like likely target of a polymorphic call.
3879 Rule out cxa_pure_virtual, noreturns, function declared cold and
3880 other obvious cases. */
3882 bool
3883 likely_target_p (struct cgraph_node *n)
3885 int flags;
3886 /* cxa_pure_virtual and similar things are not likely. */
3887 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3888 return false;
3889 flags = flags_from_decl_or_type (n->decl);
3890 if (flags & ECF_NORETURN)
3891 return false;
3892 if (lookup_attribute ("cold",
3893 DECL_ATTRIBUTES (n->decl)))
3894 return false;
3895 if (n->frequency < NODE_FREQUENCY_NORMAL)
3896 return false;
3897 /* If there are no virtual tables refering the target alive,
3898 the only way the target can be called is an instance comming from other
3899 compilation unit; speculative devirtualization is build around an
3900 assumption that won't happen. */
3901 if (!referenced_from_vtable_p (n))
3902 return false;
3903 return true;
3906 /* Compare type warning records P1 and P2 and choose one with larger count;
3907 helper for qsort. Sorts records by descending profile count first,
 then by descending static call count. */
3910 type_warning_cmp (const void *p1, const void *p2)
3912 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3913 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
 /* Larger dyn_count sorts earlier. */
3915 if (t1->dyn_count < t2->dyn_count)
3916 return 1;
3917 if (t1->dyn_count > t2->dyn_count)
3918 return -1;
 /* Tie break on the static count, again descending. */
3919 return t2->count - t1->count;
3922 /* Compare decl warning records P1 and P2 and choose one with larger count;
3923 helper for qsort. Note the elements are pointers to records, matching
 the vec<const decl_warn_count*> built by add_decl_warning. */
3926 decl_warning_cmp (const void *p1, const void *p2)
3928 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3929 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
 /* Larger dyn_count sorts earlier. */
3931 if (t1->dyn_count < t2->dyn_count)
3932 return 1;
3933 if (t1->dyn_count > t2->dyn_count)
3934 return -1;
 /* Tie break on the static count, again descending. */
3935 return t2->count - t1->count;
3938 /* The ipa-devirt pass.
3939 When polymorphic call has only one likely target in the unit,
3940 turn it into speculative call. */
3942 static unsigned int
3943 ipa_devirt (void)
3945 struct cgraph_node *n;
3946 hash_set<void *> bad_call_targets;
3947 struct cgraph_edge *e;
3949 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3950 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3951 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3953 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3954 This is implemented by setting up final_warning_records that are updated
3955 by get_polymorphic_call_targets.
3956 We need to clear cache in this case to trigger recomputation of all
3957 entries. */
3958 if (warn_suggest_final_methods || warn_suggest_final_types)
3960 final_warning_records = new (final_warning_record);
3961 final_warning_records->type_warnings = vNULL;
3962 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3963 free_polymorphic_call_targets_hash ();
3966 FOR_EACH_DEFINED_FUNCTION (n)
3968 bool update = false;
3969 if (dump_file && n->indirect_calls)
3970 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
3971 n->name (), n->order);
3972 for (e = n->indirect_calls; e; e = e->next_callee)
3973 if (e->indirect_info->polymorphic)
3975 struct cgraph_node *likely_target = NULL;
3976 void *cache_token;
3977 bool final;
3978 int speculative_targets;
3980 if (final_warning_records)
3981 final_warning_records->dyn_count = e->count;
3983 vec <cgraph_node *>targets
3984 = possible_polymorphic_call_targets
3985 (e, &final, &cache_token, &speculative_targets);
3986 unsigned int i;
3988 if (dump_file)
3989 dump_possible_polymorphic_call_targets
3990 (dump_file, e);
3992 npolymorphic++;
3994 if (!flag_devirtualize_speculatively)
3995 continue;
3997 if (!e->maybe_hot_p ())
3999 if (dump_file)
4000 fprintf (dump_file, "Call is cold\n\n");
4001 ncold++;
4002 continue;
4004 if (e->speculative)
4006 if (dump_file)
4007 fprintf (dump_file, "Call is aready speculated\n\n");
4008 nspeculated++;
4010 /* When dumping see if we agree with speculation. */
4011 if (!dump_file)
4012 continue;
4014 if (bad_call_targets.contains (cache_token))
4016 if (dump_file)
4017 fprintf (dump_file, "Target list is known to be useless\n\n");
4018 nmultiple++;
4019 continue;
4021 for (i = 0; i < targets.length (); i++)
4022 if (likely_target_p (targets[i]))
4024 if (likely_target)
4026 if (i < (unsigned) speculative_targets)
4028 likely_target = NULL;
4029 if (dump_file)
4030 fprintf (dump_file, "More than one likely target\n\n");
4031 nmultiple++;
4033 break;
4035 likely_target = targets[i];
4037 if (!likely_target)
4039 bad_call_targets.add (cache_token);
4040 continue;
4042 /* This is reached only when dumping; check if we agree or disagree
4043 with the speculation. */
4044 if (e->speculative)
4046 struct cgraph_edge *e2;
4047 struct ipa_ref *ref;
4048 e->speculative_call_info (e2, e, ref);
4049 if (e2->callee->ultimate_alias_target ()
4050 == likely_target->ultimate_alias_target ())
4052 fprintf (dump_file, "We agree with speculation\n\n");
4053 nok++;
4055 else
4057 fprintf (dump_file, "We disagree with speculation\n\n");
4058 nwrong++;
4060 continue;
4062 if (!likely_target->definition)
4064 if (dump_file)
4065 fprintf (dump_file, "Target is not an definition\n\n");
4066 nnotdefined++;
4067 continue;
4069 /* Do not introduce new references to external symbols. While we
4070 could handle these just fine, it is common for programs to be
4071 linked incorrectly, with headers declaring methods that the
4072 binary is not actually linked with. */
4073 if (DECL_EXTERNAL (likely_target->decl))
4075 if (dump_file)
4076 fprintf (dump_file, "Target is external\n\n");
4077 nexternal++;
4078 continue;
4080 /* Don't use an implicitly-declared destructor (c++/58678). */
4081 struct cgraph_node *non_thunk_target
4082 = likely_target->function_symbol ();
4083 if (DECL_ARTIFICIAL (non_thunk_target->decl))
4085 if (dump_file)
4086 fprintf (dump_file, "Target is artificial\n\n");
4087 nartificial++;
4088 continue;
4090 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
4091 && likely_target->can_be_discarded_p ())
4093 if (dump_file)
4094 fprintf (dump_file, "Target is overwritable\n\n");
4095 noverwritable++;
4096 continue;
4098 else if (dbg_cnt (devirt))
4100 if (dump_enabled_p ())
4102 location_t locus = gimple_location_safe (e->call_stmt);
4103 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
4104 "speculatively devirtualizing call in %s/%i to %s/%i\n",
4105 n->name (), n->order,
4106 likely_target->name (),
4107 likely_target->order);
4109 if (!likely_target->can_be_discarded_p ())
4111 cgraph_node *alias;
4112 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
4113 if (alias)
4114 likely_target = alias;
4116 nconverted++;
4117 update = true;
4118 e->make_speculative
4119 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
4122 if (update)
4123 inline_update_overall_summary (n);
4125 if (warn_suggest_final_methods || warn_suggest_final_types)
4127 if (warn_suggest_final_types)
4129 final_warning_records->type_warnings.qsort (type_warning_cmp);
4130 for (unsigned int i = 0;
4131 i < final_warning_records->type_warnings.length (); i++)
4132 if (final_warning_records->type_warnings[i].count)
4134 tree type = final_warning_records->type_warnings[i].type;
4135 warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
4136 OPT_Wsuggest_final_types,
4137 "Declaring type %qD final "
4138 "would enable devirtualization of %i calls",
4139 type,
4140 final_warning_records->type_warnings[i].count);
4144 if (warn_suggest_final_methods)
4146 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
4148 final_warning_records->decl_warnings.traverse
4149 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
4150 decl_warnings_vec.qsort (decl_warning_cmp);
4151 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
4153 tree decl = decl_warnings_vec[i]->decl;
4154 int count = decl_warnings_vec[i]->count;
4156 if (DECL_CXX_DESTRUCTOR_P (decl))
4157 warning_at (DECL_SOURCE_LOCATION (decl),
4158 OPT_Wsuggest_final_methods,
4159 "Declaring virtual destructor of %qD final "
4160 "would enable devirtualization of %i calls",
4161 DECL_CONTEXT (decl), count);
4162 else
4163 warning_at (DECL_SOURCE_LOCATION (decl),
4164 OPT_Wsuggest_final_methods,
4165 "Declaring method %qD final "
4166 "would enable devirtualization of %i calls",
4167 decl, count);
4171 delete (final_warning_records);
4172 final_warning_records = 0;
4175 if (dump_file)
4176 fprintf (dump_file,
4177 "%i polymorphic calls, %i devirtualized,"
4178 " %i speculatively devirtualized, %i cold\n"
4179 "%i have multiple targets, %i overwritable,"
4180 " %i already speculated (%i agree, %i disagree),"
4181 " %i external, %i not defined, %i artificial\n",
4182 npolymorphic, ndevirtualized, nconverted, ncold,
4183 nmultiple, noverwritable, nspeculated, nok, nwrong,
4184 nexternal, nnotdefined, nartificial);
4185 return ndevirtualized ? TODO_remove_functions : 0;
4188 namespace {
4190 const pass_data pass_data_ipa_devirt =
4192 IPA_PASS, /* type */
4193 "devirt", /* name */
4194 OPTGROUP_NONE, /* optinfo_flags */
4195 TV_IPA_DEVIRT, /* tv_id */
4196 0, /* properties_required */
4197 0, /* properties_provided */
4198 0, /* properties_destroyed */
4199 0, /* todo_flags_start */
4200 ( TODO_dump_symtab ), /* todo_flags_finish */
4203 class pass_ipa_devirt : public ipa_opt_pass_d
4205 public:
4206 pass_ipa_devirt (gcc::context *ctxt)
4207 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
4208 NULL, /* generate_summary */
4209 NULL, /* write_summary */
4210 NULL, /* read_summary */
4211 NULL, /* write_optimization_summary */
4212 NULL, /* read_optimization_summary */
4213 NULL, /* stmt_fixup */
4214 0, /* function_transform_todo_flags_start */
4215 NULL, /* function_transform */
4216 NULL) /* variable_transform */
4219 /* opt_pass methods: */
4220 virtual bool gate (function *)
4222 return (flag_devirtualize
4223 && (flag_devirtualize_speculatively
4224 || (warn_suggest_final_methods
4225 || warn_suggest_final_types))
4226 && optimize);
4229 virtual unsigned int execute (function *) { return ipa_devirt (); }
4231 }; // class pass_ipa_devirt
4233 } // anon namespace
4235 ipa_opt_pass_d *
4236 make_pass_ipa_devirt (gcc::context *ctxt)
4238 return new pass_ipa_devirt (ctxt);
4241 #include "gt-ipa-devirt.h"