1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
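     As a minimal illustration (not part of GCC itself) of the kind of ODR
     violation this file detects, consider two translation units that give
     the same class different layouts:

       // tu1.C
       struct S { int a; virtual int f (); };

       // tu2.C
       struct S { long a, b; virtual int f (); };  // same name, other layout

     When both units are combined by LTO, add_type_duplicate below notices
     the mismatch and emits the "violates one definition rule" warning.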
39 OTR = OBJ_TYPE_REF
40 This is the GIMPLE representation of the type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
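     As an illustrative sketch (hypothetical names, not taken from GCC), for

       struct A { virtual int f (); virtual int g (); };
       int call (A *p) { return p->g (); }

     the call p->g () is represented by an OBJ_TYPE_REF whose otr_type is A
     and whose otr_token is typically 1, g being the second function slot of
     A's virtual table (f occupies slot 0).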
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by the BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather, they are new copies of those BINFOs
57 (base BINFOs). Their virtual tables may differ from the
58 virtual table of the base type. Also, BINFO_OFFSET specifies the
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and the BINFO_VTABLE of the base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by the
64 BINFO_VTABLE of the base binfos (which differs from the BINFO_VTABLE of the
65 binfo associated with the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used to look up the virtual methods associated with the
70 base type.
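     A rough sketch of the above for a small hypothetical hierarchy:

       struct A { virtual void f (); };
       struct B : A { };        // single inheritance: the base BINFO for A
                                // inside B has no BINFO_VTABLE (vtable shared)
       struct C { virtual void g (); };
       struct D : A, C { };     // multiple inheritance: the base BINFOs for A
                                // and C inside D carry their own BINFO_VTABLE
                                // and BINFO_OFFSET within D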
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from an OBJ_TYPE_REF
75 or from the DECL_VINDEX of a given virtual method.
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by the ODR rule.
96 (Multiple type nodes appear only with link-time optimization.)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
133 static bool odr_violation_reported = false;
135 /* Dummy polymorphic call context. */
137 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
138 = {0, NULL, false, true};
140 /* Pointer set of all call targets appearing in the cache. */
141 static pointer_set_t *cached_polymorphic_call_targets;
143 /* The node of the type inheritance graph. For each type unique in the
144 One Definition Rule (ODR) sense, we produce one node linking all
145 main variants of types equivalent to it, bases and derived types. */
147 struct GTY(()) odr_type_d
149 /* leader type. */
150 tree type;
151 /* All bases. */
152 vec<odr_type> GTY((skip)) bases;
153 /* All derived types with virtual methods seen in the unit. */
154 vec<odr_type> GTY((skip)) derived_types;
156 /* All equivalent types, if more than one. */
157 vec<tree, va_gc> *types;
158 /* Set of all equivalent types, if NON-NULL. */
159 pointer_set_t * GTY((skip)) types_set;
161 /* Unique ID indexing the type in odr_types array. */
162 int id;
163 /* Is it in anonymous namespace? */
164 bool anonymous_namespace;
168 /* Return true if BINFO corresponds to a type with virtual methods.
170 Every type has several BINFOs. One is the BINFO associated with the type
171 itself, while the others represent bases of derived types. The BINFOs representing
172 bases do not have the BINFO_VTABLE pointer set when there is single
173 inheritance (because vtables are shared). Look up the BINFO of the type
174 and check the presence of its vtable. */
176 static inline bool
177 polymorphic_type_binfo_p (tree binfo)
179 /* See if BINFO's type has a virtual table associated with it. */
180 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
183 /* One Definition Rule hashtable helpers. */
185 struct odr_hasher
187 typedef odr_type_d value_type;
188 typedef union tree_node compare_type;
189 static inline hashval_t hash (const value_type *);
190 static inline bool equal (const value_type *, const compare_type *);
191 static inline void remove (value_type *);
194 /* Produce hash based on type name. */
196 hashval_t
197 hash_type_name (tree t)
199 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
201 /* If not in LTO, all main variants are unique, so we can do
202 pointer hash. */
203 if (!in_lto_p)
204 return htab_hash_pointer (t);
206 /* Anonymous types are unique. */
207 if (type_in_anonymous_namespace_p (t))
208 return htab_hash_pointer (t);
210 /* For polymorphic types, we can simply hash the virtual table. */
211 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
213 tree v = BINFO_VTABLE (TYPE_BINFO (t));
214 hashval_t hash = 0;
216 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
218 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
219 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
222 v = DECL_ASSEMBLER_NAME (v);
223 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
224 return hash;
227 /* Rest is not implemented yet. */
228 gcc_unreachable ();
231 /* Return the computed hashcode for ODR_TYPE. */
233 inline hashval_t
234 odr_hasher::hash (const value_type *odr_type)
236 return hash_type_name (odr_type->type);
239 /* Compare types T1 and T2 and return true if they are
240 equivalent. */
242 inline bool
243 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
245 tree t2 = const_cast <tree> (ct2);
247 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
248 if (t1->type == t2)
249 return true;
250 if (!in_lto_p)
251 return false;
252 return types_same_for_odr (t1->type, t2);
255 /* Free ODR type V. */
257 inline void
258 odr_hasher::remove (value_type *v)
260 v->bases.release ();
261 v->derived_types.release ();
262 if (v->types_set)
263 pointer_set_destroy (v->types_set);
264 ggc_free (v);
267 /* ODR type hash used to lookup ODR type based on tree type node. */
269 typedef hash_table <odr_hasher> odr_hash_type;
270 static odr_hash_type odr_hash;
272 /* ODR types are also stored into the ODR_TYPE vector to allow consistent
273 walking. Bases appear before derived types. The vector is garbage collected
274 so we won't end up visiting empty types. */
276 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
277 #define odr_types (*odr_types_ptr)
279 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
280 from VAL->type. This may happen in LTO where tree merging did not merge
281 all variants of the same type. It may or may not indicate an ODR violation.
282 Add it to the list of duplicates and warn on some violations. */
284 static void
285 add_type_duplicate (odr_type val, tree type)
287 if (!val->types_set)
288 val->types_set = pointer_set_create ();
290 /* See if this duplicate is new. */
291 if (!pointer_set_insert (val->types_set, type))
293 bool merge = true;
294 bool base_mismatch = false;
295 gcc_assert (in_lto_p);
296 vec_safe_push (val->types, type);
297 unsigned int i,j;
299 /* First we compare memory layout. */
300 if (!types_compatible_p (val->type, type))
302 merge = false;
303 odr_violation_reported = true;
304 if (BINFO_VTABLE (TYPE_BINFO (val->type))
305 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
306 "type %qD violates one definition rule ",
307 type))
308 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
309 "a type with the same name but different layout is "
310 "defined in another translation unit");
311 if (cgraph_dump_file)
313 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
315 print_node (cgraph_dump_file, "", val->type, 0);
316 putc ('\n',cgraph_dump_file);
317 print_node (cgraph_dump_file, "", type, 0);
318 putc ('\n',cgraph_dump_file);
322 /* Next sanity check that bases are the same. If not, we will end
323 up producing wrong answers. */
324 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
325 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
327 odr_type base = get_odr_type
328 (BINFO_TYPE
329 (BINFO_BASE_BINFO (TYPE_BINFO (type),
330 i)),
331 true);
332 if (val->bases.length () <= j || val->bases[j] != base)
333 base_mismatch = true;
334 j++;
336 if (base_mismatch)
338 merge = false;
339 odr_violation_reported = true;
341 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
342 "type %qD violates one definition rule ",
343 type))
344 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
345 "a type with the same name but different bases is "
346 "defined in another translation unit");
347 if (cgraph_dump_file)
349 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
351 print_node (cgraph_dump_file, "", val->type, 0);
352 putc ('\n',cgraph_dump_file);
353 print_node (cgraph_dump_file, "", type, 0);
354 putc ('\n',cgraph_dump_file);
358 /* Regularize things a little. During LTO the same types may come with
359 different BINFOs: either because their virtual table was
360 not merged by tree merging and only later at decl merging, or
361 because one type comes with an external vtable while the other
362 comes with an internal one. We want to merge equivalent binfos to conserve
363 memory and streaming overhead.
365 The external vtables are more harmful: they contain references
366 to external declarations of methods that may be defined in the
367 merged LTO unit. For this reason we absolutely need to remove
368 them and replace them by the internal variants. Not doing so will lead
369 to incomplete answers from possible_polymorphic_call_targets. */
370 if (!flag_ltrans && merge)
372 tree master_binfo = TYPE_BINFO (val->type);
373 tree v1 = BINFO_VTABLE (master_binfo);
374 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
376 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
378 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
379 && operand_equal_p (TREE_OPERAND (v1, 1),
380 TREE_OPERAND (v2, 1), 0));
381 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
382 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
384 gcc_assert (DECL_ASSEMBLER_NAME (v1)
385 == DECL_ASSEMBLER_NAME (v2));
387 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
389 unsigned int i;
391 TYPE_BINFO (val->type) = TYPE_BINFO (type);
392 for (i = 0; i < val->types->length (); i++)
394 if (TYPE_BINFO ((*val->types)[i])
395 == master_binfo)
396 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
399 else
400 TYPE_BINFO (type) = master_binfo;
405 /* Get ODR type hash entry for TYPE. If INSERT is true, create
406 possibly new entry. */
408 odr_type
409 get_odr_type (tree type, bool insert)
411 odr_type_d **slot;
412 odr_type val;
413 hashval_t hash;
415 type = TYPE_MAIN_VARIANT (type);
416 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
417 hash = hash_type_name (type);
418 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
419 if (!slot)
420 return NULL;
422 /* See if we already have entry for type. */
423 if (*slot)
425 val = *slot;
427 /* With LTO we need to support multiple tree representations of
428 the same ODR type. */
429 if (val->type != type)
430 add_type_duplicate (val, type);
432 else
434 tree binfo = TYPE_BINFO (type);
435 unsigned int i;
437 val = ggc_alloc_cleared_odr_type_d ();
438 val->type = type;
439 val->bases = vNULL;
440 val->derived_types = vNULL;
441 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
442 *slot = val;
443 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
444 /* For now record only polymorphic types. Others are
445 pointless for devirtualization and we cannot precisely
446 determine ODR equivalency of these during LTO. */
447 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
449 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
450 i)),
451 true);
452 base->derived_types.safe_push (val);
453 val->bases.safe_push (base);
455 /* First record bases, then add into array so ids are increasing. */
456 if (odr_types_ptr)
457 val->id = odr_types.length ();
458 vec_safe_push (odr_types_ptr, val);
460 return val;
463 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
464 recursive printing. */
466 static void
467 dump_odr_type (FILE *f, odr_type t, int indent=0)
469 unsigned int i;
470 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
471 print_generic_expr (f, t->type, TDF_SLIM);
472 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
473 if (TYPE_NAME (t->type))
475 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
476 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
477 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
479 if (t->bases.length ())
481 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
482 for (i = 0; i < t->bases.length (); i++)
483 fprintf (f, " %i", t->bases[i]->id);
484 fprintf (f, "\n");
486 if (t->derived_types.length ())
488 fprintf (f, "%*s derived types:\n", indent * 2, "");
489 for (i = 0; i < t->derived_types.length (); i++)
490 dump_odr_type (f, t->derived_types[i], indent + 1);
492 fprintf (f, "\n");
495 /* Dump the type inheritance graph. */
497 static void
498 dump_type_inheritance_graph (FILE *f)
500 unsigned int i;
501 if (!odr_types_ptr)
502 return;
503 fprintf (f, "\n\nType inheritance graph:\n");
504 for (i = 0; i < odr_types.length (); i++)
506 if (odr_types[i]->bases.length () == 0)
507 dump_odr_type (f, odr_types[i]);
509 for (i = 0; i < odr_types.length (); i++)
511 if (odr_types[i]->types && odr_types[i]->types->length ())
513 unsigned int j;
514 fprintf (f, "Duplicate tree types for odr type %i\n", i);
515 print_node (f, "", odr_types[i]->type, 0);
516 for (j = 0; j < odr_types[i]->types->length (); j++)
518 tree t;
519 fprintf (f, "duplicate #%i\n", j);
520 print_node (f, "", (*odr_types[i]->types)[j], 0);
521 t = (*odr_types[i]->types)[j];
522 while (TYPE_P (t) && TYPE_CONTEXT (t))
524 t = TYPE_CONTEXT (t);
525 print_node (f, "", t, 0);
527 putc ('\n',f);
533 /* Given method type T, return the type of the class it belongs to.
534 Look up the `this' pointer and get its type. */
536 tree
537 method_class_type (tree t)
539 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
540 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
542 return TREE_TYPE (first_parm_type);
545 /* Initialize IPA devirt and build inheritance tree graph. */
547 void
548 build_type_inheritance_graph (void)
550 struct symtab_node *n;
551 FILE *inheritance_dump_file;
552 int flags;
554 if (odr_hash.is_created ())
555 return;
556 timevar_push (TV_IPA_INHERITANCE);
557 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
558 odr_hash.create (23);
560 /* We reconstruct the graph starting from the types of all methods seen
561 in the unit. */
562 FOR_EACH_SYMBOL (n)
563 if (is_a <cgraph_node> (n)
564 && DECL_VIRTUAL_P (n->decl)
565 && symtab_real_symbol_p (n))
566 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
568 /* Look also for virtual tables of types that do not define any methods.
570 We need this in the case where class B has a virtual base of class A
571 re-defining its virtual method and there is a class C with no virtual
572 methods that has B as a virtual base.
574 Here we output B's virtual method in two variants - for non-virtual
575 and for virtual inheritance. B's virtual table has the non-virtual version,
576 while C's has the virtual one.
578 For this reason we need to know about C in order to include both
579 variants of B. More correctly, record_target_from_binfo should
580 add both variants of the method when walking B, but we have no
581 link between them.
583 We rely on the fact that either the method is exported and thus we
584 assume it is called externally, or C is in an anonymous namespace and
585 thus we will see the vtable. */
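  /* An illustrative sketch of the hierarchy described above:

       struct A { virtual void f (); };
       struct B : virtual A { void f (); };  // redefines A's virtual method
       struct C : virtual B { };             // no virtual methods of its own

     B's own virtual table contains the non-virtual-inheritance variant of
     B::f, while C's virtual table contains the virtual-inheritance variant;
     knowing about C is what lets us record both variants.  */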
587 else if (is_a <varpool_node> (n)
588 && DECL_VIRTUAL_P (n->decl)
589 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
590 && TYPE_BINFO (DECL_CONTEXT (n->decl))
591 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
592 get_odr_type (DECL_CONTEXT (n->decl), true);
593 if (inheritance_dump_file)
595 dump_type_inheritance_graph (inheritance_dump_file);
596 dump_end (TDI_inheritance, inheritance_dump_file);
598 timevar_pop (TV_IPA_INHERITANCE);
601 /* If TARGET has an associated node, record it in the NODES array.
602 CAN_REFER specifies whether the program can refer to the target directly.
603 If TARGET is unknown (NULL) or it cannot be inserted (for example because
604 its body was already removed and there is no way to refer to it), clear
605 COMPLETEP. */
607 static void
608 maybe_record_node (vec <cgraph_node *> &nodes,
609 tree target, pointer_set_t *inserted,
610 bool can_refer,
611 bool *completep)
613 struct cgraph_node *target_node;
615 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
616 the list of targets; the runtime effect of calling them is undefined.
617 Only "real" virtual methods should be accounted for. */
618 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
619 return;
621 if (!can_refer)
623 /* The only case when a method of an anonymous namespace becomes unreferable
624 is when we have completely optimized it out. */
625 if (flag_ltrans
626 || !target
627 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
628 *completep = false;
629 return;
632 if (!target)
633 return;
635 target_node = cgraph_get_node (target);
637 if (target_node != NULL
638 && ((TREE_PUBLIC (target)
639 || DECL_EXTERNAL (target))
640 || target_node->definition)
641 && symtab_real_symbol_p (target_node))
643 gcc_assert (!target_node->global.inlined_to);
644 gcc_assert (symtab_real_symbol_p (target_node));
645 if (!pointer_set_insert (inserted, target))
647 pointer_set_insert (cached_polymorphic_call_targets,
648 target_node);
649 nodes.safe_push (target_node);
652 else if (completep
653 && !type_in_anonymous_namespace_p
654 (method_class_type (TREE_TYPE (target))))
655 *completep = false;
658 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
659 BINFO of the subtype of OTR_TYPE at OFFSET, find the
660 method in that BINFO's vtable and insert the method into the NODES array.
661 Otherwise recurse to base BINFOs.
662 This matches what get_binfo_at_offset does, but with the offset
663 being unknown.
665 TYPE_BINFOS is a stack of BINFOs of types with a defined
666 virtual table seen on the way from the class type to BINFO.
668 MATCHED_VTABLES tracks virtual tables in which we have already looked up
669 the virtual function. INSERTED tracks nodes we have already
670 inserted.
672 ANONYMOUS is true if BINFO is part of an anonymous namespace.
674 Clear COMPLETEP when we hit an unreferable target.
677 static void
678 record_target_from_binfo (vec <cgraph_node *> &nodes,
679 tree binfo,
680 tree otr_type,
681 vec <tree> &type_binfos,
682 HOST_WIDE_INT otr_token,
683 tree outer_type,
684 HOST_WIDE_INT offset,
685 pointer_set_t *inserted,
686 pointer_set_t *matched_vtables,
687 bool anonymous,
688 bool *completep)
690 tree type = BINFO_TYPE (binfo);
691 int i;
692 tree base_binfo;
695 if (BINFO_VTABLE (binfo))
696 type_binfos.safe_push (binfo);
697 if (types_same_for_odr (type, outer_type))
699 int i;
700 tree type_binfo = NULL;
702 /* Look up the BINFO with the virtual table. For normal types it is always
703 the last binfo on the stack. */
704 for (i = type_binfos.length () - 1; i >= 0; i--)
705 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
707 type_binfo = type_binfos[i];
708 break;
710 if (BINFO_VTABLE (binfo))
711 type_binfos.pop ();
712 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
713 we may not have its associated vtable. This is not a problem, since
714 we will walk it on the other path. */
715 if (!type_binfo)
716 return;
717 tree inner_binfo = get_binfo_at_offset (type_binfo,
718 offset, otr_type);
719 if (!inner_binfo)
721 gcc_assert (odr_violation_reported);
722 return;
724 /* For types in anonymous namespace first check if the respective vtable
725 is alive. If not, we know the type can't be called. */
726 if (!flag_ltrans && anonymous)
728 tree vtable = BINFO_VTABLE (inner_binfo);
729 varpool_node *vnode;
731 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
732 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
733 vnode = varpool_get_node (vtable);
734 if (!vnode || !vnode->definition)
735 return;
737 gcc_assert (inner_binfo);
738 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
740 bool can_refer;
741 tree target = gimple_get_virt_method_for_binfo (otr_token,
742 inner_binfo,
743 &can_refer);
744 maybe_record_node (nodes, target, inserted, can_refer, completep);
746 return;
749 /* Walk bases. */
750 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
751 /* Walking bases that have no virtual method is a pointless exercise. */
752 if (polymorphic_type_binfo_p (base_binfo))
753 record_target_from_binfo (nodes, base_binfo, otr_type,
754 type_binfos,
755 otr_token, outer_type, offset, inserted,
756 matched_vtables, anonymous, completep);
757 if (BINFO_VTABLE (binfo))
758 type_binfos.pop ();
761 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
762 of TYPE, insert them into NODES, and recurse into derived nodes.
763 INSERTED is used to avoid duplicate insertions of methods into NODES.
764 MATCHED_VTABLES is used to avoid walking vtables more than once.
765 Clear COMPLETEP if an unreferable target is found. */
767 static void
768 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
769 pointer_set_t *inserted,
770 pointer_set_t *matched_vtables,
771 tree otr_type,
772 odr_type type,
773 HOST_WIDE_INT otr_token,
774 tree outer_type,
775 HOST_WIDE_INT offset,
776 bool *completep)
778 tree binfo = TYPE_BINFO (type->type);
779 unsigned int i;
780 vec <tree> type_binfos = vNULL;
782 record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
783 outer_type, offset,
784 inserted, matched_vtables,
785 type->anonymous_namespace, completep);
786 type_binfos.release ();
787 for (i = 0; i < type->derived_types.length (); i++)
788 possible_polymorphic_call_targets_1 (nodes, inserted,
789 matched_vtables,
790 otr_type,
791 type->derived_types[i],
792 otr_token, outer_type, offset, completep);
795 /* Cache of queries for polymorphic call targets.
797 Enumerating all call targets may get expensive when there are many
798 polymorphic calls in the program, so we memoize all the previous
799 queries and avoid duplicated work. */
801 struct polymorphic_call_target_d
803 HOST_WIDE_INT otr_token;
804 ipa_polymorphic_call_context context;
805 odr_type type;
806 vec <cgraph_node *> targets;
807 int nonconstruction_targets;
808 bool complete;
811 /* Polymorphic call target cache helpers. */
813 struct polymorphic_call_target_hasher
815 typedef polymorphic_call_target_d value_type;
816 typedef polymorphic_call_target_d compare_type;
817 static inline hashval_t hash (const value_type *);
818 static inline bool equal (const value_type *, const compare_type *);
819 static inline void remove (value_type *);
822 /* Return the computed hashcode for ODR_QUERY. */
824 inline hashval_t
825 polymorphic_call_target_hasher::hash (const value_type *odr_query)
827 hashval_t hash;
829 hash = iterative_hash_host_wide_int
830 (odr_query->otr_token,
831 odr_query->type->id);
832 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
833 hash);
834 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
835 return iterative_hash_hashval_t
836 (((int)odr_query->context.maybe_in_construction << 1)
837 | (int)odr_query->context.maybe_derived_type, hash);
840 /* Compare cache entries T1 and T2. */
842 inline bool
843 polymorphic_call_target_hasher::equal (const value_type *t1,
844 const compare_type *t2)
846 return (t1->type == t2->type && t1->otr_token == t2->otr_token
847 && t1->context.offset == t2->context.offset
848 && t1->context.outer_type == t2->context.outer_type
849 && t1->context.maybe_in_construction
850 == t2->context.maybe_in_construction
851 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
854 /* Remove entry in polymorphic call target cache hash. */
856 inline void
857 polymorphic_call_target_hasher::remove (value_type *v)
859 v->targets.release ();
860 free (v);
863 /* Polymorphic call target query cache. */
865 typedef hash_table <polymorphic_call_target_hasher>
866 polymorphic_call_target_hash_type;
867 static polymorphic_call_target_hash_type polymorphic_call_target_hash;
869 /* Destroy polymorphic call target query cache. */
871 static void
872 free_polymorphic_call_targets_hash ()
874 if (cached_polymorphic_call_targets)
876 polymorphic_call_target_hash.dispose ();
877 pointer_set_destroy (cached_polymorphic_call_targets);
878 cached_polymorphic_call_targets = NULL;
882 /* When virtual function is removed, we may need to flush the cache. */
884 static void
885 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
887 if (cached_polymorphic_call_targets
888 && pointer_set_contains (cached_polymorphic_call_targets, n))
889 free_polymorphic_call_targets_hash ();
892 /* CONTEXT->OUTER_TYPE is the type of a memory object in which an object of
893 EXPECTED_TYPE is contained at CONTEXT->OFFSET. Walk the memory representation of
894 CONTEXT->OUTER_TYPE and find the outermost class type that matches
895 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update CONTEXT
896 to represent it.
898 For example, when CONTEXT represents the type
899 class A
900 {
901 int a;
902 class B b;
903 };
904 and we look for the type at offset sizeof(int), we end up with B and offset 0.
905 If the same layout is produced by multiple inheritance, we end up with A and
906 offset sizeof(int).
908 If we cannot find the corresponding class, give up by setting
909 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
910 Return true when the lookup was successful. */
912 static bool
913 get_class_context (ipa_polymorphic_call_context *context,
914 tree expected_type)
916 tree type = context->outer_type;
917 HOST_WIDE_INT offset = context->offset;
919 /* Find the sub-object the constant actually refers to and mark whether it is
920 an artificial one (as opposed to a user-defined one). */
921 while (true)
923 HOST_WIDE_INT pos, size;
924 tree fld;
926 /* On a match, just return what we found. */
927 if (TREE_CODE (type) == TREE_CODE (expected_type)
928 && types_same_for_odr (type, expected_type))
930 /* A type cannot contain itself at a non-zero offset. In that case
931 just give up. */
932 if (offset != 0)
933 goto give_up;
934 gcc_assert (offset == 0);
935 return true;
938 /* Walk fields and find the corresponding one at OFFSET. */
939 if (TREE_CODE (type) == RECORD_TYPE)
941 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
943 if (TREE_CODE (fld) != FIELD_DECL)
944 continue;
946 pos = int_bit_position (fld);
947 size = tree_to_uhwi (DECL_SIZE (fld));
948 if (pos <= offset && (pos + size) > offset)
949 break;
952 if (!fld)
953 goto give_up;
955 type = TREE_TYPE (fld);
956 offset -= pos;
957 /* DECL_ARTIFICIAL represents a basetype. */
958 if (!DECL_ARTIFICIAL (fld))
960 context->outer_type = type;
961 context->offset = offset;
962 /* As soon as we see a field containing the type,
963 we know we are not looking for derivations. */
964 context->maybe_derived_type = false;
967 else if (TREE_CODE (type) == ARRAY_TYPE)
969 tree subtype = TREE_TYPE (type);
971 /* Give up if we don't know array size. */
972 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
973 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
974 goto give_up;
975 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
976 type = subtype;
977 context->outer_type = type;
978 context->offset = offset;
979 context->maybe_derived_type = false;
981 /* Give up on anything else. */
982 else
983 goto give_up;
986 /* If we failed to find subtype we look for, give up and fall back to the
987 most generic query. */
988 give_up:
989 context->outer_type = expected_type;
990 context->offset = 0;
991 context->maybe_derived_type = true;
992 return false;
995 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
997 static bool
998 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
999 tree otr_type)
1001 ipa_polymorphic_call_context context = {offset, outer_type,
1002 false, true};
1003 return get_class_context (&context, otr_type);
1006 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1008 static tree
1009 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1010 tree vtable)
1012 tree v = BINFO_VTABLE (binfo);
1013 int i;
1014 tree base_binfo;
1015 unsigned HOST_WIDE_INT this_offset;
1017 if (v)
1019 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1020 gcc_unreachable ();
1022 if (offset == this_offset
1023 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1024 return binfo;
1027 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1028 if (polymorphic_type_binfo_p (base_binfo))
1030 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1031 if (base_binfo)
1032 return base_binfo;
1034 return NULL;
1037 /* T is a known constant value of a virtual table pointer.
1038 Store the virtual table to V and its offset to OFFSET.
1039 Return false if T does not look like a virtual table reference. */
1041 bool
1042 vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
1044 /* We expect &MEM[(void *)&virtual_table + 16B].
1045 We obtain the object's BINFO from the context of the virtual table.
1046 This one contains a pointer to the virtual table represented via
1047 POINTER_PLUS_EXPR. Verify that this pointer matches what
1048 we propagated through.
1050 In the case of virtual inheritance, the virtual tables may
1051 be nested, i.e. the offset may be different from 16 and we may
1052 need to dive into the type representation. */
1053 if (TREE_CODE (t) == ADDR_EXPR
1054 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1055 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1056 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1057 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1058 == VAR_DECL)
1059 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1060 (TREE_OPERAND (t, 0), 0), 0)))
1062 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1063 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1064 return true;
1067 /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR.
1068 We need to handle it when T comes from a static variable initializer or
1069 from a BINFO. */
1070 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1072 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1073 t = TREE_OPERAND (t, 0);
1075 else
1076 *offset = 0;
1078 if (TREE_CODE (t) != ADDR_EXPR)
1079 return false;
1080 *v = TREE_OPERAND (t, 0);
1081 return true;
1084 /* T is a known constant value of a virtual table pointer. Return the BINFO of the
1085 instance type. */
1087 tree
1088 vtable_pointer_value_to_binfo (tree t)
1090 tree vtable;
1091 unsigned HOST_WIDE_INT offset;
1093 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1094 return NULL_TREE;
1096 /* FIXME: for stores of construction vtables we return NULL,
1097 because we do not have a BINFO for those. Eventually we should fix
1098 our representation to allow this case to be handled, too.
1099 In the case we see a store of a BINFO, we may however assume
1100 that standard folding will be able to cope with it. */
1101 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1102 offset, vtable);
1105 /* Produce a polymorphic call context for a call to a method of an instance
1106 that is located within BASE (which is assumed to be a decl) at OFFSET. */
1108 static void
1109 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1110 tree base, HOST_WIDE_INT offset)
1112 gcc_assert (DECL_P (base));
1114 context->outer_type = TREE_TYPE (base);
1115 context->offset = offset;
1116 /* Make very conservative assumption that all objects
1117 may be in construction.
1118 TODO: ipa-prop already contains code to tell better.
1119 merge it later. */
1120 context->maybe_in_construction = true;
1121 context->maybe_derived_type = false;
1124 /* CST is an invariant (the address of a decl); try to get a meaningful
1125 polymorphic call context for a polymorphic call to a method of an
1126 instance of OTR_TYPE that is located at OFFSET within this invariant.
1127 Return FALSE if nothing meaningful can be found. */
1129 bool
1130 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1131 tree cst,
1132 tree otr_type,
1133 HOST_WIDE_INT offset)
1135 HOST_WIDE_INT offset2, size, max_size;
1136 tree base;
1138 if (TREE_CODE (cst) != ADDR_EXPR)
1139 return false;
1141 cst = TREE_OPERAND (cst, 0);
1142 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1143 if (!DECL_P (base) || max_size == -1 || max_size != size)
1144 return false;
1146 /* Only type inconsistent programs can have otr_type that is
1147 not part of outer type. */
1148 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1149 return false;
1151 get_polymorphic_call_info_for_decl (context, base, offset);
1152 return true;
1155 /* Given call REF in FNDECL, determine the class of the polymorphic
1156 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1157 Return the pointer to the object described by the context. */
1159 tree
1160 get_polymorphic_call_info (tree fndecl,
1161 tree ref,
1162 tree *otr_type,
1163 HOST_WIDE_INT *otr_token,
1164 ipa_polymorphic_call_context *context)
1166 tree base_pointer;
1167 *otr_type = obj_type_ref_class (ref);
1168 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1170 /* Set up basic info in case we find nothing interesting in the analysis. */
1171 context->outer_type = *otr_type;
1172 context->offset = 0;
1173 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1174 context->maybe_derived_type = true;
1175 context->maybe_in_construction = false;
1177 /* Walk SSA for outer object. */
1180 if (TREE_CODE (base_pointer) == SSA_NAME
1181 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1182 && SSA_NAME_DEF_STMT (base_pointer)
1183 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1185 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1186 STRIP_NOPS (base_pointer);
1188 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1190 HOST_WIDE_INT size, max_size;
1191 HOST_WIDE_INT offset2;
1192 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1193 &offset2, &size, &max_size);
1195 /* If this is a varying address, punt. */
1196 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1197 && max_size != -1
1198 && max_size == size)
1200 /* We found a dereference of a pointer. The type of the pointer
1201 and of the MEM_REF is meaningless, but we can look further. */
1202 if (TREE_CODE (base) == MEM_REF)
1204 base_pointer = TREE_OPERAND (base, 0);
1205 context->offset
1206 += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
1207 context->outer_type = NULL;
1209 /* We found base object. In this case the outer_type
1210 is known. */
1211 else if (DECL_P (base))
1213 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1215 /* Only type inconsistent programs can have otr_type that is
1216 not part of outer type. */
1217 if (!contains_type_p (TREE_TYPE (base),
1218 context->offset + offset2, *otr_type))
1220 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1221 code sequences; we arrange the calls to be builtin_unreachable
1222 later. */
1223 *otr_token = INT_MAX;
1224 return base_pointer;
1226 get_polymorphic_call_info_for_decl (context, base,
1227 context->offset + offset2);
1228 return NULL;
1230 else
1231 break;
1233 else
1234 break;
1236 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1237 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1239 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1240 * BITS_PER_UNIT;
1241 base_pointer = TREE_OPERAND (base_pointer, 0);
1243 else
1244 break;
1246 while (true);
1248 /* Try to determine type of the outer object. */
1249 if (TREE_CODE (base_pointer) == SSA_NAME
1250 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1251 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1253 /* See if parameter is THIS pointer of a method. */
1254 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1255 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1257 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1258 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1260 /* Dynamic casting has possibly upcast the type
1261 in the hierarchy. In this case the outer type is less
1262 informative than the inner type and we should forget
1263 about it. */
1264 if (!contains_type_p (context->outer_type, context->offset,
1265 *otr_type))
1267 context->outer_type = NULL;
1268 return base_pointer;
1271 /* If the function is a constructor or destructor, then
1272 the type is possibly in construction, but we know
1273 it is not a derived type. */
1274 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1275 || DECL_CXX_DESTRUCTOR_P (fndecl))
1277 context->maybe_in_construction = true;
1278 context->maybe_derived_type = false;
1280 else
1282 context->maybe_derived_type = true;
1283 context->maybe_in_construction = false;
1285 return base_pointer;
1287 /* Non-PODs passed by value are really passed by invisible
1288 reference. In this case we also know the type of the
1289 object. */
1290 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1292 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1293 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1294 /* Only type inconsistent programs can have otr_type that is
1295 not part of outer type. */
1296 if (!contains_type_p (context->outer_type, context->offset,
1297 *otr_type))
1299 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1300 code sequences; we arrange the calls to be builtin_unreachable
1301 later. */
1302 *otr_token = INT_MAX;
1303 return base_pointer;
1305 context->maybe_derived_type = false;
1306 context->maybe_in_construction = false;
1307 return base_pointer;
1310 /* TODO: There are multiple ways to derive a type. For instance,
1311 BASE_POINTER may be passed to a constructor call prior to our reference.
1312 We do not do this kind of flow-sensitive analysis yet. */
1313 return base_pointer;
1316 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1317 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1318 and insert them into NODES.
1320 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
1322 static void
1323 record_targets_from_bases (tree otr_type,
1324 HOST_WIDE_INT otr_token,
1325 tree outer_type,
1326 HOST_WIDE_INT offset,
1327 vec <cgraph_node *> &nodes,
1328 pointer_set_t *inserted,
1329 pointer_set_t *matched_vtables,
1330 bool *completep)
1332 while (true)
1334 HOST_WIDE_INT pos, size;
1335 tree base_binfo;
1336 tree fld;
1338 if (types_same_for_odr (outer_type, otr_type))
1339 return;
1341 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1343 if (TREE_CODE (fld) != FIELD_DECL)
1344 continue;
1346 pos = int_bit_position (fld);
1347 size = tree_to_shwi (DECL_SIZE (fld));
1348 if (pos <= offset && (pos + size) > offset
1349 /* Do not get confused by zero sized bases. */
1350 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1351 break;
1353 /* Within a class type we should always find corresponding fields. */
1354 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1356 /* Non-base types should have been stripped by outer_class_type. */
1357 gcc_assert (DECL_ARTIFICIAL (fld));
1359 outer_type = TREE_TYPE (fld);
1360 offset -= pos;
1362 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1363 offset, otr_type);
1364 if (!base_binfo)
1366 gcc_assert (odr_violation_reported);
1367 return;
1369 gcc_assert (base_binfo);
1370 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1372 bool can_refer;
1373 tree target = gimple_get_virt_method_for_binfo (otr_token,
1374 base_binfo,
1375 &can_refer);
1376 maybe_record_node (nodes, target, inserted, can_refer, completep);
1377 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1382 /* When virtual table is removed, we may need to flush the cache. */
1384 static void
1385 devirt_variable_node_removal_hook (varpool_node *n,
1386 void *d ATTRIBUTE_UNUSED)
1388 if (cached_polymorphic_call_targets
1389 && DECL_VIRTUAL_P (n->decl)
1390 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1391 free_polymorphic_call_targets_hash ();
1394 /* Return the vector containing possible targets of a polymorphic call of type
1395 OTR_TYPE calling method OTR_TOKEN within the type OTR_OUTER_TYPE at OFFSET.
1396 If INCLUDE_BASES is true, also walk the base types of OUTER_TYPE containing
1397 OTR_TYPE and include their virtual methods. This is useful for types
1398 possibly in construction or destruction where the virtual table may
1399 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
1400 us walk the inheritance graph for all derivations.
1402 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1403 undefined and should be redirected to unreachable.
1405 If COMPLETEP is non-NULL, store true if the list is complete.
1406 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
1407 in the target cache. If the user needs to visit every target list
1408 just once, it can memoize them.
1410 NONCONSTRUCTION_TARGETS specifies the number of targets under the assumption
1411 that the type is not in construction. Those targets appear first in the
1412 returned vector.
1414 The returned vector is placed into the cache. It is NOT the caller's
1415 responsibility to free it. The vector can be freed on a cgraph_remove_node
1416 call if the particular node is a virtual function present in the cache. */
1418 vec <cgraph_node *>
1419 possible_polymorphic_call_targets (tree otr_type,
1420 HOST_WIDE_INT otr_token,
1421 ipa_polymorphic_call_context context,
1422 bool *completep,
1423 void **cache_token,
1424 int *nonconstruction_targetsp)
1426 static struct cgraph_node_hook_list *node_removal_hook_holder;
1427 pointer_set_t *inserted;
1428 pointer_set_t *matched_vtables;
1429 vec <cgraph_node *> nodes = vNULL;
1430 odr_type type, outer_type;
1431 polymorphic_call_target_d key;
1432 polymorphic_call_target_d **slot;
1433 unsigned int i;
1434 tree binfo, target;
1435 bool complete;
1436 bool can_refer;
1438 /* If ODR is not initialized, return empty incomplete list. */
1439 if (!odr_hash.is_created ())
1441 if (completep)
1442 *completep = false;
1443 if (nonconstruction_targetsp)
1444 *nonconstruction_targetsp = 0;
1445 return nodes;
1448 /* If we hit type inconsistency, just return empty list of targets. */
1449 if (otr_token == INT_MAX)
1451 if (completep)
1452 *completep = true;
1453 if (nonconstruction_targetsp)
1454 *nonconstruction_targetsp = 0;
1455 return nodes;
1458 type = get_odr_type (otr_type, true);
1460 /* Lookup the outer class type we want to walk. */
1461 if (context.outer_type
1462 && !get_class_context (&context, otr_type))
1464 if (completep)
1465 *completep = false;
1466 if (nonconstruction_targetsp)
1467 *nonconstruction_targetsp = 0;
1468 return nodes;
1471 /* We canonicalize our query, so we do not need extra hashtable entries. */
1473 /* Without an outer type, we have no use for the offset. Just do the
1474 basic search from the inner type. */
1475 if (!context.outer_type)
1477 context.outer_type = otr_type;
1478 context.offset = 0;
1480 /* We need to update our hierarchy if the type does not exist. */
1481 outer_type = get_odr_type (context.outer_type, true);
1482 /* If outer and inner type match, there are no bases to see. */
1483 if (type == outer_type)
1484 context.maybe_in_construction = false;
1485 /* If the type is final, there are no derivations. */
1486 if (TYPE_FINAL_P (outer_type->type))
1487 context.maybe_derived_type = false;
1489 /* Initialize query cache. */
1490 if (!cached_polymorphic_call_targets)
1492 cached_polymorphic_call_targets = pointer_set_create ();
1493 polymorphic_call_target_hash.create (23);
1494 if (!node_removal_hook_holder)
1496 node_removal_hook_holder =
1497 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1498 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1499 NULL);
1503 /* Lookup cached answer. */
1504 key.type = type;
1505 key.otr_token = otr_token;
1506 key.context = context;
1507 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1508 if (cache_token)
1509 *cache_token = (void *)*slot;
1510 if (*slot)
1512 if (completep)
1513 *completep = (*slot)->complete;
1514 if (nonconstruction_targetsp)
1515 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1516 return (*slot)->targets;
1519 complete = true;
1521 /* Do actual search. */
1522 timevar_push (TV_IPA_VIRTUAL_CALL);
1523 *slot = XCNEW (polymorphic_call_target_d);
1524 if (cache_token)
1525 *cache_token = (void *)*slot;
1526 (*slot)->type = type;
1527 (*slot)->otr_token = otr_token;
1528 (*slot)->context = context;
1530 inserted = pointer_set_create ();
1531 matched_vtables = pointer_set_create ();
1533 /* First see virtual method of type itself. */
1534 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1535 context.offset, otr_type);
1536 if (binfo)
1537 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1538 &can_refer);
1539 else
1541 gcc_assert (odr_violation_reported);
1542 target = NULL;
1545 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1547 if (target)
1549 /* If we got a final method, we don't need
1550 to walk derivations. */
1551 if (DECL_FINAL_P (target))
1552 context.maybe_derived_type = false;
1554 else
1555 gcc_assert (!complete);
1557 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1559 /* Next walk recursively all derived types. */
1560 if (context.maybe_derived_type)
1562 /* For anonymous namespace types we can attempt to build the full type.
1563 All derivations must be in this unit (unless we see a partial unit). */
1564 if (!type->anonymous_namespace || flag_ltrans)
1565 complete = false;
1566 for (i = 0; i < outer_type->derived_types.length(); i++)
1567 possible_polymorphic_call_targets_1 (nodes, inserted,
1568 matched_vtables,
1569 otr_type,
1570 outer_type->derived_types[i],
1571 otr_token, outer_type->type,
1572 context.offset, &complete);
1575 /* Finally walk bases, if asked to. */
1576 (*slot)->nonconstruction_targets = nodes.length();
1577 if (context.maybe_in_construction)
1578 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1579 context.offset, nodes, inserted,
1580 matched_vtables, &complete);
1582 (*slot)->targets = nodes;
1583 (*slot)->complete = complete;
1584 if (completep)
1585 *completep = complete;
1586 if (nonconstruction_targetsp)
1587 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1589 pointer_set_destroy (inserted);
1590 pointer_set_destroy (matched_vtables);
1591 timevar_pop (TV_IPA_VIRTUAL_CALL);
1592 return nodes;
1595 /* Dump all possible targets of a polymorphic call. */
1597 void
1598 dump_possible_polymorphic_call_targets (FILE *f,
1599 tree otr_type,
1600 HOST_WIDE_INT otr_token,
1601 const ipa_polymorphic_call_context &ctx)
1603 vec <cgraph_node *> targets;
1604 bool final;
1605 odr_type type = get_odr_type (otr_type, false);
1606 unsigned int i;
1607 int nonconstruction;
1609 if (!type)
1610 return;
1611 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1612 ctx,
1613 &final, NULL, &nonconstruction);
1614 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1615 print_generic_expr (f, type->type, TDF_SLIM);
1616 fprintf (f, " token %i\n", (int)otr_token);
1617 if (ctx.outer_type || ctx.offset)
1619 fprintf (f, " Contained in type:");
1620 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1621 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1622 ctx.offset);
1625 fprintf (f, " %s%s%s\n ",
1626 final ? "This is a complete list." :
1627 "This is partial list; extra targets may be defined in other units.",
1628 ctx.maybe_in_construction ? " (base types included)" : "",
1629 ctx.maybe_derived_type ? " (derived types included)" : "");
1630 for (i = 0; i < targets.length (); i++)
1632 char *name = NULL;
1633 if (i == (unsigned)nonconstruction)
1634 fprintf (f, "\n If the type is in construction,"
1635 " then additional targets are:\n"
1636 " ");
1637 if (in_lto_p)
1638 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1639 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1640 if (in_lto_p)
1641 free (name);
1642 if (!targets[i]->definition)
1643 fprintf (f, " (no definition%s)",
1644 DECL_DECLARED_INLINE_P (targets[i]->decl)
1645 ? " inline" : "");
1647 fprintf (f, "\n\n");
1651 /* Return true if N can possibly be a target of a polymorphic call of
1652 OTR_TYPE/OTR_TOKEN. */
1654 bool
1655 possible_polymorphic_call_target_p (tree otr_type,
1656 HOST_WIDE_INT otr_token,
1657 const ipa_polymorphic_call_context &ctx,
1658 struct cgraph_node *n)
1660 vec <cgraph_node *> targets;
1661 unsigned int i;
1662 enum built_in_function fcode;
1663 bool final;
1665 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1666 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1667 == BUILT_IN_UNREACHABLE
1668 || fcode == BUILT_IN_TRAP))
1669 return true;
1671 if (!odr_hash.is_created ())
1672 return true;
1673 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1674 for (i = 0; i < targets.length (); i++)
1675 if (symtab_semantically_equivalent_p (n, targets[i]))
1676 return true;
1678 /* At the moment we allow the middle end to dig out new external declarations
1679 as targets of polymorphic calls. */
1680 if (!final && !n->definition)
1681 return true;
1682 return false;
1686 /* After callgraph construction new external nodes may appear.
1687 Add them into the graph. */
1689 void
1690 update_type_inheritance_graph (void)
1692 struct cgraph_node *n;
1694 if (!odr_hash.is_created ())
1695 return;
1696 free_polymorphic_call_targets_hash ();
1697 timevar_push (TV_IPA_INHERITANCE);
1698 /* We reconstruct the graph starting from the types of all methods seen
1699 in the unit. */
1700 FOR_EACH_FUNCTION (n)
1701 if (DECL_VIRTUAL_P (n->decl)
1702 && !n->definition
1703 && symtab_real_symbol_p (n))
1704 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1705 timevar_pop (TV_IPA_INHERITANCE);
1709 /* Return true if N looks like a likely target of a polymorphic call.
1710 Rule out cxa_pure_virtual, noreturns, functions declared cold and
1711 other obvious cases. */
1713 bool
1714 likely_target_p (struct cgraph_node *n)
1716 int flags;
1717 /* cxa_pure_virtual and similar things are not likely. */
1718 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1719 return false;
1720 flags = flags_from_decl_or_type (n->decl);
1721 if (flags & ECF_NORETURN)
1722 return false;
1723 if (lookup_attribute ("cold",
1724 DECL_ATTRIBUTES (n->decl)))
1725 return false;
1726 if (n->frequency < NODE_FREQUENCY_NORMAL)
1727 return false;
1728 return true;
1731 /* The ipa-devirt pass.
1732 When a polymorphic call has only one likely target in the unit,
1733 turn it into a speculative call. */
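/* As a hedged sketch only (the actual transformation is performed by
   cgraph_turn_edge_to_speculative below and expanded later), an indirect
   call whose single likely target is A::f

       fn = obj->vptr[token];
       fn (obj);

   conceptually becomes

       fn = obj->vptr[token];
       if (fn == A::f)
         A::f (obj);   // direct call, may be inlined
       else
         fn (obj);     // original indirect call kept as a fallback

   with the speculative direct edge given 8/10 of the original count and
   frequency, as done at the end of the loop below.  */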
1735 static unsigned int
1736 ipa_devirt (void)
1738 struct cgraph_node *n;
1739 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1740 struct cgraph_edge *e;
1742 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1743 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1744 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
1746 FOR_EACH_DEFINED_FUNCTION (n)
1748 bool update = false;
1749 if (dump_file && n->indirect_calls)
1750 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
1751 n->name (), n->order);
1752 for (e = n->indirect_calls; e; e = e->next_callee)
1753 if (e->indirect_info->polymorphic)
1755 struct cgraph_node *likely_target = NULL;
1756 void *cache_token;
1757 bool final;
1758 int nonconstruction_targets;
1759 vec <cgraph_node *>targets
1760 = possible_polymorphic_call_targets
1761 (e, &final, &cache_token, &nonconstruction_targets);
1762 unsigned int i;
1764 if (dump_file)
1765 dump_possible_polymorphic_call_targets
1766 (dump_file, e);
1768 npolymorphic++;
1770 if (!cgraph_maybe_hot_edge_p (e))
1772 if (dump_file)
1773 fprintf (dump_file, "Call is cold\n\n");
1774 ncold++;
1775 continue;
1777 if (e->speculative)
1779 if (dump_file)
1780 fprintf (dump_file, "Call is already speculated\n\n");
1781 nspeculated++;
1783 /* When dumping see if we agree with speculation. */
1784 if (!dump_file)
1785 continue;
1787 if (pointer_set_contains (bad_call_targets,
1788 cache_token))
1790 if (dump_file)
1791 fprintf (dump_file, "Target list is known to be useless\n\n");
1792 nmultiple++;
1793 continue;
1795 for (i = 0; i < targets.length (); i++)
1796 if (likely_target_p (targets[i]))
1798 if (likely_target)
1800 if (i < (unsigned) nonconstruction_targets)
1802 likely_target = NULL;
1803 if (dump_file)
1804 fprintf (dump_file, "More than one likely target\n\n");
1805 nmultiple++;
1807 break;
1809 likely_target = targets[i];
1811 if (!likely_target)
1813 pointer_set_insert (bad_call_targets, cache_token);
1814 continue;
1816 /* This is reached only when dumping; check if we agree or disagree
1817 with the speculation. */
1818 if (e->speculative)
1820 struct cgraph_edge *e2;
1821 struct ipa_ref *ref;
1822 cgraph_speculative_call_info (e, e2, e, ref);
1823 if (cgraph_function_or_thunk_node (e2->callee, NULL)
1824 == cgraph_function_or_thunk_node (likely_target, NULL))
1826 fprintf (dump_file, "We agree with speculation\n\n");
1827 nok++;
1829 else
1831 fprintf (dump_file, "We disagree with speculation\n\n");
1832 nwrong++;
1834 continue;
1836 if (!likely_target->definition)
1838 if (dump_file)
1839 fprintf (dump_file, "Target is not a definition\n\n");
1840 nnotdefined++;
1841 continue;
1843 /* Do not introduce new references to external symbols. While we
1844 can handle these just fine, it is common for programs to be
1845 built incorrectly, with headers declaring methods the program is
1846 not actually linked with. */
1847 if (DECL_EXTERNAL (likely_target->decl))
1849 if (dump_file)
1850 fprintf (dump_file, "Target is external\n\n");
1851 nexternal++;
1852 continue;
1854 /* Don't use an implicitly-declared destructor (c++/58678). */
1855 struct cgraph_node *non_thunk_target
1856 = cgraph_function_node (likely_target);
1857 if (DECL_ARTIFICIAL (non_thunk_target->decl)
1858 && DECL_COMDAT (non_thunk_target->decl))
1860 if (dump_file)
1861 fprintf (dump_file, "Target is artificial\n\n");
1862 nartificial++;
1863 continue;
1865 if (cgraph_function_body_availability (likely_target)
1866 <= AVAIL_OVERWRITABLE
1867 && symtab_can_be_discarded (likely_target))
1869 if (dump_file)
1870 fprintf (dump_file, "Target is overwritable\n\n");
1871 noverwritable++;
1872 continue;
1874 else
1876 if (dump_file)
1877 fprintf (dump_file,
1878 "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
1879 n->name (), n->order,
1880 likely_target->name (),
1881 likely_target->order);
1882 if (!symtab_can_be_discarded (likely_target))
1884 cgraph_node *alias;
1885 alias = cgraph (symtab_nonoverwritable_alias
1886 (likely_target));
1887 if (alias)
1888 likely_target = alias;
1890 nconverted++;
1891 update = true;
1892 cgraph_turn_edge_to_speculative
1893 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
1896 if (update)
1897 inline_update_overall_summary (n);
1899 pointer_set_destroy (bad_call_targets);
1901 if (dump_file)
1902 fprintf (dump_file,
1903 "%i polymorphic calls, %i devirtualized,"
1904 " %i speculatively devirtualized, %i cold\n"
1905 "%i have multiple targets, %i overwritable,"
1906 " %i already speculated (%i agree, %i disagree),"
1907 " %i external, %i not defined, %i artificial\n",
1908 npolymorphic, ndevirtualized, nconverted, ncold,
1909 nmultiple, noverwritable, nspeculated, nok, nwrong,
1910 nexternal, nnotdefined, nartificial);
1911 return ndevirtualized ? TODO_remove_functions : 0;
1914 /* Gate for speculative IPA devirtualization optimization. */
1916 static bool
1917 gate_ipa_devirt (void)
1919 return (flag_devirtualize
1920 && flag_devirtualize_speculatively
1921 && optimize);
1924 namespace {
1926 const pass_data pass_data_ipa_devirt =
1928 IPA_PASS, /* type */
1929 "devirt", /* name */
1930 OPTGROUP_NONE, /* optinfo_flags */
1931 true, /* has_gate */
1932 true, /* has_execute */
1933 TV_IPA_DEVIRT, /* tv_id */
1934 0, /* properties_required */
1935 0, /* properties_provided */
1936 0, /* properties_destroyed */
1937 0, /* todo_flags_start */
1938 ( TODO_dump_symtab ), /* todo_flags_finish */
1941 class pass_ipa_devirt : public ipa_opt_pass_d
1943 public:
1944 pass_ipa_devirt (gcc::context *ctxt)
1945 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
1946 NULL, /* generate_summary */
1947 NULL, /* write_summary */
1948 NULL, /* read_summary */
1949 NULL, /* write_optimization_summary */
1950 NULL, /* read_optimization_summary */
1951 NULL, /* stmt_fixup */
1952 0, /* function_transform_todo_flags_start */
1953 NULL, /* function_transform */
1954 NULL) /* variable_transform */
1957 /* opt_pass methods: */
1958 bool gate () { return gate_ipa_devirt (); }
1959 unsigned int execute () { return ipa_devirt (); }
1961 }; // class pass_ipa_devirt
1963 } // anon namespace
1965 ipa_opt_pass_d *
1966 make_pass_ipa_devirt (gcc::context *ctxt)
1968 return new pass_ipa_devirt (ctxt);
1971 #include "gt-ipa-devirt.h"