Fix dot dump bug
[official-gcc.git] / gcc / ipa-devirt.c
bloba90d1dcd736b04040c4fba32e43704e2ef904631
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
132 #include "dbgcnt.h"
134 static bool odr_violation_reported = false;
136 /* Dummy polymorphic call context. */
138 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
139 = {0, NULL, false, true};
141 /* Pointer set of all call targets appearing in the cache. */
142 static pointer_set_t *cached_polymorphic_call_targets;
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type; the canonical tree node representing this ODR type.  */
  tree type;
  /* All bases.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  Only populated during LTO,
     where tree merging may leave several tree nodes for one ODR type.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  Mirrors TYPES for
     constant-time duplicate checks.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
};
/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated by the type
   while other represents bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of type
   and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}
186 /* Return TRUE if all derived types of T are known and thus
187 we may consider the walk of derived type complete.
189 This is typically true only for final anonymous namespace types and types
190 defined within functions (that may be COMDAT and thus shared across units,
191 but with the same set of derived types). */
193 static bool
194 type_all_derivations_known_p (tree t)
196 if (TYPE_FINAL_P (t))
197 return true;
198 if (flag_ltrans)
199 return false;
200 if (type_in_anonymous_namespace_p (t))
201 return true;
202 return (decl_function_context (TYPE_NAME (t)) != NULL);
205 /* Return TURE if type's constructors are all visible. */
207 static bool
208 type_all_ctors_visible_p (tree t)
210 return !flag_ltrans
211 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
212 /* We can not always use type_all_derivations_known_p.
213 For function local types we must assume case where
214 the function is COMDAT and shared in between units.
216 TODO: These cases are quite easy to get, but we need
217 to keep track of C++ privatizing via -Wno-weak
218 as well as the IPA privatizing. */
219 && type_in_anonymous_namespace_p (t);
222 /* Return TRUE if type may have instance. */
224 static bool
225 type_possibly_instantiated_p (tree t)
227 tree vtable;
228 varpool_node *vnode;
230 /* TODO: Add abstract types here. */
231 if (!type_all_ctors_visible_p (t))
232 return true;
234 vtable = BINFO_VTABLE (TYPE_BINFO (t));
235 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
236 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
237 vnode = varpool_get_node (vtable);
238 return vnode && vnode->definition;
/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  /* Hash table stores odr_type_d nodes ...  */
  typedef odr_type_d value_type;
  /* ... and is queried by plain tree type nodes.  */
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
/* Produce hash based on type name.  T must be a main variant.

   The hash must agree for all tree nodes representing the same ODR type,
   so outside LTO (where main variants are unique) we hash the pointer,
   and in LTO we hash the assembler name of the virtual table, which is
   shared by all representations of the type.  */

hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      /* Vtables of bases are represented as offset into the main
	 vtable; mix in the offset and strip down to the VAR_DECL.  */
      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      /* Assembler names are merged across units, so they identify the
	 ODR type even when the VAR_DECLs differ.  */
      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
/* Return the computed hashcode for ODR_TYPE.  Delegates to
   hash_type_name so stored nodes and lookup keys hash alike.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}
297 /* Compare types T1 and T2 and return true if they are
298 equivalent. */
300 inline bool
301 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
303 tree t2 = const_cast <tree> (ct2);
305 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
306 if (t1->type == t2)
307 return true;
308 if (!in_lto_p)
309 return false;
310 return types_same_for_odr (t1->type, t2);
313 /* Free ODR type V. */
315 inline void
316 odr_hasher::remove (value_type *v)
318 v->bases.release ();
319 v->derived_types.release ();
320 if (v->types_set)
321 pointer_set_destroy (v->types_set);
322 ggc_free (v);
325 /* ODR type hash used to lookup ODR type based on tree type node. */
327 typedef hash_table <odr_hasher> odr_hash_type;
328 static odr_hash_type odr_hash;
330 /* ODR types are also stored into ODR_TYPE vector to allow consistent
331 walking. Bases appear before derived types. Vector is garbage collected
332 so we won't end up visiting empty types. */
334 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
335 #define odr_types (*odr_types_ptr)
337 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
338 from VAL->type. This may happen in LTO where tree merging did not merge
339 all variants of the same type. It may or may not mean the ODR violation.
340 Add it to the list of duplicates and warn on some violations. */
342 static void
343 add_type_duplicate (odr_type val, tree type)
345 if (!val->types_set)
346 val->types_set = pointer_set_create ();
348 /* See if this duplicate is new. */
349 if (!pointer_set_insert (val->types_set, type))
351 bool merge = true;
352 bool base_mismatch = false;
353 gcc_assert (in_lto_p);
354 vec_safe_push (val->types, type);
355 unsigned int i,j;
357 /* First we compare memory layout. */
358 if (!types_compatible_p (val->type, type))
360 merge = false;
361 odr_violation_reported = true;
362 if (BINFO_VTABLE (TYPE_BINFO (val->type))
363 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
364 "type %qD violates one definition rule ",
365 type))
366 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
367 "a type with the same name but different layout is "
368 "defined in another translation unit");
369 if (cgraph_dump_file)
371 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
373 print_node (cgraph_dump_file, "", val->type, 0);
374 putc ('\n',cgraph_dump_file);
375 print_node (cgraph_dump_file, "", type, 0);
376 putc ('\n',cgraph_dump_file);
380 /* Next sanity check that bases are the same. If not, we will end
381 up producing wrong answers. */
382 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
383 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
385 odr_type base = get_odr_type
386 (BINFO_TYPE
387 (BINFO_BASE_BINFO (TYPE_BINFO (type),
388 i)),
389 true);
390 if (val->bases.length () <= j || val->bases[j] != base)
391 base_mismatch = true;
392 j++;
394 if (base_mismatch)
396 merge = false;
397 odr_violation_reported = true;
399 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
400 "type %qD violates one definition rule ",
401 type))
402 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
403 "a type with the same name but different bases is "
404 "defined in another translation unit");
405 if (cgraph_dump_file)
407 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
409 print_node (cgraph_dump_file, "", val->type, 0);
410 putc ('\n',cgraph_dump_file);
411 print_node (cgraph_dump_file, "", type, 0);
412 putc ('\n',cgraph_dump_file);
416 /* Regularize things a little. During LTO same types may come with
417 different BINFOs. Either because their virtual table was
418 not merged by tree merging and only later at decl merging or
419 because one type comes with external vtable, while other
420 with internal. We want to merge equivalent binfos to conserve
421 memory and streaming overhead.
423 The external vtables are more harmful: they contain references
424 to external declarations of methods that may be defined in the
425 merged LTO unit. For this reason we absolutely need to remove
426 them and replace by internal variants. Not doing so will lead
427 to incomplete answers from possible_polymorphic_call_targets. */
428 if (!flag_ltrans && merge)
430 tree master_binfo = TYPE_BINFO (val->type);
431 tree v1 = BINFO_VTABLE (master_binfo);
432 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
434 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
436 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
437 && operand_equal_p (TREE_OPERAND (v1, 1),
438 TREE_OPERAND (v2, 1), 0));
439 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
440 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
442 gcc_assert (DECL_ASSEMBLER_NAME (v1)
443 == DECL_ASSEMBLER_NAME (v2));
445 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
447 unsigned int i;
449 TYPE_BINFO (val->type) = TYPE_BINFO (type);
450 for (i = 0; i < val->types->length (); i++)
452 if (TYPE_BINFO ((*val->types)[i])
453 == master_binfo)
454 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
457 else
458 TYPE_BINFO (type) = master_binfo;
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  Returns NULL only when INSERT is false and
   TYPE was never recorded.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;

  type = TYPE_MAIN_VARIANT (type);
  gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
  hash = hash_type_name (type);
  slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
	 the same ODR type.  */
      if (val->type != type)
	add_type_duplicate (val, type);
    }
  else
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types; others are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    /* Recursion guarantees bases get smaller ids than TYPE.  */
	    odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
									i)),
					  true);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	  }

      /* First record bases, then add into array so ids are increasing.  */
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      /* Recurse with one extra level of indentation per derivation.  */
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}
/* Dump the type inheritance graph to F: first every tree rooted at a
   baseless type, then the LTO duplicate lists with their enclosing
   contexts.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  /* Roots are types with no recorded bases; dump_odr_type recurses
     into derived types.  */
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->types && odr_types[i]->types->length ())
	{
	  unsigned int j;
	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
	  print_node (f, "", odr_types[i]->type, 0);
	  for (j = 0; j < odr_types[i]->types->length (); j++)
	    {
	      tree t;
	      fprintf (f, "duplicate #%i\n", j);
	      print_node (f, "", (*odr_types[i]->types)[j], 0);
	      t = (*odr_types[i]->types)[j];
	      /* Also dump the chain of enclosing contexts to make the
		 source of the duplicate visible.  */
	      while (TYPE_P (t) && TYPE_CONTEXT (t))
		{
		  t = TYPE_CONTEXT (t);
		  print_node (f, "", t, 0);
		}
	      putc ('\n',f);
	    }
	}
    }
}
593 /* Given method type T, return type of class it belongs to.
594 Lookup this pointer and get its type. */
596 tree
597 method_class_type (tree t)
599 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
600 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
602 return TREE_TYPE (first_parm_type);
/* Initialize IPA devirt and build inheritance tree graph.  Idempotent:
   returns immediately if the ODR hash already exists.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash.is_created ())
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);

  /* We reconstruct the graph starting from the types of all methods seen
     in the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */
    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (DECL_CONTEXT (n->decl), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
661 /* Return true if N has reference from live virtual table
662 (and thus can be a destination of polymorphic call).
663 Be conservatively correct when callgraph is not built or
664 if the method may be referred externally. */
666 static bool
667 referenced_from_vtable_p (struct cgraph_node *node)
669 int i;
670 struct ipa_ref *ref;
671 bool found = false;
673 if (node->externally_visible
674 || node->used_from_other_partition)
675 return true;
677 /* Keep this test constant time.
678 It is unlikely this can happen except for the case where speculative
679 devirtualization introduced many speculative edges to this node.
680 In this case the target is very likely alive anyway. */
681 if (node->ref_list.referring.length () > 100)
682 return true;
684 /* We need references built. */
685 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
686 return true;
688 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
689 i, ref); i++)
691 if ((ref->use == IPA_REF_ALIAS
692 && referenced_from_vtable_p (cgraph (ref->referring)))
693 || (ref->use == IPA_REF_ADDR
694 && TREE_CODE (ref->referring->decl) == VAR_DECL
695 && DECL_VIRTUAL_P (ref->referring->decl)))
697 found = true;
698 break;
700 return found;
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_get_node (target);

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    /* Deliberately empty: dead anonymous-namespace method; neither
       recorded nor counted as an unknown target.  */
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      /* INSERTED deduplicates across repeated calls for one query.  */
      if (!pointer_set_insert (inserted, target))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
/* See if BINFO's type match OUTER_TYPE.  If so, lookup
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This match what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in. INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  /* Maintain the stack of vtable-carrying BINFOs; every push below is
     matched by a pop before returning from this frame.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Lookup BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  /* Only an ODR violation can make the lookup fail here.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_get_node (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* When collecting construction-vtable candidates we must only
	 test membership (contains) rather than insert, so the same
	 vtable can still be recorded by the main walk later.  */
      if (bases_to_consider
	  ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
	  : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     pointer_set_t *inserted,
				     pointer_set_t *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  /* Recurse into the whole derivation subtree.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Index of the called method in the vtable.  */
  HOST_WIDE_INT otr_token;
  /* Context (outer type, offset, flags) of the query.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call.  */
  odr_type type;
  /* Memoized answer: all possible targets.  */
  vec <cgraph_node *> targets;
  /* Number of targets valid when construction vtables are excluded;
     prefix of TARGETS.  */
  int nonconstruction_targets;
  /* True when TARGETS is known to be the complete list.  */
  bool complete;
};
/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  /* Entries are both stored and looked up as full query records.  */
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
/* Return the computed hashcode for ODR_QUERY.  Mixes the token, type id,
   outer type, offset and the two context flags — the same fields the
   equality predicate compares.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  hash = iterative_hash_host_wide_int
	   (odr_query->otr_token,
	    odr_query->type->id);
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
				   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  /* Pack both boolean flags into one final mixing step.  */
  return iterative_hash_hashval_t
	   (((int)odr_query->context.maybe_in_construction << 1)
	    | (int)odr_query->context.maybe_derived_type, hash);
}
989 /* Compare cache entries T1 and T2. */
991 inline bool
992 polymorphic_call_target_hasher::equal (const value_type *t1,
993 const compare_type *t2)
/* Two queries are equal when type, token and every component of the
   polymorphic context (offset, outer type, both flags) match.  */
995 return (t1->type == t2->type && t1->otr_token == t2->otr_token
996 && t1->context.offset == t2->context.offset
997 && t1->context.outer_type == t2->context.outer_type
998 && t1->context.maybe_in_construction
999 == t2->context.maybe_in_construction
1000 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
1003 /* Remove entry in polymorphic call target cache hash. */
1005 inline void
1006 polymorphic_call_target_hasher::remove (value_type *v)
/* Release the heap vector owned by the entry before freeing the entry
   itself; entries are allocated with XCNEW.  */
1008 v->targets.release ();
1009 free (v);
1012 /* Polymorphic call target query cache. */
1014 typedef hash_table <polymorphic_call_target_hasher>
1015 polymorphic_call_target_hash_type;
/* The single global cache instance; created lazily on first query.  */
1016 static polymorphic_call_target_hash_type polymorphic_call_target_hash;
1018 /* Destroy polymorphic call target query cache. */
1020 static void
1021 free_polymorphic_call_targets_hash ()
1023 if (cached_polymorphic_call_targets)
1025 polymorphic_call_target_hash.dispose ();
1026 pointer_set_destroy (cached_polymorphic_call_targets);
1027 cached_polymorphic_call_targets = NULL;
1031 /* When virtual function is removed, we may need to flush the cache. */
1033 static void
1034 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1036 if (cached_polymorphic_call_targets
1037 && pointer_set_contains (cached_polymorphic_call_targets, n))
1038 free_polymorphic_call_targets_hash ();
1041 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1042 is contained at CONTEXT->OFFSET. Walk the memory representation of
1043 CONTEXT->OUTER_TYPE and find the outermost class type that match
1044 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1045 to represent it.
1047 For example when CONTEXT represents type
1048 class A
1050 int a;
1051 class B b;
1053 and we look for type at offset sizeof(int), we end up with B and offset 0.
1054 If the same is produced by multiple inheritance, we end up with A and offset
1055 sizeof(int).
1057 If we can not find corresponding class, give up by setting
1058 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1059 Return true when lookup was sucesful. */
1061 static bool
1062 get_class_context (ipa_polymorphic_call_context *context,
1063 tree expected_type)
1065 tree type = context->outer_type;
1066 HOST_WIDE_INT offset = context->offset;
1068 /* Find the sub-object the constant actually refers to and mark whether it is
1069 an artificial one (as opposed to a user-defined one). */
1070 while (true)
1072 HOST_WIDE_INT pos, size;
1073 tree fld;
1075 /* On a match, just return what we found. */
1076 if (TREE_CODE (type) == TREE_CODE (expected_type)
1077 && types_same_for_odr (type, expected_type))
1079 /* Type can not contain itself on an non-zero offset. In that case
1080 just give up. */
1081 if (offset != 0)
1082 goto give_up;
1083 gcc_assert (offset == 0);
1084 return true;
1087 /* Walk fields and find corresponding on at OFFSET. */
1088 if (TREE_CODE (type) == RECORD_TYPE)
1090 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1092 if (TREE_CODE (fld) != FIELD_DECL)
1093 continue;
1095 pos = int_bit_position (fld);
1096 size = tree_to_uhwi (DECL_SIZE (fld));
1097 if (pos <= offset && (pos + size) > offset)
1098 break;
1101 if (!fld)
1102 goto give_up;
1104 type = TREE_TYPE (fld);
1105 offset -= pos;
1106 /* DECL_ARTIFICIAL represents a basetype. */
1107 if (!DECL_ARTIFICIAL (fld))
1109 context->outer_type = type;
1110 context->offset = offset;
1111 /* As soon as we se an field containing the type,
1112 we know we are not looking for derivations. */
1113 context->maybe_derived_type = false;
1116 else if (TREE_CODE (type) == ARRAY_TYPE)
1118 tree subtype = TREE_TYPE (type);
1120 /* Give up if we don't know array size. */
1121 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1122 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1123 goto give_up;
1124 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1125 type = subtype;
1126 context->outer_type = type;
1127 context->offset = offset;
1128 context->maybe_derived_type = false;
1130 /* Give up on anything else. */
1131 else
1132 goto give_up;
1135 /* If we failed to find subtype we look for, give up and fall back to the
1136 most generic query. */
1137 give_up:
1138 context->outer_type = expected_type;
1139 context->offset = 0;
1140 context->maybe_derived_type = true;
1141 context->maybe_in_construction = true;
1142 /* POD can be changed to an instance of a polymorphic type by
1143 placement new. Here we play safe and assume that any
1144 non-polymorphic type is POD. */
1145 if ((TREE_CODE (type) != RECORD_TYPE
1146 || !TYPE_BINFO (type)
1147 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1148 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1149 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1150 tree_to_uhwi (TYPE_SIZE (type)))))
1151 return true;
1152 return false;
1155 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1157 static bool
1158 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1159 tree otr_type)
1161 ipa_polymorphic_call_context context = {offset, outer_type,
1162 false, true};
1163 return get_class_context (&context, otr_type);
1166 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1168 static tree
1169 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1170 tree vtable)
1172 tree v = BINFO_VTABLE (binfo);
1173 int i;
1174 tree base_binfo;
1175 unsigned HOST_WIDE_INT this_offset;
/* If BINFO has a vtable pointer of its own, check whether it is the
   one we are looking for.  */
1177 if (v)
1179 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1180 gcc_unreachable ();
/* Compare by assembler name so aliases of the same vtable match.  */
1182 if (offset == this_offset
1183 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1184 return binfo;
/* Otherwise search recursively in every polymorphic base.  */
1187 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1188 if (polymorphic_type_binfo_p (base_binfo))
1190 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1191 if (base_binfo)
1192 return base_binfo;
/* No base with the requested vtable/offset pair was found.  */
1194 return NULL;
1197 /* T is known constant value of virtual table pointer.
1198 Store virtual table to V and its offset to OFFSET.
1199 Return false if T does not look like virtual table reference. */
1201 bool
1202 vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
1204 /* We expect &MEM[(void *)&virtual_table + 16B].
1205 We obtain object's BINFO from the context of the virtual table.
1206 This one contains pointer to virtual table represented via
1207 POINTER_PLUS_EXPR. Verify that this pointer match to what
1208 we propagated through.
1210 In the case of virtual inheritance, the virtual tables may
1211 be nested, i.e. the offset may be different from 16 and we may
1212 need to dive into the type representation. */
/* Pattern-match &MEM[&vtable_decl + CST] where the decl is virtual.  */
1213 if (TREE_CODE (t) == ADDR_EXPR
1214 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1215 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1216 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1217 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1218 == VAR_DECL)
1219 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1220 (TREE_OPERAND (t, 0), 0), 0)))
1222 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1223 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1224 return true;
1227 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1228 We need to handle it when T comes from static variable initializer or
1229 BINFO. */
1230 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1232 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1233 t = TREE_OPERAND (t, 0)<;
1235 else
1236 *offset = 0;
/* Whatever remains must be a direct address of the vtable decl.  */
1238 if (TREE_CODE (t) != ADDR_EXPR)
1239 return false;
1240 *v = TREE_OPERAND (t, 0);
1241 return true;
1244 /* T is known constant value of virtual table pointer. Return BINFO of the
1245 instance type. */
1247 tree
1248 vtable_pointer_value_to_binfo (tree t)
1250 tree vtable;
1251 unsigned HOST_WIDE_INT offset;
/* Decompose T into the vtable VAR_DECL and the offset within it.  */
1253 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1254 return NULL_TREE;
1256 /* FIXME: for stores of construction vtables we return NULL,
1257 because we do not have BINFO for those. Eventually we should fix
1258 our representation to allow this case to be handled, too.
1259 In the case we see store of BINFO we however may assume
1260 that standard folding will be able to cope with it. */
1261 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1262 offset, vtable);
1265 /* Produce polymorphic call context for call method of instance
1266 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1268 static void
1269 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1270 tree base, HOST_WIDE_INT offset)
1272 gcc_assert (DECL_P (base));
/* The decl's declared type is exact, hence maybe_derived_type below
   is false: the dynamic type can not be a derivation of it.  */
1274 context->outer_type = TREE_TYPE (base);
1275 context->offset = offset;
1276 /* Make very conservative assumption that all objects
1277 may be in construction.
1278 TODO: ipa-prop already contains code to tell better.
1279 merge it later. */
1280 context->maybe_in_construction = true;
1281 context->maybe_derived_type = false;
1284 /* CST is an invariant (address of decl), try to get meaningful
1285 polymorphic call context for polymorphic call of method
1286 of instance of OTR_TYPE that is located at OFFSET of this invariant.
1287 Return FALSE if nothing meaningful can be found. */
1289 bool
1290 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1291 tree cst,
1292 tree otr_type,
1293 HOST_WIDE_INT offset)
1295 HOST_WIDE_INT offset2, size, max_size;
1296 tree base;
/* Only addresses of decls carry usable type information here.  */
1298 if (TREE_CODE (cst) != ADDR_EXPR)
1299 return false;
1301 cst = TREE_OPERAND (cst, 0);
1302 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
/* Reject variable-sized or partially known accesses.  */
1303 if (!DECL_P (base) || max_size == -1 || max_size != size)
1304 return false;
1306 /* Only type inconsistent programs can have otr_type that is
1307 not part of outer type. */
1308 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1309 return false;
1311 get_polymorphic_call_info_for_decl (context, base, offset);
1312 return true;
1315 /* Given REF call in FNDECL, determine class of the polymorphic
1316 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1317 Return pointer to object described by the context */
1319 tree
1320 get_polymorphic_call_info (tree fndecl,
1321 tree ref,
1322 tree *otr_type,
1323 HOST_WIDE_INT *otr_token,
1324 ipa_polymorphic_call_context *context)
1326 tree base_pointer;
1327 *otr_type = obj_type_ref_class (ref);
1328 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1330 /* Set up basic info in case we find nothing interesting in the analysis. */
1331 context->outer_type = *otr_type;
1332 context->offset = 0;
1333 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1334 context->maybe_derived_type = true;
1335 context->maybe_in_construction = true;
1337 /* Walk SSA for outer object. */
/* Follow simple single-rhs copies of the pointer backwards through the
   SSA graph, accumulating constant offsets into CONTEXT->OFFSET.  */
1340 if (TREE_CODE (base_pointer) == SSA_NAME
1341 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1342 && SSA_NAME_DEF_STMT (base_pointer)
1343 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1345 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1346 STRIP_NOPS (base_pointer);
1348 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1350 HOST_WIDE_INT size, max_size;
1351 HOST_WIDE_INT offset2;
1352 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1353 &offset2, &size, &max_size);
1355 /* If this is a varying address, punt. */
1356 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1357 && max_size != -1
1358 && max_size == size)
1360 /* We found dereference of a pointer. Type of the pointer
1361 and MEM_REF is meaningless, but we can look further. */
1362 if (TREE_CODE (base) == MEM_REF)
1364 base_pointer = TREE_OPERAND (base, 0);
1365 context->offset
1366 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1367 context->outer_type = NULL;
1369 /* We found base object. In this case the outer_type
1370 is known. */
1371 else if (DECL_P (base))
1373 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1375 /* Only type inconsistent programs can have otr_type that is
1376 not part of outer type. */
1377 if (!contains_type_p (TREE_TYPE (base),
1378 context->offset + offset2, *otr_type))
1380 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1381 code sequences; we arrange the calls to be builtin_unreachable
1382 later. */
1383 *otr_token = INT_MAX;
1384 return base_pointer;
1386 get_polymorphic_call_info_for_decl (context, base,
1387 context->offset + offset2);
1388 return NULL;
1390 else
1391 break;
1393 else
1394 break;
1396 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1397 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
/* Constant pointer adjustment: fold the byte offset into the context
   (converted to bits) and keep walking from the adjusted pointer.  */
1399 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1400 * BITS_PER_UNIT;
1401 base_pointer = TREE_OPERAND (base_pointer, 0);
1403 else
1404 break;
1406 while (true);
1408 /* Try to determine type of the outer object. */
1409 if (TREE_CODE (base_pointer) == SSA_NAME
1410 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1411 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1413 /* See if parameter is THIS pointer of a method. */
1414 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1415 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1417 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1418 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1420 /* Dynamic casting has possibly upcasted the type
1421 in the hierarchy. In this case outer type is less
1422 informative than inner type and we should forget
1423 about it. */
1424 if (!contains_type_p (context->outer_type, context->offset,
1425 *otr_type))
1427 context->outer_type = NULL;
1428 return base_pointer;
1431 /* If the function is constructor or destructor, then
1432 the type is possibly in construction, but we know
1433 it is not derived type. */
1434 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1435 || DECL_CXX_DESTRUCTOR_P (fndecl))
1437 context->maybe_in_construction = true;
1438 context->maybe_derived_type = false;
1440 else
1442 context->maybe_derived_type = true;
1443 context->maybe_in_construction = false;
1445 return base_pointer;
1447 /* Non-PODs passed by value are really passed by invisible
1448 reference. In this case we also know the type of the
1449 object. */
1450 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1452 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1453 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1454 /* Only type inconsistent programs can have otr_type that is
1455 not part of outer type. */
1456 if (!contains_type_p (context->outer_type, context->offset,
1457 *otr_type))
1459 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1460 code sequences; we arrange the calls to be builtin_unreachable
1461 later. */
1462 *otr_token = INT_MAX;
1463 return base_pointer;
1465 context->maybe_derived_type = false;
1466 context->maybe_in_construction = false;
1467 return base_pointer;
1470 /* TODO: There are multiple ways to derive a type. For instance
1471 if BASE_POINTER is passed to an constructor call prior our reference.
1472 We do not make this type of flow sensitive analysis yet. */
1473 return base_pointer;
1476 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1477 Lookup their respective virtual methods for OTR_TOKEN and OTR_TYPE
1478 and insert them to NODES.
1480 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1482 static void
1483 record_targets_from_bases (tree otr_type,
1484 HOST_WIDE_INT otr_token,
1485 tree outer_type,
1486 HOST_WIDE_INT offset,
1487 vec <cgraph_node *> &nodes,
1488 pointer_set_t *inserted,
1489 pointer_set_t *matched_vtables,
1490 bool *completep)
/* Iteratively descend from OUTER_TYPE towards OTR_TYPE, recording the
   virtual method of every base passed through on the way.  */
1492 while (true)
1494 HOST_WIDE_INT pos, size;
1495 tree base_binfo;
1496 tree fld;
/* Reached OTR_TYPE itself; its own method was recorded by the caller.  */
1498 if (types_same_for_odr (outer_type, otr_type))
1499 return;
1501 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1503 if (TREE_CODE (fld) != FIELD_DECL)
1504 continue;
1506 pos = int_bit_position (fld);
1507 size = tree_to_shwi (DECL_SIZE (fld));
1508 if (pos <= offset && (pos + size) > offset
1509 /* Do not get confused by zero sized bases. */
1510 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1511 break;
1513 /* Within a class type we should always find corresponding fields. */
1514 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1516 /* Nonbasetypes should have been stripped by outer_class_type. */
1517 gcc_assert (DECL_ARTIFICIAL (fld));
1519 outer_type = TREE_TYPE (fld);
1520 offset -= pos;
1522 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1523 offset, otr_type);
1524 if (!base_binfo)
1526 gcc_assert (odr_violation_reported);
1527 return;
1529 gcc_assert (base_binfo);
/* Record the base's method only once per vtable.  */
1530 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1532 bool can_refer;
1533 tree target = gimple_get_virt_method_for_binfo (otr_token,
1534 base_binfo,
1535 &can_refer);
/* Destructors are never called via construction vtables.  */
1536 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
1537 maybe_record_node (nodes, target, inserted, can_refer, completep);
1538 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1543 /* When virtual table is removed, we may need to flush the cache. */
1545 static void
1546 devirt_variable_node_removal_hook (varpool_node *n,
1547 void *d ATTRIBUTE_UNUSED)
1549 if (cached_polymorphic_call_targets
1550 && DECL_VIRTUAL_P (n->decl)
1551 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1552 free_polymorphic_call_targets_hash ();
1555 /* Return vector containing possible targets of polymorphic call of type
1556 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1557 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1558 OTR_TYPE and include their virtual method. This is useful for types
1559 possibly in construction or destruction where the virtual table may
1560 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1561 us to walk the inheritance graph for all derivations.
1563 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1564 undefined and should be redirected to unreachable.
1566 If COMPLETEP is non-NULL, store true if the list is complete.
1567 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1568 in the target cache. If user needs to visit every target list
1569 just once, it can memoize them.
1571 NONCONSTRUCTION_TARGETS specify number of targets with asumption that
1572 the type is not in the construction. Those targets appear first in the
1573 vector returned.
1575 Returned vector is placed into cache. It is NOT caller's responsibility
1576 to free it. The vector can be freed on cgraph_remove_node call if
1577 the particular node is a virtual function present in the cache. */
1579 vec <cgraph_node *>
1580 possible_polymorphic_call_targets (tree otr_type,
1581 HOST_WIDE_INT otr_token,
1582 ipa_polymorphic_call_context context,
1583 bool *completep,
1584 void **cache_token,
1585 int *nonconstruction_targetsp)
1587 static struct cgraph_node_hook_list *node_removal_hook_holder;
1588 pointer_set_t *inserted;
1589 pointer_set_t *matched_vtables;
1590 vec <cgraph_node *> nodes = vNULL;
1591 vec <tree> bases_to_consider = vNULL;
1592 odr_type type, outer_type;
1593 polymorphic_call_target_d key;
1594 polymorphic_call_target_d **slot;
1595 unsigned int i;
1596 tree binfo, target;
1597 bool complete;
1598 bool can_refer;
1599 bool skipped = false;
1601 /* If ODR is not initialized, return empty incomplete list. */
1602 if (!odr_hash.is_created ())
1604 if (completep)
1605 *completep = false;
1606 if (cache_token)
1607 *cache_token = NULL;
1608 if (nonconstruction_targetsp)
1609 *nonconstruction_targetsp = 0;
1610 return nodes;
1613 /* If we hit type inconsistency, just return empty list of targets. */
1614 if (otr_token == INT_MAX)
1616 if (completep)
1617 *completep = true;
1618 if (cache_token)
1619 *cache_token = NULL;
1620 if (nonconstruction_targetsp)
1621 *nonconstruction_targetsp = 0;
1622 return nodes;
1625 type = get_odr_type (otr_type, true);
1627 /* Lookup the outer class type we want to walk. */
1628 if (context.outer_type
1629 && !get_class_context (&context, otr_type))
1631 if (completep)
1632 *completep = false;
1633 if (cache_token)
1634 *cache_token = NULL;
1635 if (nonconstruction_targetsp)
1636 *nonconstruction_targetsp = 0;
1637 return nodes;
1640 /* We canonicalize our query, so we do not need extra hashtable entries. */
1642 /* Without outer type, we have no use for offset. Just do the
1643 basic search from inner type */
1644 if (!context.outer_type)
1646 context.outer_type = otr_type;
1647 context.offset = 0;
1649 /* We need to update our hierarchy if the type does not exist. */
1650 outer_type = get_odr_type (context.outer_type, true);
1651 /* If the type is complete, there are no derivations. */
1652 if (TYPE_FINAL_P (outer_type->type))
1653 context.maybe_derived_type = false;
1655 /* Initialize query cache. */
1656 if (!cached_polymorphic_call_targets)
1658 cached_polymorphic_call_targets = pointer_set_create ();
1659 polymorphic_call_target_hash.create (23);
1660 if (!node_removal_hook_holder)
1662 node_removal_hook_holder =
1663 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1664 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1665 NULL);
1669 /* Lookup cached answer. */
1670 key.type = type;
1671 key.otr_token = otr_token;
1672 key.context = context;
1673 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1674 if (cache_token)
1675 *cache_token = (void *)*slot;
1676 if (*slot)
1678 if (completep)
1679 *completep = (*slot)->complete;
1680 if (nonconstruction_targetsp)
1681 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1682 return (*slot)->targets;
1685 complete = true;
1687 /* Do actual search. */
1688 timevar_push (TV_IPA_VIRTUAL_CALL);
/* Allocate the cache entry up front so recursive queries see it.  */
1689 *slot = XCNEW (polymorphic_call_target_d);
1690 if (cache_token)
1691 *cache_token = (void *)*slot;
1692 (*slot)->type = type;
1693 (*slot)->otr_token = otr_token;
1694 (*slot)->context = context;
1696 inserted = pointer_set_create ();
1697 matched_vtables = pointer_set_create ();
1699 /* First see virtual method of type itself. */
1700 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1701 context.offset, otr_type);
1702 if (binfo)
1703 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1704 &can_refer);
1705 else
1707 gcc_assert (odr_violation_reported);
1708 target = NULL;
1711 /* Destructors are never called through construction virtual tables,
1712 because the type is always known. */
1713 if (target && DECL_CXX_DESTRUCTOR_P (target))
1714 context.maybe_in_construction = false;
1716 if (target)
1718 /* In the case we get complete method, we don't need
1719 to walk derivations. */
1720 if (DECL_FINAL_P (target))
1721 context.maybe_derived_type = false;
1724 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
1725 if (type_possibly_instantiated_p (outer_type->type))
1726 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1727 else
1729 skipped = true;
1730 gcc_assert (in_lto_p || context.maybe_derived_type);
1733 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1735 /* Next walk recursively all derived types. */
1736 if (context.maybe_derived_type)
1738 /* For anonymous namespace types we can attempt to build full type.
1739 All derivations must be in this unit (unless we see partial unit). */
1740 if (!type->all_derivations_known)
1741 complete = false;
1742 for (i = 0; i < outer_type->derived_types.length(); i++)
1743 possible_polymorphic_call_targets_1 (nodes, inserted,
1744 matched_vtables,
1745 otr_type,
1746 outer_type->derived_types[i],
1747 otr_token, outer_type->type,
1748 context.offset, &complete,
1749 bases_to_consider,
1750 context.maybe_in_construction);
1753 /* Finally walk bases, if asked to. */
1754 (*slot)->nonconstruction_targets = nodes.length();
1756 /* Destructors are never called through construction virtual tables,
1757 because the type is always known. One of entries may be cxa_pure_virtual
1758 so look to at least two of them. */
1759 if (context.maybe_in_construction)
1760 for (i =0 ; i < MIN (nodes.length (), 2); i++)
1761 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
1762 context.maybe_in_construction = false;
1763 if (context.maybe_in_construction)
1765 if (type != outer_type
1766 && (!skipped
1767 || (context.maybe_derived_type
1768 && !type_all_derivations_known_p (outer_type->type))))
1769 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1770 context.offset, nodes, inserted,
1771 matched_vtables, &complete);
1772 if (skipped)
1773 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1774 for (i = 0; i < bases_to_consider.length(); i++)
1775 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
1777 bases_to_consider.release();
/* Publish the result in the cache entry and the output parameters.  */
1779 (*slot)->targets = nodes;
1780 (*slot)->complete = complete;
1781 if (completep)
1782 *completep = complete;
1783 if (nonconstruction_targetsp)
1784 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1786 pointer_set_destroy (inserted);
1787 pointer_set_destroy (matched_vtables);
1788 timevar_pop (TV_IPA_VIRTUAL_CALL);
1789 return nodes;
1792 /* Dump all possible targets of a polymorphic call. */
1794 void
1795 dump_possible_polymorphic_call_targets (FILE *f,
1796 tree otr_type,
1797 HOST_WIDE_INT otr_token,
1798 const ipa_polymorphic_call_context &ctx)
1800 vec <cgraph_node *> targets;
1801 bool final;
1802 odr_type type = get_odr_type (otr_type, false);
1803 unsigned int i;
1804 int nonconstruction;
1806 if (!type)
1807 return;
1808 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1809 ctx,
1810 &final, NULL, &nonconstruction);
1811 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1812 print_generic_expr (f, type->type, TDF_SLIM);
1813 fprintf (f, " token %i\n", (int)otr_token);
1814 if (ctx.outer_type || ctx.offset)
1816 fprintf (f, " Contained in type:");
1817 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1818 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1819 ctx.offset);
1822 fprintf (f, " %s%s%s\n ",
1823 final ? "This is a complete list." :
1824 "This is partial list; extra targets may be defined in other units.",
1825 ctx.maybe_in_construction ? " (base types included)" : "",
1826 ctx.maybe_derived_type ? " (derived types included)" : "");
1827 for (i = 0; i < targets.length (); i++)
1829 char *name = NULL;
1830 if (i == (unsigned)nonconstruction)
1831 fprintf (f, "\n If the type is in construction,"
1832 " then additional tarets are:\n"
1833 " ");
1834 if (in_lto_p)
1835 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1836 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1837 if (in_lto_p)
1838 free (name);
1839 if (!targets[i]->definition)
1840 fprintf (f, " (no definition%s)",
1841 DECL_DECLARED_INLINE_P (targets[i]->decl)
1842 ? " inline" : "");
1844 fprintf (f, "\n\n");
1848 /* Return true if N can be possibly target of a polymorphic call of
1849 OTR_TYPE/OTR_TOKEN. */
1851 bool
1852 possible_polymorphic_call_target_p (tree otr_type,
1853 HOST_WIDE_INT otr_token,
1854 const ipa_polymorphic_call_context &ctx,
1855 struct cgraph_node *n)
1857 vec <cgraph_node *> targets;
1858 unsigned int i;
1859 enum built_in_function fcode;
1860 bool final;
/* __builtin_unreachable and __builtin_trap stand in for undefined or
   pure-virtual targets and are always acceptable.  */
1862 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1863 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1864 == BUILT_IN_UNREACHABLE
1865 || fcode == BUILT_IN_TRAP))
1866 return true;
/* Without ODR info we can not rule anything out.  */
1868 if (!odr_hash.is_created ())
1869 return true;
1870 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1871 for (i = 0; i < targets.length (); i++)
1872 if (symtab_semantically_equivalent_p (n, targets[i]))
1873 return true;
1875 /* At a moment we allow middle end to dig out new external declarations
1876 as a targets of polymorphic calls. */
1877 if (!final && !n->definition)
1878 return true;
1879 return false;
1883 /* After callgraph construction new external nodes may appear.
1884 Add them into the graph. */
1886 void
1887 update_type_inheritance_graph (void)
1889 struct cgraph_node *n;
1891 if (!odr_hash.is_created ())
1892 return;
/* The cached target lists may refer to the old graph; drop them.  */
1893 free_polymorphic_call_targets_hash ();
1894 timevar_push (TV_IPA_INHERITANCE);
1895 /* We reconstruct the graph starting from types of all methods seen in the
1896 unit. */
1897 FOR_EACH_FUNCTION (n)
1898 if (DECL_VIRTUAL_P (n->decl)
1899 && !n->definition
1900 && symtab_real_symbol_p (n))
1901 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1902 timevar_pop (TV_IPA_INHERITANCE);
1906 /* Return true if N looks like likely target of a polymorphic call.
1907 Rule out cxa_pure_virtual, noreturns, function declared cold and
1908 other obvious cases. */
1910 bool
1911 likely_target_p (struct cgraph_node *n)
1913 int flags;
1914 /* cxa_pure_virtual and similar things are not likely. */
1915 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1916 return false;
1917 flags = flags_from_decl_or_type (n->decl);
1918 if (flags & ECF_NORETURN)
1919 return false;
1920 if (lookup_attribute ("cold",
1921 DECL_ATTRIBUTES (n->decl)))
1922 return false;
1923 if (n->frequency < NODE_FREQUENCY_NORMAL)
1924 return false;
1925 /* If there are no virtual tables refering the target alive,
1926 the only way the target can be called is an instance comming from other
1927 compilation unit; speculative devirtualization is build around an
1928 assumption that won't happen. */
1929 if (!referenced_from_vtable_p (n))
1930 return false;
1931 return true;
1934 /* The ipa-devirt pass.
1935 When polymorphic call has only one likely target in the unit,
1936 turn it into speculative call. */
1938 static unsigned int
1939 ipa_devirt (void)
1941 struct cgraph_node *n;
1942 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1943 struct cgraph_edge *e;
1945 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1946 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1947 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
1949 FOR_EACH_DEFINED_FUNCTION (n)
1951 bool update = false;
1952 if (dump_file && n->indirect_calls)
1953 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
1954 n->name (), n->order);
1955 for (e = n->indirect_calls; e; e = e->next_callee)
1956 if (e->indirect_info->polymorphic)
1958 struct cgraph_node *likely_target = NULL;
1959 void *cache_token;
1960 bool final;
1961 int nonconstruction_targets;
1962 vec <cgraph_node *>targets
1963 = possible_polymorphic_call_targets
1964 (e, &final, &cache_token, &nonconstruction_targets);
1965 unsigned int i;
1967 if (dump_file)
1968 dump_possible_polymorphic_call_targets
1969 (dump_file, e);
1971 npolymorphic++;
1973 if (!cgraph_maybe_hot_edge_p (e))
1975 if (dump_file)
1976 fprintf (dump_file, "Call is cold\n\n");
1977 ncold++;
1978 continue;
1980 if (e->speculative)
1982 if (dump_file)
1983 fprintf (dump_file, "Call is aready speculated\n\n");
1984 nspeculated++;
1986 /* When dumping see if we agree with speculation. */
1987 if (!dump_file)
1988 continue;
1990 if (pointer_set_contains (bad_call_targets,
1991 cache_token))
1993 if (dump_file)
1994 fprintf (dump_file, "Target list is known to be useless\n\n");
1995 nmultiple++;
1996 continue;
1998 for (i = 0; i < targets.length (); i++)
1999 if (likely_target_p (targets[i]))
2001 if (likely_target)
2003 if (i < (unsigned) nonconstruction_targets)
2005 likely_target = NULL;
2006 if (dump_file)
2007 fprintf (dump_file, "More than one likely target\n\n");
2008 nmultiple++;
2010 break;
2012 likely_target = targets[i];
2014 if (!likely_target)
2016 pointer_set_insert (bad_call_targets, cache_token);
2017 continue;
2019 /* This is reached only when dumping; check if we agree or disagree
2020 with the speculation. */
2021 if (e->speculative)
2023 struct cgraph_edge *e2;
2024 struct ipa_ref *ref;
2025 cgraph_speculative_call_info (e, e2, e, ref);
2026 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2027 == cgraph_function_or_thunk_node (likely_target, NULL))
2029 fprintf (dump_file, "We agree with speculation\n\n");
2030 nok++;
2032 else
2034 fprintf (dump_file, "We disagree with speculation\n\n");
2035 nwrong++;
2037 continue;
2039 if (!likely_target->definition)
2041 if (dump_file)
2042 fprintf (dump_file, "Target is not an definition\n\n");
2043 nnotdefined++;
2044 continue;
2046 /* Do not introduce new references to external symbols. While we
2047 can handle these just well, it is common for programs to
2048 incorrectly with headers defining methods they are linked
2049 with. */
2050 if (DECL_EXTERNAL (likely_target->decl))
2052 if (dump_file)
2053 fprintf (dump_file, "Target is external\n\n");
2054 nexternal++;
2055 continue;
2057 /* Don't use an implicitly-declared destructor (c++/58678). */
2058 struct cgraph_node *non_thunk_target
2059 = cgraph_function_node (likely_target);
2060 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2061 && DECL_COMDAT (non_thunk_target->decl))
2063 if (dump_file)
2064 fprintf (dump_file, "Target is artificial\n\n");
2065 nartificial++;
2066 continue;
2068 if (cgraph_function_body_availability (likely_target)
2069 <= AVAIL_OVERWRITABLE
2070 && symtab_can_be_discarded (likely_target))
2072 if (dump_file)
2073 fprintf (dump_file, "Target is overwritable\n\n");
2074 noverwritable++;
2075 continue;
2077 else if (dbg_cnt (devirt))
2079 if (dump_enabled_p ())
2081 location_t locus = gimple_location (e->call_stmt);
2082 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2083 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2084 n->name (), n->order,
2085 likely_target->name (),
2086 likely_target->order);
2088 if (!symtab_can_be_discarded (likely_target))
2090 cgraph_node *alias;
2091 alias = cgraph (symtab_nonoverwritable_alias
2092 (likely_target));
2093 if (alias)
2094 likely_target = alias;
2096 nconverted++;
2097 update = true;
2098 cgraph_turn_edge_to_speculative
2099 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2102 if (update)
2103 inline_update_overall_summary (n);
2105 pointer_set_destroy (bad_call_targets);
2107 if (dump_file)
2108 fprintf (dump_file,
2109 "%i polymorphic calls, %i devirtualized,"
2110 " %i speculatively devirtualized, %i cold\n"
2111 "%i have multiple targets, %i overwritable,"
2112 " %i already speculated (%i agree, %i disagree),"
2113 " %i external, %i not defined, %i artificial\n",
2114 npolymorphic, ndevirtualized, nconverted, ncold,
2115 nmultiple, noverwritable, nspeculated, nok, nwrong,
2116 nexternal, nnotdefined, nartificial);
2117 return ndevirtualized ? TODO_remove_functions : 0;
2120 namespace {
2122 const pass_data pass_data_ipa_devirt =
2124 IPA_PASS, /* type */
2125 "devirt", /* name */
2126 OPTGROUP_NONE, /* optinfo_flags */
2127 true, /* has_execute */
2128 TV_IPA_DEVIRT, /* tv_id */
2129 0, /* properties_required */
2130 0, /* properties_provided */
2131 0, /* properties_destroyed */
2132 0, /* todo_flags_start */
2133 ( TODO_dump_symtab ), /* todo_flags_finish */
2136 class pass_ipa_devirt : public ipa_opt_pass_d
2138 public:
2139 pass_ipa_devirt (gcc::context *ctxt)
2140 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2141 NULL, /* generate_summary */
2142 NULL, /* write_summary */
2143 NULL, /* read_summary */
2144 NULL, /* write_optimization_summary */
2145 NULL, /* read_optimization_summary */
2146 NULL, /* stmt_fixup */
2147 0, /* function_transform_todo_flags_start */
2148 NULL, /* function_transform */
2149 NULL) /* variable_transform */
2152 /* opt_pass methods: */
2153 virtual bool gate (function *)
2155 return (flag_devirtualize
2156 && flag_devirtualize_speculatively
2157 && optimize);
2160 virtual unsigned int execute (function *) { return ipa_devirt (); }
2162 }; // class pass_ipa_devirt
2164 } // anon namespace
2166 ipa_opt_pass_d *
2167 make_pass_ipa_devirt (gcc::context *ctxt)
2169 return new pass_ipa_devirt (ctxt);
2172 #include "gt-ipa-devirt.h"