Merge from trunk: 215733-215743
[official-gcc.git] / gcc-4_9 / gcc / ipa-devirt.c
blobbb4b4168e3f4a5f5987ee3b4999237119c4125e7
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frotend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph represention of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
133 static bool odr_violation_reported = false;
135 /* Dummy polymorphic call context. */
137 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
138 = {0, NULL, false, true};
140 /* Pointer set of all call targets appearing in the cache. */
141 static pointer_set_t *cached_polymorphic_call_targets;
143 /* The node of type inheritance graph. For each type unique in
144 One Defintion Rule (ODR) sense, we produce one node linking all
145 main variants of types equivalent to it, bases and derived types. */
147 struct GTY(()) odr_type_d
149 /* leader type. */
150 tree type;
151 /* All bases. */
152 vec<odr_type> GTY((skip)) bases;
153 /* All derrived types with virtual methods seen in unit. */
154 vec<odr_type> GTY((skip)) derived_types;
156 /* All equivalent types, if more than one. */
157 vec<tree, va_gc> *types;
158 /* Set of all equivalent types, if NON-NULL. */
159 pointer_set_t * GTY((skip)) types_set;
161 /* Unique ID indexing the type in odr_types array. */
162 int id;
163 /* Is it in anonymous namespace? */
164 bool anonymous_namespace;
168 /* Return true if BINFO corresponds to a type with virtual methods.
170 Every type has several BINFOs. One is the BINFO associated by the type
171 while other represents bases of derived types. The BINFOs representing
172 bases do not have BINFO_VTABLE pointer set when this is the single
173 inheritance (because vtables are shared). Look up the BINFO of type
174 and check presence of its vtable. */
176 static inline bool
177 polymorphic_type_binfo_p (tree binfo)
179 /* See if BINFO's type has an virtual table associtated with it. */
180 tree type_binfo = TYPE_BINFO (BINFO_TYPE (binfo));
181 if (L_IPO_COMP_MODE && !type_binfo)
182 return false;
183 return BINFO_VTABLE (type_binfo);
186 /* One Definition Rule hashtable helpers. */
188 struct odr_hasher
190 typedef odr_type_d value_type;
191 typedef union tree_node compare_type;
192 static inline hashval_t hash (const value_type *);
193 static inline bool equal (const value_type *, const compare_type *);
194 static inline void remove (value_type *);
197 /* Produce hash based on type name. */
199 hashval_t
200 hash_type_name (tree t)
202 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
204 /* If not in LTO, all main variants are unique, so we can do
205 pointer hash. */
206 if (!in_lto_p)
207 return htab_hash_pointer (t);
209 /* Anonymous types are unique. */
210 if (type_in_anonymous_namespace_p (t))
211 return htab_hash_pointer (t);
213 /* For polymorphic types, we can simply hash the virtual table. */
214 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
216 tree v = BINFO_VTABLE (TYPE_BINFO (t));
217 hashval_t hash = 0;
219 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
221 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
222 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
225 v = DECL_ASSEMBLER_NAME (v);
226 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
227 return hash;
230 /* Rest is not implemented yet. */
231 gcc_unreachable ();
234 /* Return the computed hashcode for ODR_TYPE. */
236 inline hashval_t
237 odr_hasher::hash (const value_type *odr_type)
239 return hash_type_name (odr_type->type);
242 /* Compare types T1 and T2 and return true if they are
243 equivalent. */
245 inline bool
246 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
248 tree t2 = const_cast <tree> (ct2);
250 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
251 if (t1->type == t2)
252 return true;
253 if (!in_lto_p)
254 return false;
255 return types_same_for_odr (t1->type, t2);
258 /* Free ODR type V. */
260 inline void
261 odr_hasher::remove (value_type *v)
263 v->bases.release ();
264 v->derived_types.release ();
265 if (v->types_set)
266 pointer_set_destroy (v->types_set);
267 ggc_free (v);
270 /* ODR type hash used to lookup ODR type based on tree type node. */
272 typedef hash_table <odr_hasher> odr_hash_type;
273 static odr_hash_type odr_hash;
275 /* ODR types are also stored into ODR_TYPE vector to allow consistent
276 walking. Bases appear before derived types. Vector is garbage collected
277 so we won't end up visiting empty types. */
279 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
280 #define odr_types (*odr_types_ptr)
282 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
283 from VAL->type. This may happen in LTO where tree merging did not merge
284 all variants of the same type. It may or may not mean the ODR violation.
285 Add it to the list of duplicates and warn on some violations. */
287 static void
288 add_type_duplicate (odr_type val, tree type)
/* Lazily create the set used to detect already-recorded duplicates.  */
290 if (!val->types_set)
291 val->types_set = pointer_set_create ();
293 /* See if this duplicate is new. */
294 if (!pointer_set_insert (val->types_set, type))
/* MERGE stays true only while the duplicate looks compatible; it gates
   the BINFO regularization at the end of the function.  */
296 bool merge = true;
297 bool base_mismatch = false;
298 gcc_assert (in_lto_p);
299 vec_safe_push (val->types, type);
300 unsigned int i,j;
302 /* First we compare memory layout. */
303 if (!types_compatible_p (val->type, type))
305 merge = false;
306 odr_violation_reported = true;
/* Warn only when VAL->type is polymorphic (has a vtable); the
   BINFO_VTABLE check below serves as that filter.  */
307 if (BINFO_VTABLE (TYPE_BINFO (val->type))
308 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
309 "type %qD violates one definition rule ",
310 type))
311 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
312 "a type with the same name but different layout is "
313 "defined in another translation unit");
314 if (cgraph_dump_file)
316 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
318 print_node (cgraph_dump_file, "", val->type, 0);
319 putc ('\n',cgraph_dump_file);
320 print_node (cgraph_dump_file, "", type, 0);
321 putc ('\n',cgraph_dump_file);
325 /* Next sanity check that bases are the same. If not, we will end
326 up producing wrong answers. */
/* Compare only polymorphic bases (J walks VAL->bases in step with the
   polymorphic bases of TYPE's binfo).  */
327 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
328 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
330 odr_type base = get_odr_type
331 (BINFO_TYPE
332 (BINFO_BASE_BINFO (TYPE_BINFO (type),
333 i)),
334 true);
335 if (val->bases.length () <= j || val->bases[j] != base)
336 base_mismatch = true;
337 j++;
339 if (base_mismatch)
341 merge = false;
342 odr_violation_reported = true;
344 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
345 "type %qD violates one definition rule ",
346 type))
347 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
348 "a type with the same name but different bases is "
349 "defined in another translation unit");
350 if (cgraph_dump_file)
/* NOTE(review): "bse" below is a pre-existing typo inside a runtime
   dump string; left untouched by this comment-only change.  */
352 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
354 print_node (cgraph_dump_file, "", val->type, 0);
355 putc ('\n',cgraph_dump_file);
356 print_node (cgraph_dump_file, "", type, 0);
357 putc ('\n',cgraph_dump_file);
361 /* Regularize things a little. During LTO same types may come with
362 different BINFOs. Either because their virtual table was
363 not merged by tree merging and only later at decl merging or
364 because one type comes with external vtable, while other
365 with internal. We want to merge equivalent binfos to conserve
366 memory and streaming overhead.
368 The external vtables are more harmful: they contain references
369 to external declarations of methods that may be defined in the
370 merged LTO unit. For this reason we absolutely need to remove
371 them and replace by internal variants. Not doing so will lead
372 to incomplete answers from possible_polymorphic_call_targets. */
373 if (!flag_ltrans && merge)
375 tree master_binfo = TYPE_BINFO (val->type);
376 tree v1 = BINFO_VTABLE (master_binfo);
377 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
/* Strip POINTER_PLUS_EXPR wrappers down to the vtable decls; the
   offsets must agree between the two representations.  */
379 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
381 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
382 && operand_equal_p (TREE_OPERAND (v1, 1),
383 TREE_OPERAND (v2, 1), 0));
384 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
385 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
387 gcc_assert (DECL_ASSEMBLER_NAME (v1)
388 == DECL_ASSEMBLER_NAME (v2));
/* Prefer whichever BINFO's vtable has a definition in this unit:
   if the master's vtable is external and the duplicate's is not,
   retarget the master type (and all recorded duplicates still
   pointing at the old BINFO) to the duplicate's BINFO.  */
390 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
392 unsigned int i;
394 TYPE_BINFO (val->type) = TYPE_BINFO (type);
395 for (i = 0; i < val->types->length (); i++)
397 if (TYPE_BINFO ((*val->types)[i])
398 == master_binfo)
399 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
402 else
403 TYPE_BINFO (type) = master_binfo;
408 /* Get ODR type hash entry for TYPE. If INSERT is true, create
409 possibly new entry. */
411 odr_type
412 get_odr_type (tree type, bool insert)
414 odr_type_d **slot;
415 odr_type val;
416 hashval_t hash;
418 type = TYPE_MAIN_VARIANT (type);
419 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
420 hash = hash_type_name (type);
421 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
422 if (!slot)
423 return NULL;
425 /* See if we already have entry for type. */
426 if (*slot)
428 val = *slot;
430 /* With LTO we need to support multiple tree representation of
431 the same ODR type. */
432 if (val->type != type)
433 add_type_duplicate (val, type);
435 else
437 tree binfo = TYPE_BINFO (type);
438 unsigned int i;
440 val = ggc_alloc_cleared_odr_type_d ();
441 val->type = type;
442 val->bases = vNULL;
443 val->derived_types = vNULL;
444 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
445 *slot = val;
446 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
447 /* For now record only polymorphic types. other are
448 pointless for devirtualization and we can not precisely
449 determine ODR equivalency of these during LTO. */
450 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
452 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
453 i)),
454 true);
455 base->derived_types.safe_push (val);
456 val->bases.safe_push (base);
458 /* First record bases, then add into array so ids are increasing. */
459 if (odr_types_ptr)
460 val->id = odr_types.length ();
461 vec_safe_push (odr_types_ptr, val);
463 return val;
466 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
467 recusive printing. */
469 static void
470 dump_odr_type (FILE *f, odr_type t, int indent=0)
472 unsigned int i;
473 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
474 print_generic_expr (f, t->type, TDF_SLIM);
475 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
476 if (TYPE_NAME (t->type))
478 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
479 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
480 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
482 if (t->bases.length ())
484 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
485 for (i = 0; i < t->bases.length (); i++)
486 fprintf (f, " %i", t->bases[i]->id);
487 fprintf (f, "\n");
489 if (t->derived_types.length ())
491 fprintf (f, "%*s derived types:\n", indent * 2, "");
492 for (i = 0; i < t->derived_types.length (); i++)
493 dump_odr_type (f, t->derived_types[i], indent + 1);
495 fprintf (f, "\n");
498 /* Dump the type inheritance graph. */
500 static void
501 dump_type_inheritance_graph (FILE *f)
503 unsigned int i;
504 if (!odr_types_ptr)
505 return;
506 fprintf (f, "\n\nType inheritance graph:\n");
507 for (i = 0; i < odr_types.length (); i++)
509 if (odr_types[i]->bases.length () == 0)
510 dump_odr_type (f, odr_types[i]);
512 for (i = 0; i < odr_types.length (); i++)
514 if (odr_types[i]->types && odr_types[i]->types->length ())
516 unsigned int j;
517 fprintf (f, "Duplicate tree types for odr type %i\n", i);
518 print_node (f, "", odr_types[i]->type, 0);
519 for (j = 0; j < odr_types[i]->types->length (); j++)
521 tree t;
522 fprintf (f, "duplicate #%i\n", j);
523 print_node (f, "", (*odr_types[i]->types)[j], 0);
524 t = (*odr_types[i]->types)[j];
525 while (TYPE_P (t) && TYPE_CONTEXT (t))
527 t = TYPE_CONTEXT (t);
528 print_node (f, "", t, 0);
530 putc ('\n',f);
536 /* Given method type T, return type of class it belongs to.
537 Lookup this pointer and get its type. */
539 tree
540 method_class_type (const_tree t)
542 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
543 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
545 return TREE_TYPE (first_parm_type);
548 /* Initialize IPA devirt and build inheritance tree graph. */
550 void
551 build_type_inheritance_graph (void)
553 struct symtab_node *n;
554 FILE *inheritance_dump_file;
555 int flags;
557 if (odr_hash.is_created ())
558 return;
559 timevar_push (TV_IPA_INHERITANCE);
560 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
561 odr_hash.create (23);
563 /* We reconstruct the graph starting of types of all methods seen in the
564 the unit. */
565 FOR_EACH_SYMBOL (n)
566 if (is_a <cgraph_node> (n)
567 && DECL_VIRTUAL_P (n->decl)
568 && symtab_real_symbol_p (n))
569 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
571 /* Look also for virtual tables of types that do not define any methods.
573 We need it in a case where class B has virtual base of class A
574 re-defining its virtual method and there is class C with no virtual
575 methods with B as virtual base.
577 Here we output B's virtual method in two variant - for non-virtual
578 and virtual inheritance. B's virtual table has non-virtual version,
579 while C's has virtual.
581 For this reason we need to know about C in order to include both
582 variants of B. More correctly, record_target_from_binfo should
583 add both variants of the method when walking B, but we have no
584 link in between them.
586 We rely on fact that either the method is exported and thus we
587 assume it is called externally or C is in anonymous namespace and
588 thus we will see the vtable. */
590 else if (is_a <varpool_node> (n)
591 && DECL_VIRTUAL_P (n->decl)
592 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
593 && TYPE_BINFO (DECL_CONTEXT (n->decl))
594 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
595 get_odr_type (DECL_CONTEXT (n->decl), true);
596 if (inheritance_dump_file)
598 dump_type_inheritance_graph (inheritance_dump_file);
599 dump_end (TDI_inheritance, inheritance_dump_file);
601 timevar_pop (TV_IPA_INHERITANCE);
604 /* If TARGET has associated node, record it in the NODES array.
605 CAN_REFER specify if program can refer to the target directly.
606 if TARGET is unknown (NULL) or it can not be inserted (for example because
607 its body was already removed and there is no way to refer to it), clear
608 COMPLETEP. */
610 static void
611 maybe_record_node (vec <cgraph_node *> &nodes,
612 tree target, pointer_set_t *inserted,
613 bool can_refer,
614 bool *completep)
616 struct cgraph_node *target_node;
617 enum built_in_function fcode;
619 if (!can_refer)
621 /* The only case when method of anonymous namespace becomes unreferable
622 is when we completely optimized it out. */
623 if (flag_ltrans
624 || !target
625 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
626 *completep = false;
627 return;
630 if (!target
631 /* Those are used to mark impossible scenarios. */
632 || (fcode = DECL_FUNCTION_CODE (target))
633 == BUILT_IN_UNREACHABLE
634 || fcode == BUILT_IN_TRAP)
635 return;
637 target_node = cgraph_get_node (target);
639 if (target_node != NULL
640 && (TREE_PUBLIC (target)
641 || target_node->definition)
642 && symtab_real_symbol_p (target_node))
644 gcc_assert (!target_node->global.inlined_to);
645 gcc_assert (symtab_real_symbol_p (target_node));
646 if (!pointer_set_insert (inserted, target))
648 pointer_set_insert (cached_polymorphic_call_targets,
649 target_node);
650 nodes.safe_push (target_node);
653 else if (completep
654 && !type_in_anonymous_namespace_p
655 (method_class_type (TREE_TYPE (target))))
656 *completep = false;
659 /* See if BINFO's type match OUTER_TYPE. If so, lookup
660 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
661 method in vtable and insert method to NODES array.
662 Otherwise recurse to base BINFOs.
663 This match what get_binfo_at_offset does, but with offset
664 being unknown.
666 TYPE_BINFOS is a stack of BINFOS of types with defined
667 virtual table seen on way from class type to BINFO.
669 MATCHED_VTABLES tracks virtual tables we already did lookup
670 for virtual function in. INSERTED tracks nodes we already
671 inserted.
673 ANONYMOUS is true if BINFO is part of anonymous namespace.
675 Clear COMPLETEP when we hit unreferable target.
678 static void
679 record_target_from_binfo (vec <cgraph_node *> &nodes,
680 tree binfo,
681 tree otr_type,
682 vec <tree> &type_binfos,
683 HOST_WIDE_INT otr_token,
684 tree outer_type,
685 HOST_WIDE_INT offset,
686 pointer_set_t *inserted,
687 pointer_set_t *matched_vtables,
688 bool anonymous,
689 bool *completep)
691 tree type = BINFO_TYPE (binfo);
692 int i;
693 tree base_binfo;
696 if (BINFO_VTABLE (binfo))
697 type_binfos.safe_push (binfo);
698 if (types_same_for_odr (type, outer_type))
700 int i;
701 tree type_binfo = NULL;
703 /* Lookup BINFO with virtual table. For normal types it is always last
704 binfo on stack. */
705 for (i = type_binfos.length () - 1; i >= 0; i--)
706 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
708 type_binfo = type_binfos[i];
709 break;
711 if (BINFO_VTABLE (binfo))
712 type_binfos.pop ();
713 /* If this is duplicated BINFO for base shared by virtual inheritance,
714 we may not have its associated vtable. This is not a problem, since
715 we will walk it on the other path. */
716 if (!type_binfo)
717 return;
718 tree inner_binfo = get_binfo_at_offset (type_binfo,
719 offset, otr_type);
720 if (!inner_binfo)
722 gcc_assert (odr_violation_reported);
723 return;
725 /* For types in anonymous namespace first check if the respective vtable
726 is alive. If not, we know the type can't be called. */
727 if (!flag_ltrans && anonymous)
729 tree vtable = BINFO_VTABLE (inner_binfo);
730 varpool_node *vnode;
732 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
733 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
734 vnode = varpool_get_node (vtable);
735 if (!vnode || !vnode->definition)
736 return;
738 gcc_assert (inner_binfo);
739 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
741 bool can_refer;
742 tree target = gimple_get_virt_method_for_binfo (otr_token,
743 inner_binfo,
744 &can_refer);
745 maybe_record_node (nodes, target, inserted, can_refer, completep);
747 return;
750 /* Walk bases. */
751 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
752 /* Walking bases that have no virtual method is pointless excercise. */
753 if (polymorphic_type_binfo_p (base_binfo))
754 record_target_from_binfo (nodes, base_binfo, otr_type,
755 type_binfos,
756 otr_token, outer_type, offset, inserted,
757 matched_vtables, anonymous, completep);
758 if (BINFO_VTABLE (binfo))
759 type_binfos.pop ();
762 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
763 of TYPE, insert them to NODES, recurse into derived nodes.
764 INSERTED is used to avoid duplicate insertions of methods into NODES.
765 MATCHED_VTABLES are used to avoid duplicate walking vtables.
766 Clear COMPLETEP if unreferable target is found. */
768 static void
769 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
770 pointer_set_t *inserted,
771 pointer_set_t *matched_vtables,
772 tree otr_type,
773 odr_type type,
774 HOST_WIDE_INT otr_token,
775 tree outer_type,
776 HOST_WIDE_INT offset,
777 bool *completep)
779 tree binfo = TYPE_BINFO (type->type);
780 unsigned int i;
781 vec <tree> type_binfos = vNULL;
783 record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
784 outer_type, offset,
785 inserted, matched_vtables,
786 type->anonymous_namespace, completep);
787 type_binfos.release ();
788 for (i = 0; i < type->derived_types.length (); i++)
789 possible_polymorphic_call_targets_1 (nodes, inserted,
790 matched_vtables,
791 otr_type,
792 type->derived_types[i],
793 otr_token, outer_type, offset, completep);
796 /* Cache of queries for polymorphic call targets.
798 Enumerating all call targets may get expensive when there are many
799 polymorphic calls in the program, so we memoize all the previous
800 queries and avoid duplicated work. */
802 struct polymorphic_call_target_d
804 HOST_WIDE_INT otr_token;
805 ipa_polymorphic_call_context context;
806 odr_type type;
807 vec <cgraph_node *> targets;
808 int nonconstruction_targets;
809 bool complete;
812 /* Polymorphic call target cache helpers. */
814 struct polymorphic_call_target_hasher
816 typedef polymorphic_call_target_d value_type;
817 typedef polymorphic_call_target_d compare_type;
818 static inline hashval_t hash (const value_type *);
819 static inline bool equal (const value_type *, const compare_type *);
820 static inline void remove (value_type *);
823 /* Return the computed hashcode for ODR_QUERY. */
825 inline hashval_t
826 polymorphic_call_target_hasher::hash (const value_type *odr_query)
828 hashval_t hash;
830 hash = iterative_hash_host_wide_int
831 (odr_query->otr_token,
832 odr_query->type->id);
833 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
834 hash);
835 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
836 return iterative_hash_hashval_t
837 (((int)odr_query->context.maybe_in_construction << 1)
838 | (int)odr_query->context.maybe_derived_type, hash);
841 /* Compare cache entries T1 and T2. */
843 inline bool
844 polymorphic_call_target_hasher::equal (const value_type *t1,
845 const compare_type *t2)
847 return (t1->type == t2->type && t1->otr_token == t2->otr_token
848 && t1->context.offset == t2->context.offset
849 && t1->context.outer_type == t2->context.outer_type
850 && t1->context.maybe_in_construction
851 == t2->context.maybe_in_construction
852 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
855 /* Remove entry in polymorphic call target cache hash. */
857 inline void
858 polymorphic_call_target_hasher::remove (value_type *v)
860 v->targets.release ();
861 free (v);
864 /* Polymorphic call target query cache. */
866 typedef hash_table <polymorphic_call_target_hasher>
867 polymorphic_call_target_hash_type;
868 static polymorphic_call_target_hash_type polymorphic_call_target_hash;
870 /* Destroy polymorphic call target query cache. */
872 static void
873 free_polymorphic_call_targets_hash ()
875 if (cached_polymorphic_call_targets)
877 polymorphic_call_target_hash.dispose ();
878 pointer_set_destroy (cached_polymorphic_call_targets);
879 cached_polymorphic_call_targets = NULL;
883 /* When virtual function is removed, we may need to flush the cache. */
885 static void
886 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
888 if (cached_polymorphic_call_targets
889 && pointer_set_contains (cached_polymorphic_call_targets, n))
890 free_polymorphic_call_targets_hash ();
893 /* Return true when TYPE contains an polymorphic type and thus is interesting
894 for devirtualization machinery. */
896 bool
897 contains_polymorphic_type_p (const_tree type)
899 type = TYPE_MAIN_VARIANT (type);
901 if (RECORD_OR_UNION_TYPE_P (type))
903 if (TYPE_BINFO (type)
904 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
905 return true;
906 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
907 if (TREE_CODE (fld) == FIELD_DECL
908 && !DECL_ARTIFICIAL (fld)
909 && contains_polymorphic_type_p (TREE_TYPE (fld)))
910 return true;
911 return false;
913 if (TREE_CODE (type) == ARRAY_TYPE)
914 return contains_polymorphic_type_p (TREE_TYPE (type));
915 return false;
918 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
919 is contained at CONTEXT->OFFSET. Walk the memory representation of
920 CONTEXT->OUTER_TYPE and find the outermost class type that match
921 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
922 to represent it.
924 For example when CONTEXT represents type
925 class A
927 int a;
928 class B b;
930 and we look for type at offset sizeof(int), we end up with B and offset 0.
931 If the same is produced by multiple inheritance, we end up with A and offset
932 sizeof(int).
934 If we can not find corresponding class, give up by setting
935 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
936 Return true when lookup was sucesful. */
938 static bool
939 get_class_context (ipa_polymorphic_call_context *context,
940 tree expected_type)
942 tree type = context->outer_type;
943 HOST_WIDE_INT offset = context->offset;
945 /* Find the sub-object the constant actually refers to and mark whether it is
946 an artificial one (as opposed to a user-defined one). */
947 while (true)
949 HOST_WIDE_INT pos, size;
950 tree fld;
952 /* On a match, just return what we found. */
953 if (TREE_CODE (type) == TREE_CODE (expected_type)
954 && types_same_for_odr (type, expected_type))
956 /* Type can not contain itself on an non-zero offset. In that case
957 just give up. */
958 if (offset != 0)
959 goto give_up;
960 gcc_assert (offset == 0);
961 return true;
964 /* Walk fields and find corresponding on at OFFSET. */
965 if (TREE_CODE (type) == RECORD_TYPE)
967 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
969 if (TREE_CODE (fld) != FIELD_DECL)
970 continue;
972 pos = int_bit_position (fld);
973 size = tree_to_uhwi (DECL_SIZE (fld));
974 if (pos <= offset && (pos + size) > offset)
975 break;
978 if (!fld)
979 goto give_up;
981 type = TREE_TYPE (fld);
982 offset -= pos;
983 /* DECL_ARTIFICIAL represents a basetype. */
984 if (!DECL_ARTIFICIAL (fld))
986 context->outer_type = type;
987 context->offset = offset;
988 /* As soon as we se an field containing the type,
989 we know we are not looking for derivations. */
990 context->maybe_derived_type = false;
993 else if (TREE_CODE (type) == ARRAY_TYPE)
995 tree subtype = TREE_TYPE (type);
997 /* Give up if we don't know array size. */
998 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
999 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1000 goto give_up;
1001 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1002 type = subtype;
1003 context->outer_type = type;
1004 context->offset = offset;
1005 context->maybe_derived_type = false;
1007 /* Give up on anything else. */
1008 else
1009 goto give_up;
1012 /* If we failed to find subtype we look for, give up and fall back to the
1013 most generic query. */
1014 give_up:
1015 context->outer_type = expected_type;
1016 context->offset = 0;
1017 context->maybe_derived_type = true;
1018 context->maybe_in_construction = true;
1019 /* POD can be changed to an instance of a polymorphic type by
1020 placement new. Here we play safe and assume that any
1021 non-polymorphic type is POD. */
1022 if ((TREE_CODE (type) != RECORD_TYPE
1023 || !TYPE_BINFO (type)
1024 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1025 && (!TYPE_SIZE (type)
1026 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1027 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1028 tree_to_uhwi (TYPE_SIZE (type)))))
1029 return true;
1030 return false;
1033 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1035 static bool
1036 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1037 tree otr_type)
1039 ipa_polymorphic_call_context context = {offset, outer_type,
1040 false, true};
1041 return get_class_context (&context, otr_type);
1044 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1046 static tree
1047 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1048 tree vtable)
1050 tree v = BINFO_VTABLE (binfo);
1051 int i;
1052 tree base_binfo;
1053 unsigned HOST_WIDE_INT this_offset;
1055 if (v)
1057 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1058 gcc_unreachable ();
1060 if (offset == this_offset
1061 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1062 return binfo;
1065 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1066 if (polymorphic_type_binfo_p (base_binfo))
1068 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1069 if (base_binfo)
1070 return base_binfo;
1072 return NULL;
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  /* Pattern-match &MEM[&vtable_var + cst]: an ADDR_EXPR of a MEM_REF
     whose base is the address of a DECL_VIRTUAL_P VAR_DECL and whose
     offset is a compile-time constant.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* After stripping the optional offset, only a plain address of the
     vtable decl remains to be accepted.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
1123 /* T is known constant value of virtual table pointer. Return BINFO of the
1124 instance type. */
1126 tree
1127 vtable_pointer_value_to_binfo (const_tree t)
1129 tree vtable;
1130 unsigned HOST_WIDE_INT offset;
1132 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1133 return NULL_TREE;
1135 /* FIXME: for stores of construction vtables we return NULL,
1136 because we do not have BINFO for those. Eventually we should fix
1137 our representation to allow this case to be handled, too.
1138 In the case we see store of BINFO we however may assume
1139 that standard folding will be ale to cope with it. */
1140 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1141 offset, vtable);
1144 /* Proudce polymorphic call context for call method of instance
1145 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1147 static void
1148 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1149 tree base, HOST_WIDE_INT offset)
1151 gcc_assert (DECL_P (base));
1153 context->outer_type = TREE_TYPE (base);
1154 context->offset = offset;
1155 /* Make very conservative assumption that all objects
1156 may be in construction.
1157 TODO: ipa-prop already contains code to tell better.
1158 merge it later. */
1159 context->maybe_in_construction = true;
1160 context->maybe_derived_type = false;
1163 /* CST is an invariant (address of decl), try to get meaningful
1164 polymorphic call context for polymorphic call of method
1165 if instance of OTR_TYPE that is located at OFFSET of this invariant.
1166 Return FALSE if nothing meaningful can be found. */
1168 bool
1169 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1170 tree cst,
1171 tree otr_type,
1172 HOST_WIDE_INT offset)
1174 HOST_WIDE_INT offset2, size, max_size;
1175 tree base;
1177 if (TREE_CODE (cst) != ADDR_EXPR)
1178 return false;
1180 cst = TREE_OPERAND (cst, 0);
1181 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1182 if (!DECL_P (base) || max_size == -1 || max_size != size)
1183 return false;
1185 /* Only type inconsistent programs can have otr_type that is
1186 not part of outer type. */
1187 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1188 return false;
1190 get_polymorphic_call_info_for_decl (context, base, offset);
1191 return true;
/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return pointer to object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->outer_type = *otr_type;
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = false;

  /* Walk SSA for outer object.  Each iteration strips one level of
     indirection (copy, address-of, or constant pointer offset) and
     accumulates the bit offset in CONTEXT->offset.  */
  do
    {
      /* Follow single-rhs SSA assignments backwards to the value they
	 copy from, stripping no-op conversions.  */
      if (TREE_CODE (base_pointer) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
	  && SSA_NAME_DEF_STMT (base_pointer)
	  && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
	{
	  base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
	  STRIP_NOPS (base_pointer);
	}
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		    += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    {
		      /* Use OTR_TOKEN = INT_MAX as a marker of probably type
			 inconsistent code sequences; we arrange the calls to be
			 builtin_unreachable later.  */
		      *otr_token = INT_MAX;
		      return base_pointer;
		    }
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset + offset2);
		  return NULL;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      /* Constant pointer adjustment: fold it into the accumulated offset
	 and continue from the adjusted pointer.  */
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
			     * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type
		 inconsistent code sequences; we arrange the calls to be
		 builtin_unreachable later.  */
	      *otr_token = INT_MAX;
	      return base_pointer;
	    }
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
	  return base_pointer;
	}
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
1355 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1356 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
1357 and insert them to NODES.
1359 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1361 static void
1362 record_targets_from_bases (tree otr_type,
1363 HOST_WIDE_INT otr_token,
1364 tree outer_type,
1365 HOST_WIDE_INT offset,
1366 vec <cgraph_node *> &nodes,
1367 pointer_set_t *inserted,
1368 pointer_set_t *matched_vtables,
1369 bool *completep)
1371 while (true)
1373 HOST_WIDE_INT pos, size;
1374 tree base_binfo;
1375 tree fld;
1377 if (types_same_for_odr (outer_type, otr_type))
1378 return;
1380 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1382 if (TREE_CODE (fld) != FIELD_DECL)
1383 continue;
1385 pos = int_bit_position (fld);
1386 size = tree_to_shwi (DECL_SIZE (fld));
1387 if (pos <= offset && (pos + size) > offset
1388 /* Do not get confused by zero sized bases. */
1389 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1390 break;
1392 /* Within a class type we should always find correcponding fields. */
1393 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1395 /* Nonbasetypes should have been stripped by outer_class_type. */
1396 gcc_assert (DECL_ARTIFICIAL (fld));
1398 outer_type = TREE_TYPE (fld);
1399 offset -= pos;
1401 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1402 offset, otr_type);
1403 if (!base_binfo)
1405 gcc_assert (odr_violation_reported);
1406 return;
1408 gcc_assert (base_binfo);
1409 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1411 bool can_refer;
1412 tree target = gimple_get_virt_method_for_binfo (otr_token,
1413 base_binfo,
1414 &can_refer);
1415 maybe_record_node (nodes, target, inserted, can_refer, completep);
1416 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1421 /* When virtual table is removed, we may need to flush the cache. */
1423 static void
1424 devirt_variable_node_removal_hook (varpool_node *n,
1425 void *d ATTRIBUTE_UNUSED)
1427 if (cached_polymorphic_call_targets
1428 && DECL_VIRTUAL_P (n->decl)
1429 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1430 free_polymorphic_call_targets_hash ();
1433 /* Return vector containing possible targets of polymorphic call of type
1434 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1435 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1436 OTR_TYPE and include their virtual method. This is useful for types
1437 possibly in construction or destruction where the virtual table may
1438 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1439 us to walk the inheritance graph for all derivations.
1441 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1442 undefined and should be redirected to unreachable.
1444 If COMPLETEP is non-NULL, store true if the list is complete.
1445 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1446 in the target cache. If user needs to visit every target list
1447 just once, it can memoize them.
1449 NONCONSTRUCTION_TARGETS specify number of targets with asumption that
1450 the type is not in the construction. Those targets appear first in the
1451 vector returned.
1453 Returned vector is placed into cache. It is NOT caller's responsibility
1454 to free it. The vector can be freed on cgraph_remove_node call if
1455 the particular node is a virtual function present in the cache. */
1457 vec <cgraph_node *>
1458 possible_polymorphic_call_targets (tree otr_type,
1459 HOST_WIDE_INT otr_token,
1460 ipa_polymorphic_call_context context,
1461 bool *completep,
1462 void **cache_token,
1463 int *nonconstruction_targetsp)
1465 static struct cgraph_node_hook_list *node_removal_hook_holder;
1466 pointer_set_t *inserted;
1467 pointer_set_t *matched_vtables;
1468 vec <cgraph_node *> nodes = vNULL;
1469 odr_type type, outer_type;
1470 polymorphic_call_target_d key;
1471 polymorphic_call_target_d **slot;
1472 unsigned int i;
1473 tree binfo, target;
1474 bool complete;
1475 bool can_refer;
1477 /* If ODR is not initialized, return empty incomplete list. */
1478 if (!odr_hash.is_created ())
1480 if (completep)
1481 *completep = false;
1482 if (cache_token)
1483 *cache_token = NULL;
1484 if (nonconstruction_targetsp)
1485 *nonconstruction_targetsp = 0;
1486 return nodes;
1489 /* If we hit type inconsistency, just return empty list of targets. */
1490 if (otr_token == INT_MAX)
1492 if (completep)
1493 *completep = true;
1494 if (cache_token)
1495 *cache_token = NULL;
1496 if (nonconstruction_targetsp)
1497 *nonconstruction_targetsp = 0;
1498 return nodes;
1501 type = get_odr_type (otr_type, true);
1503 /* Lookup the outer class type we want to walk. */
1504 if (context.outer_type
1505 && !get_class_context (&context, otr_type))
1507 if (completep)
1508 *completep = false;
1509 if (cache_token)
1510 *cache_token = NULL;
1511 if (nonconstruction_targetsp)
1512 *nonconstruction_targetsp = 0;
1513 return nodes;
1516 /* We canonicalize our query, so we do not need extra hashtable entries. */
1518 /* Without outer type, we have no use for offset. Just do the
1519 basic search from innter type */
1520 if (!context.outer_type)
1522 context.outer_type = otr_type;
1523 context.offset = 0;
1525 /* We need to update our hiearchy if the type does not exist. */
1526 outer_type = get_odr_type (context.outer_type, true);
1527 /* If outer and inner type match, there are no bases to see. */
1528 if (type == outer_type)
1529 context.maybe_in_construction = false;
1530 /* If the type is complete, there are no derivations. */
1531 if (TYPE_FINAL_P (outer_type->type))
1532 context.maybe_derived_type = false;
1534 /* Initialize query cache. */
1535 if (!cached_polymorphic_call_targets)
1537 cached_polymorphic_call_targets = pointer_set_create ();
1538 polymorphic_call_target_hash.create (23);
1539 if (!node_removal_hook_holder)
1541 node_removal_hook_holder =
1542 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1543 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1544 NULL);
1548 /* Lookup cached answer. */
1549 key.type = type;
1550 key.otr_token = otr_token;
1551 key.context = context;
1552 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1553 if (cache_token)
1554 *cache_token = (void *)*slot;
1555 if (*slot)
1557 if (completep)
1558 *completep = (*slot)->complete;
1559 if (nonconstruction_targetsp)
1560 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1561 return (*slot)->targets;
1564 complete = true;
1566 /* Do actual search. */
1567 timevar_push (TV_IPA_VIRTUAL_CALL);
1568 *slot = XCNEW (polymorphic_call_target_d);
1569 if (cache_token)
1570 *cache_token = (void *)*slot;
1571 (*slot)->type = type;
1572 (*slot)->otr_token = otr_token;
1573 (*slot)->context = context;
1575 inserted = pointer_set_create ();
1576 matched_vtables = pointer_set_create ();
1578 /* First see virtual method of type itself. */
1579 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1580 context.offset, otr_type);
1581 if (binfo)
1582 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1583 &can_refer);
1584 else
1586 gcc_assert (odr_violation_reported);
1587 target = NULL;
1590 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1592 if (target)
1594 /* In the case we get complete method, we don't need
1595 to walk derivations. */
1596 if (DECL_FINAL_P (target))
1597 context.maybe_derived_type = false;
1599 else
1600 gcc_assert (!complete);
1602 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1604 /* Next walk recursively all derived types. */
1605 if (context.maybe_derived_type)
1607 /* For anonymous namespace types we can attempt to build full type.
1608 All derivations must be in this unit (unless we see partial unit). */
1609 if (!type->anonymous_namespace || flag_ltrans)
1610 complete = false;
1611 for (i = 0; i < outer_type->derived_types.length(); i++)
1612 possible_polymorphic_call_targets_1 (nodes, inserted,
1613 matched_vtables,
1614 otr_type,
1615 outer_type->derived_types[i],
1616 otr_token, outer_type->type,
1617 context.offset, &complete);
1620 /* Finally walk bases, if asked to. */
1621 (*slot)->nonconstruction_targets = nodes.length();
1622 if (context.maybe_in_construction)
1623 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1624 context.offset, nodes, inserted,
1625 matched_vtables, &complete);
1627 (*slot)->targets = nodes;
1628 (*slot)->complete = complete;
1629 if (completep)
1630 *completep = complete;
1631 if (nonconstruction_targetsp)
1632 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1634 pointer_set_destroy (inserted);
1635 pointer_set_destroy (matched_vtables);
1636 timevar_pop (TV_IPA_VIRTUAL_CALL);
1637 return nodes;
1640 /* Dump all possible targets of a polymorphic call. */
1642 void
1643 dump_possible_polymorphic_call_targets (FILE *f,
1644 tree otr_type,
1645 HOST_WIDE_INT otr_token,
1646 const ipa_polymorphic_call_context &ctx)
1648 vec <cgraph_node *> targets;
1649 bool final;
1650 odr_type type = get_odr_type (otr_type, false);
1651 unsigned int i;
1652 int nonconstruction;
1654 if (!type)
1655 return;
1656 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1657 ctx,
1658 &final, NULL, &nonconstruction);
1659 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1660 print_generic_expr (f, type->type, TDF_SLIM);
1661 fprintf (f, " token %i\n", (int)otr_token);
1662 if (ctx.outer_type || ctx.offset)
1664 fprintf (f, " Contained in type:");
1665 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1666 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1667 ctx.offset);
1670 fprintf (f, " %s%s%s\n ",
1671 final ? "This is a complete list." :
1672 "This is partial list; extra targets may be defined in other units.",
1673 ctx.maybe_in_construction ? " (base types included)" : "",
1674 ctx.maybe_derived_type ? " (derived types included)" : "");
1675 for (i = 0; i < targets.length (); i++)
1677 char *name = NULL;
1678 if (i == (unsigned)nonconstruction)
1679 fprintf (f, "\n If the type is in construction,"
1680 " then additional tarets are:\n"
1681 " ");
1682 if (in_lto_p)
1683 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1684 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1685 if (in_lto_p)
1686 free (name);
1687 if (!targets[i]->definition)
1688 fprintf (f, " (no definition%s)",
1689 DECL_DECLARED_INLINE_P (targets[i]->decl)
1690 ? " inline" : "");
1692 fprintf (f, "\n\n");
1696 /* Return true if N can be possibly target of a polymorphic call of
1697 OTR_TYPE/OTR_TOKEN. */
1699 bool
1700 possible_polymorphic_call_target_p (tree otr_type,
1701 HOST_WIDE_INT otr_token,
1702 const ipa_polymorphic_call_context &ctx,
1703 struct cgraph_node *n)
1705 vec <cgraph_node *> targets;
1706 unsigned int i;
1707 enum built_in_function fcode;
1708 bool final;
1710 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1711 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1712 == BUILT_IN_UNREACHABLE
1713 || fcode == BUILT_IN_TRAP))
1714 return true;
1716 if (!odr_hash.is_created ())
1717 return true;
1718 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1719 for (i = 0; i < targets.length (); i++)
1720 if (symtab_semantically_equivalent_p (n, targets[i]))
1721 return true;
1723 /* At a moment we allow middle end to dig out new external declarations
1724 as a targets of polymorphic calls. */
1725 if (!final && !n->definition)
1726 return true;
1727 return false;
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash.is_created ())
    return;
  /* Cached target lists may be invalidated by the new types; drop them.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  Only virtual methods without a local definition need to be
     added; defined ones were registered when the graph was first built.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
1754 /* Return true if N looks like likely target of a polymorphic call.
1755 Rule out cxa_pure_virtual, noreturns, function declared cold and
1756 other obvious cases. */
1758 bool
1759 likely_target_p (struct cgraph_node *n)
1761 int flags;
1762 /* cxa_pure_virtual and similar things are not likely. */
1763 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1764 return false;
1765 flags = flags_from_decl_or_type (n->decl);
1766 if (flags & ECF_NORETURN)
1767 return false;
1768 if (lookup_attribute ("cold",
1769 DECL_ATTRIBUTES (n->decl)))
1770 return false;
1771 if (n->frequency < NODE_FREQUENCY_NORMAL)
1772 return false;
1773 return true;
/* The ipa-devirt pass.
   When polymorphic call has only one likely target in the unit,
   turn it into speculative call.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  struct pointer_set_t *bad_call_targets = pointer_set_create ();
  struct cgraph_edge *e;

  /* Statistics reported in the dump file at the end of the pass.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;
	    int nonconstruction_targets;
	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		    (e, &final, &cache_token, &nonconstruction_targets);
	    unsigned int i;

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* Cold calls are not worth speculating on.  */
	    if (!cgraph_maybe_hot_edge_p (e))
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is aready speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    /* CACHE_TOKEN identifies the target list; once a list was
	       found useless we never re-examine it.  */
	    if (pointer_set_contains (bad_call_targets,
				      cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Pick the unique likely target; a second likely target only
	       disqualifies the call when it is a nonconstruction target
	       (targets reachable merely during construction are listed
	       after index NONCONSTRUCTION_TARGETS).  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      if (i < (unsigned) nonconstruction_targets)
			{
			  likely_target = NULL;
			  if (dump_file)
			    fprintf (dump_file, "More than one likely target\n\n");
			  nmultiple++;
			}
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		pointer_set_insert (bad_call_targets, cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		cgraph_speculative_call_info (e, e2, e, ref);
		if (cgraph_function_or_thunk_node (e2->callee, NULL)
		    == cgraph_function_or_thunk_node (likely_target, NULL))
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not an definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to be
	       incorrectly linked with headers declaring methods they do not
	       define.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = cgraph_function_node (likely_target);
	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    /* Bodies that may be replaced at link time cannot be
	       speculated on unless a non-overwritable alias exists.  */
	    if (cgraph_function_body_availability (likely_target)
		<= AVAIL_OVERWRITABLE
		&& symtab_can_be_discarded (likely_target))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else
	      {
		if (dump_file)
		  fprintf (dump_file,
			   "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
			   n->name (), n->order,
			   likely_target->name (),
			   likely_target->order);
		if (!symtab_can_be_discarded (likely_target))
		  {
		    cgraph_node *alias;
		    alias = cgraph (symtab_nonoverwritable_alias
				     (likely_target));
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* Use 80% probability/frequency for the speculative edge.  */
		cgraph_turn_edge_to_speculative
		  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }
  pointer_set_destroy (bad_call_targets);

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial);
  return ndevirtualized ? TODO_remove_functions : 0;
}
1958 /* Gate for speculative IPA devirtualization optimization. */
1960 static bool
1961 gate_ipa_devirt (void)
1963 return (flag_devirtualize
1964 && flag_devirtualize_speculatively
1965 && optimize);
namespace {

/* Static pass descriptor for the speculative devirtualization pass.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* Pass wrapper; gate and execute simply forward to the free functions
   above.  */
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
2009 ipa_opt_pass_d *
2010 make_pass_ipa_devirt (gcc::context *ctxt)
2012 return new pass_ipa_devirt (ctxt);
2015 #include "gt-ipa-devirt.h"