[official-gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from the
58 virtual table of the base type. Also BINFO_OFFSET specifies the
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of the base BINFO is NULL. In the case of multiple
63 inheritance, the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differs from BINFO_VTABLE of
65 the binfo associated with the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
72 token
73 This is an index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from the
80 original OBJ_TYPE_REF at callgraph construction time.
82 What we do here:
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by the ODR rule.
96 (the multiple type nodes appear only with link-time optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
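   As a purely illustrative example (the class and function names are made
   up and do not appear anywhere in GCC), consider this C++ translation unit:

     struct Base { virtual int f (); };        // polymorphic: has a vtable
     struct Derived : Base { int f (); };      // derived type seen in the unit

     int call (Base *b) { return b->f (); }    // polymorphic (indirect) call

   The call b->f () is represented by an OBJ_TYPE_REF whose otr_type is Base
   and whose otr_token is the index of f in Base's virtual table (0 here,
   assuming f is the first virtual method).  The type inheritance graph built
   below gets odr_type nodes for Base and Derived linked through
   bases/derived_types, and possible_polymorphic_call_targets for this call
   would list Base::f and Derived::f (provided both are defined in the unit).  */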
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
133 static bool odr_violation_reported = false;
135 /* Dummy polymorphic call context. */
137 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
138 = {0, NULL, false, true};
140 /* Pointer set of all call targets appearing in the cache. */
141 static pointer_set_t *cached_polymorphic_call_targets;
143 /* The node of the type inheritance graph. For each type unique in the
144 One Definition Rule (ODR) sense, we produce one node linking all
145 main variants of types equivalent to it, bases and derived types. */
147 struct GTY(()) odr_type_d
149 /* leader type. */
150 tree type;
151 /* All bases. */
152 vec<odr_type> GTY((skip)) bases;
153 /* All derived types with virtual methods seen in the unit. */
154 vec<odr_type> GTY((skip)) derived_types;
156 /* All equivalent types, if more than one. */
157 vec<tree, va_gc> *types;
158 /* Set of all equivalent types, if NON-NULL. */
159 pointer_set_t * GTY((skip)) types_set;
161 /* Unique ID indexing the type in odr_types array. */
162 int id;
163 /* Is it in anonymous namespace? */
164 bool anonymous_namespace;
168 /* Return true if BINFO corresponds to a type with virtual methods.
170 Every type has several BINFOs. One is the BINFO associated with the type
171 itself, while the others represent bases of derived types. The BINFOs representing
172 bases do not have the BINFO_VTABLE pointer set in the single-inheritance
173 case (because vtables are shared). Look up the BINFO of the type
174 and check for the presence of its vtable. */
176 static inline bool
177 polymorphic_type_binfo_p (tree binfo)
179 /* See if BINFO's type has a virtual table associated with it. */
180 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
183 /* One Definition Rule hashtable helpers. */
185 struct odr_hasher
187 typedef odr_type_d value_type;
188 typedef union tree_node compare_type;
189 static inline hashval_t hash (const value_type *);
190 static inline bool equal (const value_type *, const compare_type *);
191 static inline void remove (value_type *);
194 /* Produce hash based on type name. */
196 hashval_t
197 hash_type_name (tree t)
199 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
201 /* If not in LTO, all main variants are unique, so we can do
202 pointer hash. */
203 if (!in_lto_p)
204 return htab_hash_pointer (t);
206 /* Anonymous types are unique. */
207 if (type_in_anonymous_namespace_p (t))
208 return htab_hash_pointer (t);
210 /* For polymorphic types, we can simply hash the virtual table. */
211 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
213 tree v = BINFO_VTABLE (TYPE_BINFO (t));
214 hashval_t hash = 0;
216 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
218 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
219 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
222 v = DECL_ASSEMBLER_NAME (v);
223 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
224 return hash;
227 /* Rest is not implemented yet. */
228 gcc_unreachable ();
231 /* Return the computed hashcode for ODR_TYPE. */
233 inline hashval_t
234 odr_hasher::hash (const value_type *odr_type)
236 return hash_type_name (odr_type->type);
239 /* Compare types T1 and T2 and return true if they are
240 equivalent. */
242 inline bool
243 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
245 tree t2 = const_cast <tree> (ct2);
247 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
248 if (t1->type == t2)
249 return true;
250 if (!in_lto_p)
251 return false;
252 return types_same_for_odr (t1->type, t2);
255 /* Free ODR type V. */
257 inline void
258 odr_hasher::remove (value_type *v)
260 v->bases.release ();
261 v->derived_types.release ();
262 if (v->types_set)
263 pointer_set_destroy (v->types_set);
264 ggc_free (v);
267 /* ODR type hash used to lookup ODR type based on tree type node. */
269 typedef hash_table <odr_hasher> odr_hash_type;
270 static odr_hash_type odr_hash;
272 /* ODR types are also stored into the ODR_TYPE vector to allow consistent
273 walking. Bases appear before derived types. The vector is garbage collected
274 so we won't end up visiting empty types. */
276 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
277 #define odr_types (*odr_types_ptr)
279 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
280 from VAL->type. This may happen in LTO where tree merging did not merge
281 all variants of the same type. It may or may not mean an ODR violation.
282 Add it to the list of duplicates and warn on some violations. */
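/* As a purely illustrative example of such a violation (the class name is
   made up), consider two translation units compiled with -flto that both
   define a polymorphic class S, but with different layouts:

     // tu1.C
     struct S { virtual int f (); int a; };

     // tu2.C
     struct S { virtual int f (); long a, b; };

   Both definitions have the same ODR name (and the same mangled vtable
   name), so during LTO they can end up as duplicates in a single odr_type
   node; types_compatible_p then fails on the layout comparison and the
   "violates one definition rule" warning below is issued.  */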
284 static void
285 add_type_duplicate (odr_type val, tree type)
287 if (!val->types_set)
288 val->types_set = pointer_set_create ();
290 /* See if this duplicate is new. */
291 if (!pointer_set_insert (val->types_set, type))
293 bool merge = true;
294 bool base_mismatch = false;
295 gcc_assert (in_lto_p);
296 vec_safe_push (val->types, type);
297 unsigned int i,j;
299 /* First we compare memory layout. */
300 if (!types_compatible_p (val->type, type))
302 merge = false;
303 odr_violation_reported = true;
304 if (BINFO_VTABLE (TYPE_BINFO (val->type))
305 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
306 "type %qD violates one definition rule ",
307 type))
308 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
309 "a type with the same name but different layout is "
310 "defined in another translation unit");
311 if (cgraph_dump_file)
313 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
315 print_node (cgraph_dump_file, "", val->type, 0);
316 putc ('\n',cgraph_dump_file);
317 print_node (cgraph_dump_file, "", type, 0);
318 putc ('\n',cgraph_dump_file);
322 /* Next, sanity check that the bases are the same. If not, we will end
323 up producing wrong answers. */
324 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
325 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
327 odr_type base = get_odr_type
328 (BINFO_TYPE
329 (BINFO_BASE_BINFO (TYPE_BINFO (type),
330 i)),
331 true);
332 if (val->bases.length () <= j || val->bases[j] != base)
333 base_mismatch = true;
334 j++;
336 if (base_mismatch)
338 merge = false;
339 odr_violation_reported = true;
341 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
342 "type %qD violates one definition rule ",
343 type))
344 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
345 "a type with the same name but different bases is "
346 "defined in another translation unit");
347 if (cgraph_dump_file)
349 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
351 print_node (cgraph_dump_file, "", val->type, 0);
352 putc ('\n',cgraph_dump_file);
353 print_node (cgraph_dump_file, "", type, 0);
354 putc ('\n',cgraph_dump_file);
358 /* Regularize things a little. During LTO, the same types may come with
359 different BINFOs, either because their virtual table was
360 not merged by tree merging and only later at decl merging, or
361 because one type comes with an external vtable, while the other
362 comes with an internal one. We want to merge equivalent binfos to conserve
363 memory and streaming overhead.
365 The external vtables are more harmful: they contain references
366 to external declarations of methods that may be defined in the
367 merged LTO unit. For this reason we absolutely need to remove
368 them and replace them by internal variants. Not doing so will lead
369 to incomplete answers from possible_polymorphic_call_targets. */
370 if (!flag_ltrans && merge)
372 tree master_binfo = TYPE_BINFO (val->type);
373 tree v1 = BINFO_VTABLE (master_binfo);
374 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
376 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
378 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
379 && operand_equal_p (TREE_OPERAND (v1, 1),
380 TREE_OPERAND (v2, 1), 0));
381 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
382 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
384 gcc_assert (DECL_ASSEMBLER_NAME (v1)
385 == DECL_ASSEMBLER_NAME (v2));
387 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
389 unsigned int i;
391 TYPE_BINFO (val->type) = TYPE_BINFO (type);
392 for (i = 0; i < val->types->length (); i++)
394 if (TYPE_BINFO ((*val->types)[i])
395 == master_binfo)
396 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
399 else
400 TYPE_BINFO (type) = master_binfo;
405 /* Get the ODR type hash entry for TYPE. If INSERT is true, create a
406 possibly new entry. */
408 odr_type
409 get_odr_type (tree type, bool insert)
411 odr_type_d **slot;
412 odr_type val;
413 hashval_t hash;
415 type = TYPE_MAIN_VARIANT (type);
416 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
417 hash = hash_type_name (type);
418 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
419 if (!slot)
420 return NULL;
422 /* See if we already have entry for type. */
423 if (*slot)
425 val = *slot;
427 /* With LTO we need to support multiple tree representations of
428 the same ODR type. */
429 if (val->type != type)
430 add_type_duplicate (val, type);
432 else
434 tree binfo = TYPE_BINFO (type);
435 unsigned int i;
437 val = ggc_alloc_cleared_odr_type_d ();
438 val->type = type;
439 val->bases = vNULL;
440 val->derived_types = vNULL;
441 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
442 *slot = val;
443 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
444 /* For now record only polymorphic types. Others are
445 pointless for devirtualization and we cannot precisely
446 determine ODR equivalency of these during LTO. */
447 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
449 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
450 i)),
451 true);
452 base->derived_types.safe_push (val);
453 val->bases.safe_push (base);
455 /* First record bases, then add into array so ids are increasing. */
456 if (odr_types_ptr)
457 val->id = odr_types.length ();
458 vec_safe_push (odr_types_ptr, val);
460 return val;
463 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
464 recursive printing. */
466 static void
467 dump_odr_type (FILE *f, odr_type t, int indent=0)
469 unsigned int i;
470 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
471 print_generic_expr (f, t->type, TDF_SLIM);
472 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
473 if (TYPE_NAME (t->type))
475 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
476 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
477 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
479 if (t->bases.length ())
481 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
482 for (i = 0; i < t->bases.length (); i++)
483 fprintf (f, " %i", t->bases[i]->id);
484 fprintf (f, "\n");
486 if (t->derived_types.length ())
488 fprintf (f, "%*s derived types:\n", indent * 2, "");
489 for (i = 0; i < t->derived_types.length (); i++)
490 dump_odr_type (f, t->derived_types[i], indent + 1);
492 fprintf (f, "\n");
495 /* Dump the type inheritance graph. */
497 static void
498 dump_type_inheritance_graph (FILE *f)
500 unsigned int i;
501 if (!odr_types_ptr)
502 return;
503 fprintf (f, "\n\nType inheritance graph:\n");
504 for (i = 0; i < odr_types.length (); i++)
506 if (odr_types[i]->bases.length () == 0)
507 dump_odr_type (f, odr_types[i]);
509 for (i = 0; i < odr_types.length (); i++)
511 if (odr_types[i]->types && odr_types[i]->types->length ())
513 unsigned int j;
514 fprintf (f, "Duplicate tree types for odr type %i\n", i);
515 print_node (f, "", odr_types[i]->type, 0);
516 for (j = 0; j < odr_types[i]->types->length (); j++)
518 tree t;
519 fprintf (f, "duplicate #%i\n", j);
520 print_node (f, "", (*odr_types[i]->types)[j], 0);
521 t = (*odr_types[i]->types)[j];
522 while (TYPE_P (t) && TYPE_CONTEXT (t))
524 t = TYPE_CONTEXT (t);
525 print_node (f, "", t, 0);
527 putc ('\n',f);
533 /* Given method type T, return the type of the class it belongs to.
534 Look up the this pointer and get its type. */
536 tree
537 method_class_type (tree t)
539 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
540 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
542 return TREE_TYPE (first_parm_type);
545 /* Initialize IPA devirt and build the type inheritance graph. */
547 void
548 build_type_inheritance_graph (void)
550 struct symtab_node *n;
551 FILE *inheritance_dump_file;
552 int flags;
554 if (odr_hash.is_created ())
555 return;
556 timevar_push (TV_IPA_INHERITANCE);
557 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
558 odr_hash.create (23);
560 /* We reconstruct the graph starting from the types of all methods seen in
561 the unit. */
562 FOR_EACH_SYMBOL (n)
563 if (is_a <cgraph_node> (n)
564 && DECL_VIRTUAL_P (n->decl)
565 && symtab_real_symbol_p (n))
566 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
568 /* Look also for virtual tables of types that do not define any methods.
570 We need it in the case where class B has a virtual base of class A
571 re-defining its virtual method and there is a class C with no virtual
572 methods with B as a virtual base.
574 Here we output B's virtual method in two variants - for non-virtual
575 and virtual inheritance. B's virtual table has the non-virtual version,
576 while C's has the virtual one.
578 For this reason we need to know about C in order to include both
579 variants of B. More correctly, record_target_from_binfo should
580 add both variants of the method when walking B, but we have no
581 link between them.
583 We rely on the fact that either the method is exported and thus we
584 assume it is called externally, or C is in an anonymous namespace and
585 thus we will see the vtable. */
587 else if (is_a <varpool_node> (n)
588 && DECL_VIRTUAL_P (n->decl)
589 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
590 && TYPE_BINFO (DECL_CONTEXT (n->decl))
591 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
592 get_odr_type (DECL_CONTEXT (n->decl), true);
593 if (inheritance_dump_file)
595 dump_type_inheritance_graph (inheritance_dump_file);
596 dump_end (TDI_inheritance, inheritance_dump_file);
598 timevar_pop (TV_IPA_INHERITANCE);
601 /* If TARGET has an associated node, record it in the NODES array.
602 CAN_REFER specifies whether the program can refer to the target directly.
603 If TARGET is unknown (NULL) or it cannot be inserted (for example because
604 its body was already removed and there is no way to refer to it), clear
605 COMPLETEP. */
607 static void
608 maybe_record_node (vec <cgraph_node *> &nodes,
609 tree target, pointer_set_t *inserted,
610 bool can_refer,
611 bool *completep)
613 struct cgraph_node *target_node;
614 enum built_in_function fcode;
616 if (!can_refer)
618 /* The only case when a method of an anonymous namespace becomes unreferable
619 is when we have completely optimized it out. */
620 if (flag_ltrans
621 || !target
622 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
623 *completep = false;
624 return;
627 if (!target
628 /* Those are used to mark impossible scenarios. */
629 || (fcode = DECL_FUNCTION_CODE (target))
630 == BUILT_IN_UNREACHABLE
631 || fcode == BUILT_IN_TRAP)
632 return;
634 target_node = cgraph_get_node (target);
636 if (target_node != NULL
637 && (TREE_PUBLIC (target)
638 || target_node->definition)
639 && symtab_real_symbol_p (target_node))
641 gcc_assert (!target_node->global.inlined_to);
642 gcc_assert (symtab_real_symbol_p (target_node));
643 if (!pointer_set_insert (inserted, target))
645 pointer_set_insert (cached_polymorphic_call_targets,
646 target_node);
647 nodes.safe_push (target_node);
650 else if (completep
651 && !type_in_anonymous_namespace_p
652 (method_class_type (TREE_TYPE (target))))
653 *completep = false;
656 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
657 BINFO of the subtype of OTR_TYPE at OFFSET, find the
658 method in that BINFO's vtable and insert it into the NODES array.
659 Otherwise recurse into base BINFOs.
660 This matches what get_binfo_at_offset does, but with the offset
661 being unknown.
663 TYPE_BINFOS is a stack of BINFOs of types with a defined
664 virtual table seen on the way from the class type to BINFO.
666 MATCHED_VTABLES tracks virtual tables in which we already did the
667 lookup for the virtual function. INSERTED tracks nodes we already
668 inserted.
670 ANONYMOUS is true if BINFO is part of an anonymous namespace.
672 Clear COMPLETEP when we hit an unreferable target.
675 static void
676 record_target_from_binfo (vec <cgraph_node *> &nodes,
677 tree binfo,
678 tree otr_type,
679 vec <tree> &type_binfos,
680 HOST_WIDE_INT otr_token,
681 tree outer_type,
682 HOST_WIDE_INT offset,
683 pointer_set_t *inserted,
684 pointer_set_t *matched_vtables,
685 bool anonymous,
686 bool *completep)
688 tree type = BINFO_TYPE (binfo);
689 int i;
690 tree base_binfo;
693 if (BINFO_VTABLE (binfo))
694 type_binfos.safe_push (binfo);
695 if (types_same_for_odr (type, outer_type))
697 int i;
698 tree type_binfo = NULL;
700 /* Look up the BINFO with a virtual table. For normal types it is always the last
701 binfo on the stack. */
702 for (i = type_binfos.length () - 1; i >= 0; i--)
703 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
705 type_binfo = type_binfos[i];
706 break;
708 if (BINFO_VTABLE (binfo))
709 type_binfos.pop ();
710 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
711 we may not have its associated vtable. This is not a problem, since
712 we will walk it on the other path. */
713 if (!type_binfo)
714 return;
715 tree inner_binfo = get_binfo_at_offset (type_binfo,
716 offset, otr_type);
717 if (!inner_binfo)
719 gcc_assert (odr_violation_reported);
720 return;
722 /* For types in an anonymous namespace, first check whether the respective vtable
723 is alive. If not, we know the type can't be called. */
724 if (!flag_ltrans && anonymous)
726 tree vtable = BINFO_VTABLE (inner_binfo);
727 varpool_node *vnode;
729 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
730 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
731 vnode = varpool_get_node (vtable);
732 if (!vnode || !vnode->definition)
733 return;
735 gcc_assert (inner_binfo);
736 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
738 bool can_refer;
739 tree target = gimple_get_virt_method_for_binfo (otr_token,
740 inner_binfo,
741 &can_refer);
742 maybe_record_node (nodes, target, inserted, can_refer, completep);
744 return;
747 /* Walk bases. */
748 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
749 /* Walking bases that have no virtual method is a pointless exercise. */
750 if (polymorphic_type_binfo_p (base_binfo))
751 record_target_from_binfo (nodes, base_binfo, otr_type,
752 type_binfos,
753 otr_token, outer_type, offset, inserted,
754 matched_vtables, anonymous, completep);
755 if (BINFO_VTABLE (binfo))
756 type_binfos.pop ();
759 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
760 of TYPE, insert them into NODES, and recurse into derived types.
761 INSERTED is used to avoid duplicate insertions of methods into NODES.
762 MATCHED_VTABLES is used to avoid walking vtables more than once.
763 Clear COMPLETEP if an unreferable target is found. */
765 static void
766 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
767 pointer_set_t *inserted,
768 pointer_set_t *matched_vtables,
769 tree otr_type,
770 odr_type type,
771 HOST_WIDE_INT otr_token,
772 tree outer_type,
773 HOST_WIDE_INT offset,
774 bool *completep)
776 tree binfo = TYPE_BINFO (type->type);
777 unsigned int i;
778 vec <tree> type_binfos = vNULL;
780 record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
781 outer_type, offset,
782 inserted, matched_vtables,
783 type->anonymous_namespace, completep);
784 type_binfos.release ();
785 for (i = 0; i < type->derived_types.length (); i++)
786 possible_polymorphic_call_targets_1 (nodes, inserted,
787 matched_vtables,
788 otr_type,
789 type->derived_types[i],
790 otr_token, outer_type, offset, completep);
793 /* Cache of queries for polymorphic call targets.
795 Enumerating all call targets may get expensive when there are many
796 polymorphic calls in the program, so we memoize all the previous
797 queries and avoid duplicated work. */
799 struct polymorphic_call_target_d
801 HOST_WIDE_INT otr_token;
802 ipa_polymorphic_call_context context;
803 odr_type type;
804 vec <cgraph_node *> targets;
805 int nonconstruction_targets;
806 bool complete;
809 /* Polymorphic call target cache helpers. */
811 struct polymorphic_call_target_hasher
813 typedef polymorphic_call_target_d value_type;
814 typedef polymorphic_call_target_d compare_type;
815 static inline hashval_t hash (const value_type *);
816 static inline bool equal (const value_type *, const compare_type *);
817 static inline void remove (value_type *);
820 /* Return the computed hashcode for ODR_QUERY. */
822 inline hashval_t
823 polymorphic_call_target_hasher::hash (const value_type *odr_query)
825 hashval_t hash;
827 hash = iterative_hash_host_wide_int
828 (odr_query->otr_token,
829 odr_query->type->id);
830 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
831 hash);
832 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
833 return iterative_hash_hashval_t
834 (((int)odr_query->context.maybe_in_construction << 1)
835 | (int)odr_query->context.maybe_derived_type, hash);
838 /* Compare cache entries T1 and T2. */
840 inline bool
841 polymorphic_call_target_hasher::equal (const value_type *t1,
842 const compare_type *t2)
844 return (t1->type == t2->type && t1->otr_token == t2->otr_token
845 && t1->context.offset == t2->context.offset
846 && t1->context.outer_type == t2->context.outer_type
847 && t1->context.maybe_in_construction
848 == t2->context.maybe_in_construction
849 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
852 /* Remove entry in polymorphic call target cache hash. */
854 inline void
855 polymorphic_call_target_hasher::remove (value_type *v)
857 v->targets.release ();
858 free (v);
861 /* Polymorphic call target query cache. */
863 typedef hash_table <polymorphic_call_target_hasher>
864 polymorphic_call_target_hash_type;
865 static polymorphic_call_target_hash_type polymorphic_call_target_hash;
867 /* Destroy polymorphic call target query cache. */
869 static void
870 free_polymorphic_call_targets_hash ()
872 if (cached_polymorphic_call_targets)
874 polymorphic_call_target_hash.dispose ();
875 pointer_set_destroy (cached_polymorphic_call_targets);
876 cached_polymorphic_call_targets = NULL;
882 /* When a virtual function is removed, we may need to flush the cache. */
882 static void
883 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
885 if (cached_polymorphic_call_targets
886 && pointer_set_contains (cached_polymorphic_call_targets, n))
887 free_polymorphic_call_targets_hash ();
890 /* CONTEXT->OUTER_TYPE is the type of the memory object where an object of EXPECTED_TYPE
891 is contained at CONTEXT->OFFSET. Walk the memory representation of
892 CONTEXT->OUTER_TYPE and find the outermost class type that matches
893 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update CONTEXT
894 to represent it.
896 For example, when CONTEXT represents the type
897 class A
899 int a;
900 class B b;
902 and we look for the type at offset sizeof(int), we end up with B and offset 0.
903 If the same is produced by multiple inheritance, we end up with A and offset
904 sizeof(int).
906 If we cannot find a corresponding class, give up by setting
907 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
908 Return true when the lookup was successful. */
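/* A slightly more concrete sketch of the example above (purely
   illustrative; the class names are made up):

     struct B  { virtual void g (); };
     struct A  { int a; B b; };          // B contained as an ordinary field
     struct A2 : public B { int a; };    // B contained as a base

   Looking for B at the bit offset of the field b within A, the loop below
   steps into a non-artificial FIELD_DECL, so CONTEXT is narrowed to
   outer_type == B, offset == 0, and maybe_derived_type is cleared.  Looking
   for B at offset 0 within A2, B is reached through a DECL_ARTIFICIAL base
   field, so CONTEXT keeps outer_type == A2 and the original offset.  */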
910 static bool
911 get_class_context (ipa_polymorphic_call_context *context,
912 tree expected_type)
914 tree type = context->outer_type;
915 HOST_WIDE_INT offset = context->offset;
917 /* Find the sub-object the constant actually refers to and mark whether it is
918 an artificial one (as opposed to a user-defined one). */
919 while (true)
921 HOST_WIDE_INT pos, size;
922 tree fld;
924 /* On a match, just return what we found. */
925 if (TREE_CODE (type) == TREE_CODE (expected_type)
926 && types_same_for_odr (type, expected_type))
928 /* A type cannot contain itself at a non-zero offset. In that case
929 just give up. */
930 if (offset != 0)
931 goto give_up;
932 gcc_assert (offset == 0);
933 return true;
936 /* Walk the fields and find the corresponding one at OFFSET. */
937 if (TREE_CODE (type) == RECORD_TYPE)
939 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
941 if (TREE_CODE (fld) != FIELD_DECL)
942 continue;
944 pos = int_bit_position (fld);
945 size = tree_to_uhwi (DECL_SIZE (fld));
946 if (pos <= offset && (pos + size) > offset)
947 break;
950 if (!fld)
951 goto give_up;
953 type = TREE_TYPE (fld);
954 offset -= pos;
955 /* DECL_ARTIFICIAL represents a basetype. */
956 if (!DECL_ARTIFICIAL (fld))
958 context->outer_type = type;
959 context->offset = offset;
960 /* As soon as we see a field containing the type,
961 we know we are not looking for derivations. */
962 context->maybe_derived_type = false;
965 else if (TREE_CODE (type) == ARRAY_TYPE)
967 tree subtype = TREE_TYPE (type);
969 /* Give up if we don't know the array element size. */
970 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
971 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
972 goto give_up;
973 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
974 type = subtype;
975 context->outer_type = type;
976 context->offset = offset;
977 context->maybe_derived_type = false;
979 /* Give up on anything else. */
980 else
981 goto give_up;
984 /* If we failed to find the subtype we were looking for, give up and fall back
985 to the most generic query. */
986 give_up:
987 context->outer_type = expected_type;
988 context->offset = 0;
989 context->maybe_derived_type = true;
990 context->maybe_in_construction = true;
991 /* A POD can be changed to an instance of a polymorphic type by
992 placement new. Here we play it safe and assume that any
993 non-polymorphic type is a POD. */
994 if ((TREE_CODE (type) != RECORD_TYPE
995 || !TYPE_BINFO (type)
996 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
997 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
998 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
999 tree_to_uhwi (TYPE_SIZE (type)))))
1000 return true;
1001 return false;
1004 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1006 static bool
1007 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1008 tree otr_type)
1010 ipa_polymorphic_call_context context = {offset, outer_type,
1011 false, true};
1012 return get_class_context (&context, otr_type);
1015 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
1017 static tree
1018 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1019 tree vtable)
1021 tree v = BINFO_VTABLE (binfo);
1022 int i;
1023 tree base_binfo;
1024 unsigned HOST_WIDE_INT this_offset;
1026 if (v)
1028 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1029 gcc_unreachable ();
1031 if (offset == this_offset
1032 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1033 return binfo;
1036 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1037 if (polymorphic_type_binfo_p (base_binfo))
1039 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1040 if (base_binfo)
1041 return base_binfo;
1043 return NULL;
1046 /* T is a known constant value of a virtual table pointer.
1047 Store the virtual table to V and its offset to OFFSET.
1048 Return false if T does not look like a virtual table reference. */
1050 bool
1051 vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
1053 /* We expect &MEM[(void *)&virtual_table + 16B].
1054 We obtain the object's BINFO from the context of the virtual table.
1055 This one contains a pointer to the virtual table represented via
1056 POINTER_PLUS_EXPR. Verify that this pointer matches what
1057 we propagated through.
1059 In the case of virtual inheritance, the virtual tables may
1060 be nested, i.e. the offset may be different from 16 and we may
1061 need to dive into the type representation. */
1062 if (TREE_CODE (t) == ADDR_EXPR
1063 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1064 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1065 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1066 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1067 == VAR_DECL)
1068 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1069 (TREE_OPERAND (t, 0), 0), 0)))
1071 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1072 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1073 return true;
1076 /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR.
1077 We need to handle it when T comes from a static variable initializer or
1078 a BINFO. */
1079 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1081 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1082 t = TREE_OPERAND (t, 0);
1084 else
1085 *offset = 0;
1087 if (TREE_CODE (t) != ADDR_EXPR)
1088 return false;
1089 *v = TREE_OPERAND (t, 0);
1090 return true;
1093 /* T is a known constant value of a virtual table pointer. Return the BINFO of the
1094 instance type. */
1096 tree
1097 vtable_pointer_value_to_binfo (tree t)
1099 tree vtable;
1100 unsigned HOST_WIDE_INT offset;
1102 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1103 return NULL_TREE;
1105 /* FIXME: for stores of construction vtables we return NULL,
1106 because we do not have a BINFO for those. Eventually we should fix
1107 our representation to allow this case to be handled, too.
1108 In the case we see a store of a BINFO, however, we may assume
1109 that standard folding will be able to cope with it. */
1110 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1111 offset, vtable);
1114 /* Produce a polymorphic call context for a call to a method of an instance
1115 that is located within BASE (which is assumed to be a decl) at OFFSET. */
1117 static void
1118 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1119 tree base, HOST_WIDE_INT offset)
1121 gcc_assert (DECL_P (base));
1123 context->outer_type = TREE_TYPE (base);
1124 context->offset = offset;
1125 /* Make the very conservative assumption that all objects
1126 may be in construction.
1127 TODO: ipa-prop already contains code to tell this better;
1128 merge it later. */
1129 context->maybe_in_construction = true;
1130 context->maybe_derived_type = false;
1133 /* CST is an invariant (address of a decl); try to get a meaningful
1134 polymorphic call context for a polymorphic call of a method
1135 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
1136 Return FALSE if nothing meaningful can be found. */
1138 bool
1139 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1140 tree cst,
1141 tree otr_type,
1142 HOST_WIDE_INT offset)
1144 HOST_WIDE_INT offset2, size, max_size;
1145 tree base;
1147 if (TREE_CODE (cst) != ADDR_EXPR)
1148 return false;
1150 cst = TREE_OPERAND (cst, 0);
1151 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1152 if (!DECL_P (base) || max_size == -1 || max_size != size)
1153 return false;
1155 /* Only type-inconsistent programs can have an otr_type that is
1156 not part of the outer type. */
1157 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1158 return false;
1160 get_polymorphic_call_info_for_decl (context, base, offset);
1161 return true;
1164 /* Given a REF call in FNDECL, determine the class of the polymorphic
1165 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1166 Return a pointer to the object described by the context. */
1168 tree
1169 get_polymorphic_call_info (tree fndecl,
1170 tree ref,
1171 tree *otr_type,
1172 HOST_WIDE_INT *otr_token,
1173 ipa_polymorphic_call_context *context)
1175 tree base_pointer;
1176 *otr_type = obj_type_ref_class (ref);
1177 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1179 /* Set up basic info in case we find nothing interesting in the analysis. */
1180 context->outer_type = *otr_type;
1181 context->offset = 0;
1182 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1183 context->maybe_derived_type = true;
1184 context->maybe_in_construction = false;
1186 /* Walk SSA for outer object. */
1189 if (TREE_CODE (base_pointer) == SSA_NAME
1190 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1191 && SSA_NAME_DEF_STMT (base_pointer)
1192 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1194 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1195 STRIP_NOPS (base_pointer);
1197 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1199 HOST_WIDE_INT size, max_size;
1200 HOST_WIDE_INT offset2;
1201 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1202 &offset2, &size, &max_size);
1204 /* If this is a varying address, punt. */
1205 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1206 && max_size != -1
1207 && max_size == size)
1209 /* We found a dereference of a pointer. The type of the pointer
1210 and the MEM_REF is meaningless, but we can look further. */
1211 if (TREE_CODE (base) == MEM_REF)
1213 base_pointer = TREE_OPERAND (base, 0);
1214 context->offset
1215 += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
1216 context->outer_type = NULL;
1218 /* We found the base object. In this case the outer_type
1219 is known. */
1220 else if (DECL_P (base))
1222 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1224 /* Only type-inconsistent programs can have an otr_type that is
1225 not part of the outer type. */
1226 if (!contains_type_p (TREE_TYPE (base),
1227 context->offset + offset2, *otr_type))
1229 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1230 code sequences; we arrange the calls to be redirected to
1231 builtin_unreachable later. */
1232 *otr_token = INT_MAX;
1233 return base_pointer;
1235 get_polymorphic_call_info_for_decl (context, base,
1236 context->offset + offset2);
1237 return NULL;
1239 else
1240 break;
1242 else
1243 break;
1245 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1246 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1248 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1249 * BITS_PER_UNIT;
1250 base_pointer = TREE_OPERAND (base_pointer, 0);
1252 else
1253 break;
1255 while (true);
1257 /* Try to determine type of the outer object. */
1258 if (TREE_CODE (base_pointer) == SSA_NAME
1259 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1260 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1262 /* See if the parameter is the THIS pointer of a method. */
1263 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1264 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1266 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1267 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1269 /* Dynamic casting has possibly upcasted the type
1270 in the hierarchy. In this case the outer type is less
1271 informative than the inner type and we should forget
1272 about it. */
1273 if (!contains_type_p (context->outer_type, context->offset,
1274 *otr_type))
1276 context->outer_type = NULL;
1277 return base_pointer;
1280 /* If the function is a constructor or destructor, then
1281 the type is possibly in construction, but we know
1282 it is not a derived type. */
1283 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1284 || DECL_CXX_DESTRUCTOR_P (fndecl))
1286 context->maybe_in_construction = true;
1287 context->maybe_derived_type = false;
1289 else
1291 context->maybe_derived_type = true;
1292 context->maybe_in_construction = false;
1294 return base_pointer;
1296 /* Non-PODs passed by value are really passed by invisible
1297 reference. In this case we also know the type of the
1298 object. */
1299 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1301 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1302 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1303 /* Only type-inconsistent programs can have an otr_type that is
1304 not part of the outer type. */
1305 if (!contains_type_p (context->outer_type, context->offset,
1306 *otr_type))
1308 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1309 code sequences; we arrange the calls to be redirected to
1310 builtin_unreachable later. */
1311 *otr_token = INT_MAX;
1312 return base_pointer;
1314 context->maybe_derived_type = false;
1315 context->maybe_in_construction = false;
1316 return base_pointer;
1319 /* TODO: There are multiple ways to derive a type. For instance,
1320 BASE_POINTER may be passed to a constructor call prior to our reference.
1321 We do not do this kind of flow-sensitive analysis yet. */
1322 return base_pointer;
1325 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1326 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1327 and insert them into NODES.
1329 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
1331 static void
1332 record_targets_from_bases (tree otr_type,
1333 HOST_WIDE_INT otr_token,
1334 tree outer_type,
1335 HOST_WIDE_INT offset,
1336 vec <cgraph_node *> &nodes,
1337 pointer_set_t *inserted,
1338 pointer_set_t *matched_vtables,
1339 bool *completep)
1341 while (true)
1343 HOST_WIDE_INT pos, size;
1344 tree base_binfo;
1345 tree fld;
1347 if (types_same_for_odr (outer_type, otr_type))
1348 return;
1350 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1352 if (TREE_CODE (fld) != FIELD_DECL)
1353 continue;
1355 pos = int_bit_position (fld);
1356 size = tree_to_shwi (DECL_SIZE (fld));
1357 if (pos <= offset && (pos + size) > offset
1358 /* Do not get confused by zero sized bases. */
1359 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1360 break;
1362 /* Within a class type we should always find corresponding fields. */
1363 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1365 /* Non-base types should have been stripped by outer_class_type. */
1366 gcc_assert (DECL_ARTIFICIAL (fld));
1368 outer_type = TREE_TYPE (fld);
1369 offset -= pos;
1371 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1372 offset, otr_type);
1373 if (!base_binfo)
1375 gcc_assert (odr_violation_reported);
1376 return;
1378 gcc_assert (base_binfo);
1379 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1381 bool can_refer;
1382 tree target = gimple_get_virt_method_for_binfo (otr_token,
1383 base_binfo,
1384 &can_refer);
1385 maybe_record_node (nodes, target, inserted, can_refer, completep);
1386 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1391 /* When a virtual table is removed, we may need to flush the cache. */
1393 static void
1394 devirt_variable_node_removal_hook (varpool_node *n,
1395 void *d ATTRIBUTE_UNUSED)
1397 if (cached_polymorphic_call_targets
1398 && DECL_VIRTUAL_P (n->decl)
1399 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1400 free_polymorphic_call_targets_hash ();
1403 /* Return a vector containing possible targets of a polymorphic call of type
1404 OTR_TYPE calling method OTR_TOKEN within the type of OTR_OUTER_TYPE and OFFSET.
1405 If INCLUDE_BASES is true, also walk base types of OUTER_TYPES containing
1406 OTR_TYPE and include their virtual methods. This is useful for types
1407 possibly in construction or destruction where the virtual table may
1408 temporarily change to one of the base types. INCLUDE_DERIVER_TYPES makes
1409 us walk the inheritance graph for all derivations.
1411 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1412 undefined and should be redirected to unreachable.
1414 If COMPLETEP is non-NULL, store true if the list is complete.
1415 CACHE_TOKEN (if non-NULL) will get set to a unique ID of the entry
1416 in the target cache. If the user needs to visit every target list
1417 just once, it can memoize them.
1419 NONCONSTRUCTION_TARGETS specifies the number of targets under the assumption
1420 that the type is not in construction. Those targets appear first in the
1421 vector returned.
1423 The returned vector is placed into the cache. It is NOT the caller's responsibility
1424 to free it. The vector can be freed on a cgraph_remove_node call if
1425 the particular node is a virtual function present in the cache. */
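/* A purely illustrative usage sketch, using only the interface documented
   above (visited_tokens is a hypothetical pointer set kept by the caller):

     bool final;
     int nonconstruction_targets;
     void *cache_token;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token, context,
                                            &final, &cache_token,
                                            &nonconstruction_targets);
     if (pointer_set_insert (visited_tokens, cache_token))
       return;   // this exact target list was already processed once

   The ipa_devirt pass below uses cache_token the same way, remembering
   useless target lists in bad_call_targets.  */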
1427 vec <cgraph_node *>
1428 possible_polymorphic_call_targets (tree otr_type,
1429 HOST_WIDE_INT otr_token,
1430 ipa_polymorphic_call_context context,
1431 bool *completep,
1432 void **cache_token,
1433 int *nonconstruction_targetsp)
1435 static struct cgraph_node_hook_list *node_removal_hook_holder;
1436 pointer_set_t *inserted;
1437 pointer_set_t *matched_vtables;
1438 vec <cgraph_node *> nodes = vNULL;
1439 odr_type type, outer_type;
1440 polymorphic_call_target_d key;
1441 polymorphic_call_target_d **slot;
1442 unsigned int i;
1443 tree binfo, target;
1444 bool complete;
1445 bool can_refer;
1447 /* If ODR is not initialized, return an empty incomplete list. */
1448 if (!odr_hash.is_created ())
1450 if (completep)
1451 *completep = false;
1452 if (nonconstruction_targetsp)
1453 *nonconstruction_targetsp = 0;
1454 return nodes;
1457 /* If we hit a type inconsistency, just return an empty list of targets. */
1458 if (otr_token == INT_MAX)
1460 if (completep)
1461 *completep = true;
1462 if (nonconstruction_targetsp)
1463 *nonconstruction_targetsp = 0;
1464 return nodes;
1467 type = get_odr_type (otr_type, true);
1469 /* Lookup the outer class type we want to walk. */
1470 if (context.outer_type
1471 && !get_class_context (&context, otr_type))
1473 if (completep)
1474 *completep = false;
1475 if (nonconstruction_targetsp)
1476 *nonconstruction_targetsp = 0;
1477 return nodes;
1480 /* We canonicalize our query, so we do not need extra hashtable entries. */
1482 /* Without an outer type, we have no use for the offset. Just do the
1483 basic search from the inner type. */
1484 if (!context.outer_type)
1486 context.outer_type = otr_type;
1487 context.offset = 0;
1489 /* We need to update our hierarchy if the type does not exist. */
1490 outer_type = get_odr_type (context.outer_type, true);
1491 /* If outer and inner type match, there are no bases to see. */
1492 if (type == outer_type)
1493 context.maybe_in_construction = false;
1494 /* If the type is final, there are no derivations. */
1495 if (TYPE_FINAL_P (outer_type->type))
1496 context.maybe_derived_type = false;
1498 /* Initialize query cache. */
1499 if (!cached_polymorphic_call_targets)
1501 cached_polymorphic_call_targets = pointer_set_create ();
1502 polymorphic_call_target_hash.create (23);
1503 if (!node_removal_hook_holder)
1505 node_removal_hook_holder =
1506 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1507 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1508 NULL);
1512 /* Lookup cached answer. */
1513 key.type = type;
1514 key.otr_token = otr_token;
1515 key.context = context;
1516 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1517 if (cache_token)
1518 *cache_token = (void *)*slot;
1519 if (*slot)
1521 if (completep)
1522 *completep = (*slot)->complete;
1523 if (nonconstruction_targetsp)
1524 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1525 return (*slot)->targets;
1528 complete = true;
1530 /* Do actual search. */
1531 timevar_push (TV_IPA_VIRTUAL_CALL);
1532 *slot = XCNEW (polymorphic_call_target_d);
1533 if (cache_token)
1534 *cache_token = (void *)*slot;
1535 (*slot)->type = type;
1536 (*slot)->otr_token = otr_token;
1537 (*slot)->context = context;
1539 inserted = pointer_set_create ();
1540 matched_vtables = pointer_set_create ();
1542 /* First see the virtual method of the type itself. */
1543 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1544 context.offset, otr_type);
1545 if (binfo)
1546 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1547 &can_refer);
1548 else
1550 gcc_assert (odr_violation_reported);
1551 target = NULL;
1554 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1556 if (target)
1558 /* In the case we get a final method, we don't need
1559 to walk derivations. */
1560 if (DECL_FINAL_P (target))
1561 context.maybe_derived_type = false;
1563 else
1564 gcc_assert (!complete);
1566 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1568 /* Next walk recursively all derived types. */
1569 if (context.maybe_derived_type)
1571 /* For anonymous namespace types we can attempt to build the full type.
1572 All derivations must be in this unit (unless we see a partial unit). */
1573 if (!type->anonymous_namespace || flag_ltrans)
1574 complete = false;
1575 for (i = 0; i < outer_type->derived_types.length(); i++)
1576 possible_polymorphic_call_targets_1 (nodes, inserted,
1577 matched_vtables,
1578 otr_type,
1579 outer_type->derived_types[i],
1580 otr_token, outer_type->type,
1581 context.offset, &complete);
1584 /* Finally walk bases, if asked to. */
1585 (*slot)->nonconstruction_targets = nodes.length();
1586 if (context.maybe_in_construction)
1587 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1588 context.offset, nodes, inserted,
1589 matched_vtables, &complete);
1591 (*slot)->targets = nodes;
1592 (*slot)->complete = complete;
1593 if (completep)
1594 *completep = complete;
1595 if (nonconstruction_targetsp)
1596 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1598 pointer_set_destroy (inserted);
1599 pointer_set_destroy (matched_vtables);
1600 timevar_pop (TV_IPA_VIRTUAL_CALL);
1601 return nodes;
1604 /* Dump all possible targets of a polymorphic call. */
1606 void
1607 dump_possible_polymorphic_call_targets (FILE *f,
1608 tree otr_type,
1609 HOST_WIDE_INT otr_token,
1610 const ipa_polymorphic_call_context &ctx)
1612 vec <cgraph_node *> targets;
1613 bool final;
1614 odr_type type = get_odr_type (otr_type, false);
1615 unsigned int i;
1616 int nonconstruction;
1618 if (!type)
1619 return;
1620 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1621 ctx,
1622 &final, NULL, &nonconstruction);
1623 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1624 print_generic_expr (f, type->type, TDF_SLIM);
1625 fprintf (f, " token %i\n", (int)otr_token);
1626 if (ctx.outer_type || ctx.offset)
1628 fprintf (f, " Contained in type:");
1629 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1630 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1631 ctx.offset);
1634 fprintf (f, " %s%s%s\n ",
1635 final ? "This is a complete list." :
1636 "This is partial list; extra targets may be defined in other units.",
1637 ctx.maybe_in_construction ? " (base types included)" : "",
1638 ctx.maybe_derived_type ? " (derived types included)" : "");
1639 for (i = 0; i < targets.length (); i++)
1641 char *name = NULL;
1642 if (i == (unsigned)nonconstruction)
1643 fprintf (f, "\n If the type is in construction,"
1644 " then additional tarets are:\n"
1645 " ");
1646 if (in_lto_p)
1647 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1648 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1649 if (in_lto_p)
1650 free (name);
1651 if (!targets[i]->definition)
1652 fprintf (f, " (no definition%s)",
1653 DECL_DECLARED_INLINE_P (targets[i]->decl)
1654 ? " inline" : "");
1656 fprintf (f, "\n\n");
1660 /* Return true if N can possibly be a target of a polymorphic call of
1661 OTR_TYPE/OTR_TOKEN. */
1663 bool
1664 possible_polymorphic_call_target_p (tree otr_type,
1665 HOST_WIDE_INT otr_token,
1666 const ipa_polymorphic_call_context &ctx,
1667 struct cgraph_node *n)
1669 vec <cgraph_node *> targets;
1670 unsigned int i;
1671 enum built_in_function fcode;
1672 bool final;
1674 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1675 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1676 == BUILT_IN_UNREACHABLE
1677 || fcode == BUILT_IN_TRAP))
1678 return true;
1680 if (!odr_hash.is_created ())
1681 return true;
1682 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1683 for (i = 0; i < targets.length (); i++)
1684 if (symtab_semantically_equivalent_p (n, targets[i]))
1685 return true;
1687 /* At the moment we allow the middle end to dig out new external declarations
1688 as targets of polymorphic calls. */
1689 if (!final && !n->definition)
1690 return true;
1691 return false;
1695 /* After callgraph construction new external nodes may appear.
1696 Add them into the graph. */
1698 void
1699 update_type_inheritance_graph (void)
1701 struct cgraph_node *n;
1703 if (!odr_hash.is_created ())
1704 return;
1705 free_polymorphic_call_targets_hash ();
1706 timevar_push (TV_IPA_INHERITANCE);
1707 /* We reconstruct the graph starting from the types of all methods seen in
1708 the unit. */
1709 FOR_EACH_FUNCTION (n)
1710 if (DECL_VIRTUAL_P (n->decl)
1711 && !n->definition
1712 && symtab_real_symbol_p (n))
1713 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1714 timevar_pop (TV_IPA_INHERITANCE);
1718 /* Return true if N looks like a likely target of a polymorphic call.
1719 Rule out cxa_pure_virtual, noreturns, functions declared cold and
1720 other obvious cases. */
1722 bool
1723 likely_target_p (struct cgraph_node *n)
1725 int flags;
1726 /* cxa_pure_virtual and similar things are not likely. */
1727 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1728 return false;
1729 flags = flags_from_decl_or_type (n->decl);
1730 if (flags & ECF_NORETURN)
1731 return false;
1732 if (lookup_attribute ("cold",
1733 DECL_ATTRIBUTES (n->decl)))
1734 return false;
1735 if (n->frequency < NODE_FREQUENCY_NORMAL)
1736 return false;
1737 return true;
1740 /* The ipa-devirt pass.
1741 When a polymorphic call has only one likely target in the unit,
1742 turn it into a speculative call. */
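/* A purely illustrative sketch of the situation this pass targets (all
   names are made up):

     struct Shape  { virtual int area (); };
     struct Square : Shape { int area (); };   // only derivation in the unit

     int f (Shape *s) { return s->area (); }   // hot polymorphic call

   If Square::area is the single likely target returned by
   possible_polymorphic_call_targets for the call in f, the indirect edge is
   paired with a speculative direct edge to Square::area (a runtime check and
   the original indirect call are kept as a fallback), which later passes may
   inline.  This is done via cgraph_turn_edge_to_speculative below.  */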
1744 static unsigned int
1745 ipa_devirt (void)
1747 struct cgraph_node *n;
1748 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1749 struct cgraph_edge *e;
1751 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1752 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1753 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
1755 FOR_EACH_DEFINED_FUNCTION (n)
1757 bool update = false;
1758 if (dump_file && n->indirect_calls)
1759 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
1760 n->name (), n->order);
1761 for (e = n->indirect_calls; e; e = e->next_callee)
1762 if (e->indirect_info->polymorphic)
1764 struct cgraph_node *likely_target = NULL;
1765 void *cache_token;
1766 bool final;
1767 int nonconstruction_targets;
1768 vec <cgraph_node *>targets
1769 = possible_polymorphic_call_targets
1770 (e, &final, &cache_token, &nonconstruction_targets);
1771 unsigned int i;
1773 if (dump_file)
1774 dump_possible_polymorphic_call_targets
1775 (dump_file, e);
1777 npolymorphic++;
1779 if (!cgraph_maybe_hot_edge_p (e))
1781 if (dump_file)
1782 fprintf (dump_file, "Call is cold\n\n");
1783 ncold++;
1784 continue;
1786 if (e->speculative)
1788 if (dump_file)
1789 fprintf (dump_file, "Call is already speculated\n\n");
1790 nspeculated++;
1792 /* When dumping see if we agree with speculation. */
1793 if (!dump_file)
1794 continue;
1796 if (pointer_set_contains (bad_call_targets,
1797 cache_token))
1799 if (dump_file)
1800 fprintf (dump_file, "Target list is known to be useless\n\n");
1801 nmultiple++;
1802 continue;
1804 for (i = 0; i < targets.length (); i++)
1805 if (likely_target_p (targets[i]))
1807 if (likely_target)
1809 if (i < (unsigned) nonconstruction_targets)
1811 likely_target = NULL;
1812 if (dump_file)
1813 fprintf (dump_file, "More than one likely target\n\n");
1814 nmultiple++;
1816 break;
1818 likely_target = targets[i];
1820 if (!likely_target)
1822 pointer_set_insert (bad_call_targets, cache_token);
1823 continue;
1825 /* This is reached only when dumping; check if we agree or disagree
1826 with the speculation. */
1827 if (e->speculative)
1829 struct cgraph_edge *e2;
1830 struct ipa_ref *ref;
1831 cgraph_speculative_call_info (e, e2, e, ref);
1832 if (cgraph_function_or_thunk_node (e2->callee, NULL)
1833 == cgraph_function_or_thunk_node (likely_target, NULL))
1835 fprintf (dump_file, "We agree with speculation\n\n");
1836 nok++;
1838 else
1840 fprintf (dump_file, "We disagree with speculation\n\n");
1841 nwrong++;
1843 continue;
1845 if (!likely_target->definition)
1847 if (dump_file)
1848 fprintf (dump_file, "Target is not a definition\n\n");
1849 nnotdefined++;
1850 continue;
1852 /* Do not introduce new references to external symbols. While we
1853 can handle these just fine, it is common for programs to be
1854 incorrectly built against headers defining methods they are not
1855 actually linked with. */
1856 if (DECL_EXTERNAL (likely_target->decl))
1858 if (dump_file)
1859 fprintf (dump_file, "Target is external\n\n");
1860 nexternal++;
1861 continue;
1863 /* Don't use an implicitly-declared destructor (c++/58678). */
1864 struct cgraph_node *non_thunk_target
1865 = cgraph_function_node (likely_target);
1866 if (DECL_ARTIFICIAL (non_thunk_target->decl)
1867 && DECL_COMDAT (non_thunk_target->decl))
1869 if (dump_file)
1870 fprintf (dump_file, "Target is artificial\n\n");
1871 nartificial++;
1872 continue;
1874 if (cgraph_function_body_availability (likely_target)
1875 <= AVAIL_OVERWRITABLE
1876 && symtab_can_be_discarded (likely_target))
1878 if (dump_file)
1879 fprintf (dump_file, "Target is overwritable\n\n");
1880 noverwritable++;
1881 continue;
1883 else
1885 if (dump_file)
1886 fprintf (dump_file,
1887 "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
1888 n->name (), n->order,
1889 likely_target->name (),
1890 likely_target->order);
1891 if (!symtab_can_be_discarded (likely_target))
1893 cgraph_node *alias;
1894 alias = cgraph (symtab_nonoverwritable_alias
1895 (likely_target));
1896 if (alias)
1897 likely_target = alias;
1899 nconverted++;
1900 update = true;
1901 cgraph_turn_edge_to_speculative
1902 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
1905 if (update)
1906 inline_update_overall_summary (n);
1908 pointer_set_destroy (bad_call_targets);
1910 if (dump_file)
1911 fprintf (dump_file,
1912 "%i polymorphic calls, %i devirtualized,"
1913 " %i speculatively devirtualized, %i cold\n"
1914 "%i have multiple targets, %i overwritable,"
1915 " %i already speculated (%i agree, %i disagree),"
1916 " %i external, %i not defined, %i artificial\n",
1917 npolymorphic, ndevirtualized, nconverted, ncold,
1918 nmultiple, noverwritable, nspeculated, nok, nwrong,
1919 nexternal, nnotdefined, nartificial);
1920 return ndevirtualized ? TODO_remove_functions : 0;
1923 /* Gate for speculative IPA devirtualization optimization. */
1925 static bool
1926 gate_ipa_devirt (void)
1928 return (flag_devirtualize
1929 && flag_devirtualize_speculatively
1930 && optimize);
1933 namespace {
1935 const pass_data pass_data_ipa_devirt =
1937 IPA_PASS, /* type */
1938 "devirt", /* name */
1939 OPTGROUP_NONE, /* optinfo_flags */
1940 true, /* has_gate */
1941 true, /* has_execute */
1942 TV_IPA_DEVIRT, /* tv_id */
1943 0, /* properties_required */
1944 0, /* properties_provided */
1945 0, /* properties_destroyed */
1946 0, /* todo_flags_start */
1947 ( TODO_dump_symtab ), /* todo_flags_finish */
1950 class pass_ipa_devirt : public ipa_opt_pass_d
1952 public:
1953 pass_ipa_devirt (gcc::context *ctxt)
1954 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
1955 NULL, /* generate_summary */
1956 NULL, /* write_summary */
1957 NULL, /* read_summary */
1958 NULL, /* write_optimization_summary */
1959 NULL, /* read_optimization_summary */
1960 NULL, /* stmt_fixup */
1961 0, /* function_transform_todo_flags_start */
1962 NULL, /* function_transform */
1963 NULL) /* variable_transform */
1966 /* opt_pass methods: */
1967 bool gate () { return gate_ipa_devirt (); }
1968 unsigned int execute () { return ipa_devirt (); }
1970 }; // class pass_ipa_devirt
1972 } // anon namespace
1974 ipa_opt_pass_d *
1975 make_pass_ipa_devirt (gcc::context *ctxt)
1977 return new pass_ipa_devirt (ctxt);
1980 #include "gt-ipa-devirt.h"